From 3504265229c589ecc166e3ad4c33bb198b11e4ce Mon Sep 17 00:00:00 2001 From: sunil unnava Date: Tue, 23 Oct 2018 12:18:59 -0400 Subject: update the package name Issue-ID: DMAAP-858 Change-Id: I49ae6eb9c51a261b64b911e607fcbbca46c5423c Signed-off-by: sunil unnava --- .../java/com/att/dmf/mr/CambriaApiException.java | 80 -- .../java/com/att/dmf/mr/CambriaApiVersionInfo.java | 88 --- .../java/com/att/dmf/mr/backends/Consumer.java | 105 --- .../com/att/dmf/mr/backends/ConsumerFactory.java | 118 --- .../java/com/att/dmf/mr/backends/MetricsSet.java | 71 -- .../java/com/att/dmf/mr/backends/Publisher.java | 99 --- .../dmf/mr/backends/kafka/Kafka011Consumer.java | 397 ---------- .../mr/backends/kafka/Kafka011ConsumerUtil.java | 123 --- .../att/dmf/mr/backends/kafka/KafkaConsumer.txt | 386 --------- .../dmf/mr/backends/kafka/KafkaConsumerCache.java | 742 ------------------ .../mr/backends/kafka/KafkaLiveLockAvoider2.java | 159 ---- .../att/dmf/mr/backends/kafka/KafkaPublisher.java | 228 ------ .../dmf/mr/backends/kafka/LiveLockAvoidance.java | 45 -- .../mr/backends/kafka/LockInstructionWatcher.java | 100 --- .../mr/backends/memory/MemoryConsumerFactory.java | 184 ----- .../dmf/mr/backends/memory/MemoryMetaBroker.java | 201 ----- .../att/dmf/mr/backends/memory/MemoryQueue.java | 207 ----- .../mr/backends/memory/MemoryQueuePublisher.java | 92 --- .../att/dmf/mr/backends/memory/MessageLogger.java | 109 --- src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java | 88 --- .../com/att/dmf/mr/beans/DMaaPCambriaLimiter.java | 288 ------- .../java/com/att/dmf/mr/beans/DMaaPContext.java | 104 --- .../dmf/mr/beans/DMaaPKafkaConsumerFactory.java | 361 --------- .../com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java | 495 ------------ .../java/com/att/dmf/mr/beans/DMaaPMetricsSet.java | 231 ------ .../java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java | 140 ---- .../java/com/att/dmf/mr/beans/DMaaPZkClient.java | 45 -- .../java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java | 51 -- src/main/java/com/att/dmf/mr/beans/LogDetails.java | 214 ----- src/main/java/com/att/dmf/mr/beans/TopicBean.java | 155 ---- .../com/att/dmf/mr/constants/CambriaConstants.java | 126 --- .../mr/exception/DMaaPAccessDeniedException.java | 42 - .../mr/exception/DMaaPCambriaExceptionMapper.java | 94 --- .../att/dmf/mr/exception/DMaaPErrorMessages.java | 248 ------ .../att/dmf/mr/exception/DMaaPResponseCode.java | 93 --- .../dmf/mr/exception/DMaaPWebExceptionMapper.java | 137 ---- .../com/att/dmf/mr/exception/ErrorResponse.java | 135 ---- .../mr/listener/CambriaServletContextListener.java | 64 -- .../att/dmf/mr/listener/DME2EndPointLoader.java | 123 --- .../java/com/att/dmf/mr/metabroker/Broker.java | 92 --- .../java/com/att/dmf/mr/metabroker/Broker1.java | 95 --- src/main/java/com/att/dmf/mr/metabroker/Topic.java | 133 ---- .../publisher/CambriaBatchingPublisher.java | 52 -- .../dmf/mr/metrics/publisher/CambriaClient.java | 89 --- .../dmf/mr/metrics/publisher/CambriaConsumer.java | 52 -- .../dmf/mr/metrics/publisher/CambriaPublisher.java | 101 --- .../metrics/publisher/CambriaPublisherUtility.java | 146 ---- .../publisher/DMaaPCambriaClientFactory.java | 420 ---------- .../metrics/publisher/impl/CambriaBaseClient.java | 100 --- .../att/dmf/mr/metrics/publisher/impl/Clock.java | 74 -- .../publisher/impl/DMaaPCambriaConsumerImpl.java | 169 ---- .../impl/DMaaPCambriaSimplerBatchPublisher.java | 422 ---------- .../com/att/dmf/mr/resources/CambriaEventSet.java | 114 --- .../mr/resources/CambriaOutboundEventStream.java | 554 ------------- 
.../streamReaders/CambriaJsonStreamReader.java | 169 ---- .../streamReaders/CambriaRawStreamReader.java | 141 ---- .../streamReaders/CambriaStreamReader.java | 229 ------ .../streamReaders/CambriaTextStreamReader.java | 140 ---- .../att/dmf/mr/security/DMaaPAAFAuthenticator.java | 39 - .../dmf/mr/security/DMaaPAAFAuthenticatorImpl.java | 80 -- .../att/dmf/mr/security/DMaaPAuthenticator.java | 61 -- .../dmf/mr/security/DMaaPAuthenticatorImpl.java | 133 ---- .../mr/security/impl/DMaaPMechIdAuthenticator.java | 87 --- .../impl/DMaaPOriginalUebAuthenticator.java | 293 ------- .../java/com/att/dmf/mr/service/AdminService.java | 83 -- .../com/att/dmf/mr/service/ApiKeysService.java | 105 --- .../java/com/att/dmf/mr/service/EventsService.java | 75 -- .../java/com/att/dmf/mr/service/MMService.java | 66 -- .../com/att/dmf/mr/service/MetricsService.java | 54 -- .../java/com/att/dmf/mr/service/TopicService.java | 176 ----- .../com/att/dmf/mr/service/TransactionService.java | 61 -- .../java/com/att/dmf/mr/service/UIService.java | 92 --- .../att/dmf/mr/service/impl/AdminServiceImpl.java | 190 ----- .../dmf/mr/service/impl/ApiKeysServiceImpl.java | 320 -------- .../dmf/mr/service/impl/BaseTransactionDbImpl.java | 153 ---- .../att/dmf/mr/service/impl/EventsServiceImpl.java | 867 --------------------- .../com/att/dmf/mr/service/impl/MMServiceImpl.java | 600 -------------- .../dmf/mr/service/impl/MetricsServiceImpl.java | 115 --- .../att/dmf/mr/service/impl/TopicServiceImpl.java | 694 ----------------- .../mr/service/impl/TransactionServiceImpl.java | 100 --- .../com/att/dmf/mr/service/impl/UIServiceImpl.java | 210 ----- .../mr/transaction/DMaaPTransactionFactory.java | 44 -- .../dmf/mr/transaction/DMaaPTransactionObj.java | 83 -- .../dmf/mr/transaction/DMaaPTransactionObjDB.java | 86 -- .../com/att/dmf/mr/transaction/TransactionObj.java | 202 ----- .../com/att/dmf/mr/transaction/TrnRequest.java | 183 ----- .../impl/DMaaPSimpleTransactionFactory.java | 62 -- .../com/att/dmf/mr/utils/ConfigurationReader.java | 492 ------------ .../com/att/dmf/mr/utils/DMaaPCuratorFactory.java | 69 -- .../com/att/dmf/mr/utils/DMaaPResponseBuilder.java | 370 --------- src/main/java/com/att/dmf/mr/utils/Emailer.java | 211 ----- .../java/com/att/dmf/mr/utils/PropertyReader.java | 125 --- src/main/java/com/att/dmf/mr/utils/Utils.java | 175 ----- .../metrics/cambria/DMaaPMetricsSender.java | 197 ----- .../com/att/mr/filter/ContentLengthFilter.java | 134 ---- src/main/java/com/att/mr/filter/DefaultLength.java | 37 - .../org/onap/dmaap/dmf/mr/CambriaApiException.java | 80 ++ .../onap/dmaap/dmf/mr/CambriaApiVersionInfo.java | 88 +++ .../org/onap/dmaap/dmf/mr/backends/Consumer.java | 105 +++ .../dmaap/dmf/mr/backends/ConsumerFactory.java | 118 +++ .../org/onap/dmaap/dmf/mr/backends/MetricsSet.java | 71 ++ .../org/onap/dmaap/dmf/mr/backends/Publisher.java | 99 +++ .../dmf/mr/backends/kafka/Kafka011Consumer.java | 397 ++++++++++ .../mr/backends/kafka/Kafka011ConsumerUtil.java | 123 +++ .../dmaap/dmf/mr/backends/kafka/KafkaConsumer.txt | 386 +++++++++ .../dmf/mr/backends/kafka/KafkaConsumerCache.java | 742 ++++++++++++++++++ .../mr/backends/kafka/KafkaLiveLockAvoider2.java | 159 ++++ .../dmf/mr/backends/kafka/KafkaPublisher.java | 228 ++++++ .../dmf/mr/backends/kafka/LiveLockAvoidance.java | 45 ++ .../mr/backends/kafka/LockInstructionWatcher.java | 100 +++ .../mr/backends/memory/MemoryConsumerFactory.java | 184 +++++ .../dmf/mr/backends/memory/MemoryMetaBroker.java | 201 +++++ .../dmaap/dmf/mr/backends/memory/MemoryQueue.java | 207 +++++ 
.../mr/backends/memory/MemoryQueuePublisher.java | 92 +++ .../dmf/mr/backends/memory/MessageLogger.java | 109 +++ .../org/onap/dmaap/dmf/mr/beans/ApiKeyBean.java | 88 +++ .../dmaap/dmf/mr/beans/DMaaPCambriaLimiter.java | 288 +++++++ .../org/onap/dmaap/dmf/mr/beans/DMaaPContext.java | 104 +++ .../dmf/mr/beans/DMaaPKafkaConsumerFactory.java | 361 +++++++++ .../dmaap/dmf/mr/beans/DMaaPKafkaMetaBroker.java | 495 ++++++++++++ .../onap/dmaap/dmf/mr/beans/DMaaPMetricsSet.java | 231 ++++++ .../org/onap/dmaap/dmf/mr/beans/DMaaPNsaApiDb.java | 140 ++++ .../org/onap/dmaap/dmf/mr/beans/DMaaPZkClient.java | 45 ++ .../onap/dmaap/dmf/mr/beans/DMaaPZkConfigDb.java | 51 ++ .../org/onap/dmaap/dmf/mr/beans/LogDetails.java | 214 +++++ .../org/onap/dmaap/dmf/mr/beans/TopicBean.java | 155 ++++ .../dmaap/dmf/mr/constants/CambriaConstants.java | 126 +++ .../mr/exception/DMaaPAccessDeniedException.java | 42 + .../mr/exception/DMaaPCambriaExceptionMapper.java | 94 +++ .../dmaap/dmf/mr/exception/DMaaPErrorMessages.java | 248 ++++++ .../dmaap/dmf/mr/exception/DMaaPResponseCode.java | 93 +++ .../dmf/mr/exception/DMaaPWebExceptionMapper.java | 137 ++++ .../onap/dmaap/dmf/mr/exception/ErrorResponse.java | 135 ++++ .../mr/listener/CambriaServletContextListener.java | 64 ++ .../dmaap/dmf/mr/listener/DME2EndPointLoader.java | 123 +++ .../org/onap/dmaap/dmf/mr/metabroker/Broker.java | 92 +++ .../org/onap/dmaap/dmf/mr/metabroker/Broker1.java | 95 +++ .../org/onap/dmaap/dmf/mr/metabroker/Topic.java | 133 ++++ .../publisher/CambriaBatchingPublisher.java | 52 ++ .../dmf/mr/metrics/publisher/CambriaClient.java | 89 +++ .../dmf/mr/metrics/publisher/CambriaConsumer.java | 52 ++ .../dmf/mr/metrics/publisher/CambriaPublisher.java | 101 +++ .../metrics/publisher/CambriaPublisherUtility.java | 146 ++++ .../publisher/DMaaPCambriaClientFactory.java | 420 ++++++++++ .../metrics/publisher/impl/CambriaBaseClient.java | 100 +++ .../dmaap/dmf/mr/metrics/publisher/impl/Clock.java | 74 ++ .../publisher/impl/DMaaPCambriaConsumerImpl.java | 169 ++++ .../impl/DMaaPCambriaSimplerBatchPublisher.java | 422 ++++++++++ .../dmaap/dmf/mr/resources/CambriaEventSet.java | 114 +++ .../mr/resources/CambriaOutboundEventStream.java | 554 +++++++++++++ .../streamReaders/CambriaJsonStreamReader.java | 169 ++++ .../streamReaders/CambriaRawStreamReader.java | 141 ++++ .../streamReaders/CambriaStreamReader.java | 229 ++++++ .../streamReaders/CambriaTextStreamReader.java | 140 ++++ .../dmf/mr/security/DMaaPAAFAuthenticator.java | 39 + .../dmf/mr/security/DMaaPAAFAuthenticatorImpl.java | 80 ++ .../dmaap/dmf/mr/security/DMaaPAuthenticator.java | 61 ++ .../dmf/mr/security/DMaaPAuthenticatorImpl.java | 133 ++++ .../mr/security/impl/DMaaPMechIdAuthenticator.java | 87 +++ .../impl/DMaaPOriginalUebAuthenticator.java | 293 +++++++ .../onap/dmaap/dmf/mr/service/AdminService.java | 83 ++ .../onap/dmaap/dmf/mr/service/ApiKeysService.java | 105 +++ .../onap/dmaap/dmf/mr/service/EventsService.java | 75 ++ .../org/onap/dmaap/dmf/mr/service/MMService.java | 66 ++ .../onap/dmaap/dmf/mr/service/MetricsService.java | 54 ++ .../onap/dmaap/dmf/mr/service/TopicService.java | 176 +++++ .../dmaap/dmf/mr/service/TransactionService.java | 61 ++ .../org/onap/dmaap/dmf/mr/service/UIService.java | 92 +++ .../dmf/mr/service/impl/AdminServiceImpl.java | 190 +++++ .../dmf/mr/service/impl/ApiKeysServiceImpl.java | 320 ++++++++ .../dmf/mr/service/impl/BaseTransactionDbImpl.java | 153 ++++ .../dmf/mr/service/impl/EventsServiceImpl.java | 867 +++++++++++++++++++++ 
.../dmaap/dmf/mr/service/impl/MMServiceImpl.java | 600 ++++++++++++++ .../dmf/mr/service/impl/MetricsServiceImpl.java | 115 +++ .../dmf/mr/service/impl/TopicServiceImpl.java | 694 +++++++++++++++++ .../mr/service/impl/TransactionServiceImpl.java | 100 +++ .../dmaap/dmf/mr/service/impl/UIServiceImpl.java | 210 +++++ .../mr/transaction/DMaaPTransactionFactory.java | 44 ++ .../dmf/mr/transaction/DMaaPTransactionObj.java | 83 ++ .../dmf/mr/transaction/DMaaPTransactionObjDB.java | 86 ++ .../dmaap/dmf/mr/transaction/TransactionObj.java | 202 +++++ .../onap/dmaap/dmf/mr/transaction/TrnRequest.java | 183 +++++ .../impl/DMaaPSimpleTransactionFactory.java | 62 ++ .../dmaap/dmf/mr/utils/ConfigurationReader.java | 492 ++++++++++++ .../dmaap/dmf/mr/utils/DMaaPCuratorFactory.java | 69 ++ .../dmaap/dmf/mr/utils/DMaaPResponseBuilder.java | 370 +++++++++ .../java/org/onap/dmaap/dmf/mr/utils/Emailer.java | 211 +++++ .../onap/dmaap/dmf/mr/utils/PropertyReader.java | 125 +++ .../java/org/onap/dmaap/dmf/mr/utils/Utils.java | 175 +++++ .../metrics/cambria/DMaaPMetricsSender.java | 197 +++++ .../onap/dmaap/mr/filter/ContentLengthFilter.java | 134 ++++ .../org/onap/dmaap/mr/filter/DefaultLength.java | 37 + .../java/com/att/mr/test/dmaap/ApiKeyBean.java | 72 -- .../com/att/mr/test/dmaap/DMaapPubSubTest.java | 138 ---- .../java/com/att/mr/test/dmaap/DMaapTopicTest.java | 267 ------- .../java/com/att/mr/test/dmaap/DmaapAdminTest.java | 60 -- .../com/att/mr/test/dmaap/DmaapApiKeyTest.java | 162 ---- .../com/att/mr/test/dmaap/DmaapMetricsTest.java | 77 -- .../java/com/att/mr/test/dmaap/JUnitTestSuite.java | 44 -- .../com/att/mr/test/dmaap/LoadPropertyFile.java | 48 -- .../java/com/att/mr/test/dmaap/TestRunner.java | 42 - src/test/java/com/att/mr/test/dmaap/TopicBean.java | 72 -- src/test/java/com/att/mr/test/dme2/ApiKeyBean.java | 72 -- .../java/com/att/mr/test/dme2/DME2AdminTest.java | 149 ---- .../java/com/att/mr/test/dme2/DME2ApiKeyTest.java | 229 ------ .../att/mr/test/dme2/DME2ConsumerFilterTest.java | 97 --- .../com/att/mr/test/dme2/DME2ConsumerTest.java | 95 --- .../java/com/att/mr/test/dme2/DME2MetricsTest.java | 133 ---- .../com/att/mr/test/dme2/DME2ProducerTest.java | 101 --- .../java/com/att/mr/test/dme2/DME2TopicTest.java | 546 ------------- .../java/com/att/mr/test/dme2/JUnitTestSuite.java | 44 -- .../com/att/mr/test/dme2/LoadPropertyFile.java | 69 -- src/test/java/com/att/mr/test/dme2/TestRunner.java | 42 - .../java/com/att/mr/test/dme2/TopicBeanDME2.java | 94 --- .../metrics/cambria/DMaaPMetricsSenderTest.java | 131 ---- .../apiServer/metrics/cambria/JUnitTestSuite.java | 41 - .../nsa/apiServer/metrics/cambria/TestRunner.java | 41 - .../att/nsa/cambria/CambriaApiExceptionTest.java | 74 -- .../com/att/nsa/cambria/CambriaApiTestCase.java | 51 -- .../att/nsa/cambria/CambriaApiVersionInfoTest.java | 56 -- .../att/nsa/cambria/CambriaRateLimiterTest.java | 78 -- .../java/com/att/nsa/cambria/JUnitTestSuite.java | 42 - src/test/java/com/att/nsa/cambria/TestRunner.java | 41 - .../backends/kafka/CuratorFrameworkImpl.java | 278 ------- .../nsa/cambria/backends/kafka/JUnitTestSuite.java | 42 - .../backends/kafka/KafkaConsumerCacheTest.java | 256 ------ .../cambria/backends/kafka/KafkaPublisherTest.java | 153 ---- .../nsa/cambria/backends/kafka/MetricsSetImpl.java | 123 --- .../att/nsa/cambria/backends/kafka/TestRunner.java | 41 - .../cambria/backends/memory/JUnitTestSuite.java | 43 - .../backends/memory/MemoryConsumerFactoryTest.java | 83 -- .../backends/memory/MemoryMetaBrokerTest.java | 92 --- 
.../backends/memory/MemoryQueuePublisherTest.java | 102 --- .../cambria/backends/memory/MemoryQueueTest.java | 95 --- .../cambria/backends/memory/MessageLoggerTest.java | 104 --- .../nsa/cambria/backends/memory/TestRunner.java | 41 - .../com/att/nsa/cambria/beans/ApiKeyBeanTest.java | 58 -- .../com/att/nsa/cambria/beans/ApiKeyBeanTest2.java | 58 -- .../com/att/nsa/cambria/beans/ApiKeyBeanTest3.java | 58 -- .../com/att/nsa/cambria/beans/ApiKeyBeanTest4.java | 58 -- .../com/att/nsa/cambria/beans/ApiKeyBeanTest5.java | 58 -- .../com/att/nsa/cambria/beans/ApiKeyBeanTest6.java | 58 -- .../nsa/cambria/beans/DMaaPCambriaLimiterTest.java | 83 -- .../att/nsa/cambria/beans/DMaaPContextTest.java | 53 -- .../att/nsa/cambria/beans/DMaaPContextTest2.java | 56 -- .../att/nsa/cambria/beans/DMaaPContextTest3.java | 57 -- .../att/nsa/cambria/beans/DMaaPContextTest4.java | 60 -- .../att/nsa/cambria/beans/DMaaPContextTest5.java | 57 -- .../att/nsa/cambria/beans/DMaaPContextTest6.java | 57 -- .../cambria/beans/DMaaPKafkaMetaBrokerTest.java | 252 ------ .../com/att/nsa/cambria/beans/JUnitTestSuite.java | 49 -- .../com/att/nsa/cambria/beans/LogDetailsTest.java | 70 -- .../att/nsa/cambria/beans/LogDetailsTest10.java | 56 -- .../att/nsa/cambria/beans/LogDetailsTest11.java | 56 -- .../att/nsa/cambria/beans/LogDetailsTest12.java | 56 -- .../att/nsa/cambria/beans/LogDetailsTest13.java | 56 -- .../att/nsa/cambria/beans/LogDetailsTest14.java | 56 -- .../att/nsa/cambria/beans/LogDetailsTest15.java | 56 -- .../att/nsa/cambria/beans/LogDetailsTest16.java | 56 -- .../att/nsa/cambria/beans/LogDetailsTest17.java | 56 -- .../att/nsa/cambria/beans/LogDetailsTest18.java | 56 -- .../com/att/nsa/cambria/beans/LogDetailsTest2.java | 56 -- .../com/att/nsa/cambria/beans/LogDetailsTest3.java | 56 -- .../com/att/nsa/cambria/beans/LogDetailsTest4.java | 56 -- .../com/att/nsa/cambria/beans/LogDetailsTest5.java | 56 -- .../com/att/nsa/cambria/beans/LogDetailsTest6.java | 56 -- .../com/att/nsa/cambria/beans/LogDetailsTest7.java | 56 -- .../com/att/nsa/cambria/beans/LogDetailsTest8.java | 56 -- .../com/att/nsa/cambria/beans/LogDetailsTest9.java | 56 -- .../java/com/att/nsa/cambria/beans/TestRunner.java | 41 - .../com/att/nsa/cambria/beans/TopicBeanTest.java | 56 -- .../com/att/nsa/cambria/beans/TopicBeanTest10.java | 55 -- .../com/att/nsa/cambria/beans/TopicBeanTest2.java | 55 -- .../com/att/nsa/cambria/beans/TopicBeanTest3.java | 55 -- .../com/att/nsa/cambria/beans/TopicBeanTest4.java | 55 -- .../com/att/nsa/cambria/beans/TopicBeanTest5.java | 55 -- .../com/att/nsa/cambria/beans/TopicBeanTest6.java | 55 -- .../com/att/nsa/cambria/beans/TopicBeanTest7.java | 55 -- .../com/att/nsa/cambria/beans/TopicBeanTest8.java | 55 -- .../com/att/nsa/cambria/beans/TopicBeanTest9.java | 55 -- .../cambria/embed/EmbedConfigurationReader.java | 169 ---- .../java/com/att/nsa/cambria/embed/KafkaLocal.java | 58 -- .../com/att/nsa/cambria/embed/ZooKeeperLocal.java | 59 -- .../exception/DMaaPCambriaExceptionMapperTest.java | 60 -- .../cambria/exception/DMaaPErrorMessagesTest.java | 372 --------- .../exception/DMaaPWebExceptionMapperTest.java | 60 -- .../nsa/cambria/exception/ErrorResponseTest.java | 146 ---- .../att/nsa/cambria/exception/JUnitTestSuite.java | 43 - .../com/att/nsa/cambria/exception/TestRunner.java | 41 - .../CambriaServletContextListenerTest.java | 79 -- .../cambria/listener/DME2EndPointLoaderTest.java | 78 -- .../att/nsa/cambria/listener/JUnitTestSuite.java | 43 - .../com/att/nsa/cambria/listener/TestRunner.java | 41 - 
.../com/att/nsa/cambria/metabroker/BrokerImpl.java | 71 -- .../att/nsa/cambria/metabroker/BrokerImplTest.java | 109 --- .../att/nsa/cambria/metabroker/JUnitTestSuite.java | 42 - .../com/att/nsa/cambria/metabroker/TestRunner.java | 41 - .../att/nsa/cambria/metabroker/TopicImplTest.java | 25 - .../att/nsa/cambria/metabroker/TopicImplem.java | 140 ---- .../nsa/cambria/metabroker/TopicImplemTest.java | 176 ----- .../publisher/CambriaPublisherUtilityTest.java | 95 --- .../publisher/DMaaPCambriaClientFactoryTest.java | 171 ---- .../cambria/metrics/publisher/JUnitTestSuite.java | 42 - .../nsa/cambria/metrics/publisher/TestRunner.java | 41 - .../publisher/impl/CambriaBaseClientTest.java | 97 --- .../cambria/metrics/publisher/impl/ClockTest.java | 84 -- .../impl/DMaaPCambriaConsumerImplTest.java | 94 --- .../DMaaPCambriaSimplerBatchPublisherTest.java | 87 --- .../metrics/publisher/impl/JUnitTestSuite.java | 43 - .../cambria/metrics/publisher/impl/TestRunner.java | 41 - .../nsa/cambria/resources/CambriaEventSetTest.java | 76 -- .../resources/CambriaOutboundEventStreamTest.java | 106 --- .../att/nsa/cambria/resources/JUnitTestSuite.java | 42 - .../com/att/nsa/cambria/resources/TestRunner.java | 41 - .../streamReaders/CambriaJsonStreamReaderTest.java | 72 -- .../streamReaders/CambriaRawStreamReaderTest.java | 72 -- .../streamReaders/CambriaStreamReaderTest.java | 72 -- .../streamReaders/CambriaTextStreamReaderTest.java | 71 -- .../resources/streamReaders/JUnitTestSuite.java | 43 - .../resources/streamReaders/TestRunner.java | 41 - .../security/DMaaPAAFAuthenticatorImplTest.java | 83 -- .../security/DMaaPAuthenticatorImplTest.java | 129 --- .../att/nsa/cambria/security/JUnitTestSuite.java | 43 - .../com/att/nsa/cambria/security/TestRunner.java | 41 - .../impl/DMaaPMechIdAuthenticatorTest.java | 107 --- .../impl/DMaaPOriginalUebAuthenticatorTest.java | 117 --- .../nsa/cambria/security/impl/JUnitTestSuite.java | 43 - .../att/nsa/cambria/security/impl/TestRunner.java | 41 - .../service/impl/AdminServiceImplemTest.java | 182 ----- .../service/impl/ApiKeysServiceImplTest.java | 331 -------- .../service/impl/BaseTransactionDbImplTest.java | 158 ---- .../service/impl/EventsServiceImplTest.java | 312 -------- .../nsa/cambria/service/impl/JUnitTestSuite.java | 43 - .../cambria/service/impl/MMServiceImplTest.java | 382 --------- .../att/nsa/cambria/service/impl/MessageTest.java | 64 -- .../service/impl/MetricsServiceImplTest.java | 95 --- .../service/impl/ShowConsumerCacheTest.java | 149 ---- .../att/nsa/cambria/service/impl/TestRunner.java | 41 - .../cambria/service/impl/TopicServiceImplTest.java | 766 ------------------ .../service/impl/TransactionServiceImplTest.java | 97 --- .../cambria/service/impl/UIServiceImplTest.java | 302 ------- .../nsa/cambria/transaction/JUnitTestSuite.java | 42 - .../att/nsa/cambria/transaction/TestRunner.java | 41 - .../cambria/transaction/TransactionObjTest.java | 175 ----- .../nsa/cambria/transaction/TrnRequestTest.java | 187 ----- .../impl/DMaaPSimpleTransactionFactoryTest.java | 67 -- .../cambria/transaction/impl/JUnitTestSuite.java | 42 - .../nsa/cambria/transaction/impl/TestRunner.java | 41 - .../nsa/cambria/utils/ConfigurationReaderTest.java | 56 -- .../nsa/cambria/utils/DMaaPCuratorFactoryTest.java | 70 -- .../cambria/utils/DMaaPResponseBuilderTest.java | 141 ---- .../java/com/att/nsa/cambria/utils/UtilsTest.java | 125 --- .../att/nsa/filter/ContentLengthFilterTest.java | 88 --- .../java/com/att/nsa/filter/DefaultLengthTest.java | 66 -- 
.../java/com/att/nsa/filter/JUnitTestSuite.java | 42 - src/test/java/com/att/nsa/filter/TestRunner.java | 41 - .../att/sa/cambria/testClient/SimpleExample.java | 335 -------- .../metrics/cambria/DMaaPMetricsSenderTest.java | 131 ++++ .../apiServer/metrics/cambria/JUnitTestSuite.java | 41 + .../mr/apiServer/metrics/cambria/TestRunner.java | 41 + .../dmaap/mr/cambria/CambriaApiExceptionTest.java | 74 ++ .../onap/dmaap/mr/cambria/CambriaApiTestCase.java | 51 ++ .../mr/cambria/CambriaApiVersionInfoTest.java | 56 ++ .../dmaap/mr/cambria/CambriaRateLimiterTest.java | 78 ++ .../org/onap/dmaap/mr/cambria/JUnitTestSuite.java | 42 + .../java/org/onap/dmaap/mr/cambria/TestRunner.java | 41 + .../backends/kafka/CuratorFrameworkImpl.java | 278 +++++++ .../mr/cambria/backends/kafka/JUnitTestSuite.java | 42 + .../backends/kafka/KafkaConsumerCacheTest.java | 256 ++++++ .../cambria/backends/kafka/KafkaPublisherTest.java | 153 ++++ .../mr/cambria/backends/kafka/MetricsSetImpl.java | 123 +++ .../mr/cambria/backends/kafka/TestRunner.java | 41 + .../mr/cambria/backends/memory/JUnitTestSuite.java | 43 + .../backends/memory/MemoryConsumerFactoryTest.java | 83 ++ .../backends/memory/MemoryMetaBrokerTest.java | 92 +++ .../backends/memory/MemoryQueuePublisherTest.java | 102 +++ .../cambria/backends/memory/MemoryQueueTest.java | 95 +++ .../cambria/backends/memory/MessageLoggerTest.java | 104 +++ .../mr/cambria/backends/memory/TestRunner.java | 41 + .../dmaap/mr/cambria/beans/ApiKeyBeanTest.java | 58 ++ .../dmaap/mr/cambria/beans/ApiKeyBeanTest2.java | 58 ++ .../dmaap/mr/cambria/beans/ApiKeyBeanTest3.java | 58 ++ .../dmaap/mr/cambria/beans/ApiKeyBeanTest4.java | 58 ++ .../dmaap/mr/cambria/beans/ApiKeyBeanTest5.java | 58 ++ .../dmaap/mr/cambria/beans/ApiKeyBeanTest6.java | 58 ++ .../mr/cambria/beans/DMaaPCambriaLimiterTest.java | 83 ++ .../dmaap/mr/cambria/beans/DMaaPContextTest.java | 53 ++ .../dmaap/mr/cambria/beans/DMaaPContextTest2.java | 56 ++ .../dmaap/mr/cambria/beans/DMaaPContextTest3.java | 57 ++ .../dmaap/mr/cambria/beans/DMaaPContextTest4.java | 60 ++ .../dmaap/mr/cambria/beans/DMaaPContextTest5.java | 57 ++ .../dmaap/mr/cambria/beans/DMaaPContextTest6.java | 57 ++ .../mr/cambria/beans/DMaaPKafkaMetaBrokerTest.java | 252 ++++++ .../dmaap/mr/cambria/beans/JUnitTestSuite.java | 49 ++ .../dmaap/mr/cambria/beans/LogDetailsTest.java | 70 ++ .../dmaap/mr/cambria/beans/LogDetailsTest10.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest11.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest12.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest13.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest14.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest15.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest16.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest17.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest18.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest2.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest3.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest4.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest5.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest6.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest7.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest8.java | 56 ++ .../dmaap/mr/cambria/beans/LogDetailsTest9.java | 56 ++ .../onap/dmaap/mr/cambria/beans/TestRunner.java | 41 + .../onap/dmaap/mr/cambria/beans/TopicBeanTest.java | 56 ++ .../dmaap/mr/cambria/beans/TopicBeanTest10.java | 55 ++ .../dmaap/mr/cambria/beans/TopicBeanTest2.java | 55 ++ 
.../dmaap/mr/cambria/beans/TopicBeanTest3.java | 55 ++ .../dmaap/mr/cambria/beans/TopicBeanTest4.java | 55 ++ .../dmaap/mr/cambria/beans/TopicBeanTest5.java | 55 ++ .../dmaap/mr/cambria/beans/TopicBeanTest6.java | 55 ++ .../dmaap/mr/cambria/beans/TopicBeanTest7.java | 55 ++ .../dmaap/mr/cambria/beans/TopicBeanTest8.java | 55 ++ .../dmaap/mr/cambria/beans/TopicBeanTest9.java | 55 ++ .../mr/cambria/embed/EmbedConfigurationReader.java | 169 ++++ .../onap/dmaap/mr/cambria/embed/KafkaLocal.java | 58 ++ .../dmaap/mr/cambria/embed/ZooKeeperLocal.java | 59 ++ .../exception/DMaaPCambriaExceptionMapperTest.java | 60 ++ .../cambria/exception/DMaaPErrorMessagesTest.java | 372 +++++++++ .../exception/DMaaPWebExceptionMapperTest.java | 60 ++ .../mr/cambria/exception/ErrorResponseTest.java | 146 ++++ .../dmaap/mr/cambria/exception/JUnitTestSuite.java | 43 + .../dmaap/mr/cambria/exception/TestRunner.java | 41 + .../CambriaServletContextListenerTest.java | 79 ++ .../cambria/listener/DME2EndPointLoaderTest.java | 78 ++ .../dmaap/mr/cambria/listener/JUnitTestSuite.java | 43 + .../onap/dmaap/mr/cambria/listener/TestRunner.java | 41 + .../dmaap/mr/cambria/metabroker/BrokerImpl.java | 71 ++ .../mr/cambria/metabroker/BrokerImplTest.java | 109 +++ .../mr/cambria/metabroker/JUnitTestSuite.java | 42 + .../dmaap/mr/cambria/metabroker/TestRunner.java | 41 + .../dmaap/mr/cambria/metabroker/TopicImplTest.java | 25 + .../dmaap/mr/cambria/metabroker/TopicImplem.java | 140 ++++ .../mr/cambria/metabroker/TopicImplemTest.java | 176 +++++ .../publisher/CambriaPublisherUtilityTest.java | 95 +++ .../publisher/DMaaPCambriaClientFactoryTest.java | 171 ++++ .../cambria/metrics/publisher/JUnitTestSuite.java | 42 + .../mr/cambria/metrics/publisher/TestRunner.java | 41 + .../publisher/impl/CambriaBaseClientTest.java | 97 +++ .../cambria/metrics/publisher/impl/ClockTest.java | 84 ++ .../impl/DMaaPCambriaConsumerImplTest.java | 94 +++ .../DMaaPCambriaSimplerBatchPublisherTest.java | 87 +++ .../metrics/publisher/impl/JUnitTestSuite.java | 43 + .../cambria/metrics/publisher/impl/TestRunner.java | 41 + .../mr/cambria/resources/CambriaEventSetTest.java | 76 ++ .../resources/CambriaOutboundEventStreamTest.java | 106 +++ .../dmaap/mr/cambria/resources/JUnitTestSuite.java | 42 + .../dmaap/mr/cambria/resources/TestRunner.java | 41 + .../streamReaders/CambriaJsonStreamReaderTest.java | 72 ++ .../streamReaders/CambriaRawStreamReaderTest.java | 72 ++ .../streamReaders/CambriaStreamReaderTest.java | 72 ++ .../streamReaders/CambriaTextStreamReaderTest.java | 71 ++ .../resources/streamReaders/JUnitTestSuite.java | 43 + .../resources/streamReaders/TestRunner.java | 41 + .../security/DMaaPAAFAuthenticatorImplTest.java | 83 ++ .../security/DMaaPAuthenticatorImplTest.java | 129 +++ .../dmaap/mr/cambria/security/JUnitTestSuite.java | 43 + .../onap/dmaap/mr/cambria/security/TestRunner.java | 41 + .../impl/DMaaPMechIdAuthenticatorTest.java | 107 +++ .../impl/DMaaPOriginalUebAuthenticatorTest.java | 117 +++ .../mr/cambria/security/impl/JUnitTestSuite.java | 43 + .../dmaap/mr/cambria/security/impl/TestRunner.java | 41 + .../service/impl/AdminServiceImplemTest.java | 182 +++++ .../service/impl/ApiKeysServiceImplTest.java | 331 ++++++++ .../service/impl/BaseTransactionDbImplTest.java | 158 ++++ .../service/impl/EventsServiceImplTest.java | 312 ++++++++ .../mr/cambria/service/impl/JUnitTestSuite.java | 43 + .../mr/cambria/service/impl/MMServiceImplTest.java | 382 +++++++++ .../dmaap/mr/cambria/service/impl/MessageTest.java | 64 ++ 
.../service/impl/MetricsServiceImplTest.java | 95 +++ .../service/impl/ShowConsumerCacheTest.java | 149 ++++ .../dmaap/mr/cambria/service/impl/TestRunner.java | 41 + .../cambria/service/impl/TopicServiceImplTest.java | 766 ++++++++++++++++++ .../service/impl/TransactionServiceImplTest.java | 97 +++ .../mr/cambria/service/impl/UIServiceImplTest.java | 302 +++++++ .../mr/cambria/transaction/JUnitTestSuite.java | 42 + .../dmaap/mr/cambria/transaction/TestRunner.java | 41 + .../mr/cambria/transaction/TransactionObjTest.java | 175 +++++ .../mr/cambria/transaction/TrnRequestTest.java | 187 +++++ .../impl/DMaaPSimpleTransactionFactoryTest.java | 67 ++ .../cambria/transaction/impl/JUnitTestSuite.java | 42 + .../mr/cambria/transaction/impl/TestRunner.java | 41 + .../mr/cambria/utils/ConfigurationReaderTest.java | 56 ++ .../mr/cambria/utils/DMaaPCuratorFactoryTest.java | 70 ++ .../mr/cambria/utils/DMaaPResponseBuilderTest.java | 141 ++++ .../org/onap/dmaap/mr/cambria/utils/UtilsTest.java | 125 +++ .../dmaap/mr/filter/ContentLengthFilterTest.java | 88 +++ .../onap/dmaap/mr/filter/DefaultLengthTest.java | 65 ++ .../org/onap/dmaap/mr/filter/JUnitTestSuite.java | 42 + .../java/org/onap/dmaap/mr/filter/TestRunner.java | 41 + .../org/onap/dmaap/mr/test/dmaap/ApiKeyBean.java | 72 ++ .../onap/dmaap/mr/test/dmaap/DMaapPubSubTest.java | 138 ++++ .../onap/dmaap/mr/test/dmaap/DMaapTopicTest.java | 267 +++++++ .../onap/dmaap/mr/test/dmaap/DmaapAdminTest.java | 60 ++ .../onap/dmaap/mr/test/dmaap/DmaapApiKeyTest.java | 162 ++++ .../onap/dmaap/mr/test/dmaap/DmaapMetricsTest.java | 77 ++ .../onap/dmaap/mr/test/dmaap/JUnitTestSuite.java | 44 ++ .../onap/dmaap/mr/test/dmaap/LoadPropertyFile.java | 48 ++ .../org/onap/dmaap/mr/test/dmaap/TestRunner.java | 42 + .../org/onap/dmaap/mr/test/dmaap/TopicBean.java | 72 ++ .../org/onap/dmaap/mr/test/dme2/ApiKeyBean.java | 72 ++ .../org/onap/dmaap/mr/test/dme2/DME2AdminTest.java | 148 ++++ .../onap/dmaap/mr/test/dme2/DME2ApiKeyTest.java | 229 ++++++ .../dmaap/mr/test/dme2/DME2ConsumerFilterTest.java | 96 +++ .../onap/dmaap/mr/test/dme2/DME2ConsumerTest.java | 94 +++ .../onap/dmaap/mr/test/dme2/DME2MetricsTest.java | 132 ++++ .../onap/dmaap/mr/test/dme2/DME2ProducerTest.java | 100 +++ .../org/onap/dmaap/mr/test/dme2/DME2TopicTest.java | 545 +++++++++++++ .../onap/dmaap/mr/test/dme2/JUnitTestSuite.java | 44 ++ .../onap/dmaap/mr/test/dme2/LoadPropertyFile.java | 69 ++ .../org/onap/dmaap/mr/test/dme2/TestRunner.java | 42 + .../org/onap/dmaap/mr/test/dme2/TopicBeanDME2.java | 94 +++ 521 files changed, 32558 insertions(+), 32900 deletions(-) delete mode 100644 src/main/java/com/att/dmf/mr/CambriaApiException.java delete mode 100644 src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/Consumer.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/MetricsSet.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/Publisher.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt delete mode 100644 src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java 
delete mode 100644 src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java delete mode 100644 src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/DMaaPContext.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/LogDetails.java delete mode 100644 src/main/java/com/att/dmf/mr/beans/TopicBean.java delete mode 100644 src/main/java/com/att/dmf/mr/constants/CambriaConstants.java delete mode 100644 src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java delete mode 100644 src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java delete mode 100644 src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java delete mode 100644 src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java delete mode 100644 src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java delete mode 100644 src/main/java/com/att/dmf/mr/exception/ErrorResponse.java delete mode 100644 src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java delete mode 100644 src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java delete mode 100644 src/main/java/com/att/dmf/mr/metabroker/Broker.java delete mode 100644 src/main/java/com/att/dmf/mr/metabroker/Broker1.java delete mode 100644 src/main/java/com/att/dmf/mr/metabroker/Topic.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java delete mode 100644 src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java delete mode 100644 src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java delete mode 100644 
src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java delete mode 100644 src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java delete mode 100644 src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java delete mode 100644 src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java delete mode 100644 src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java delete mode 100644 src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java delete mode 100644 src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java delete mode 100644 src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java delete mode 100644 src/main/java/com/att/dmf/mr/service/AdminService.java delete mode 100644 src/main/java/com/att/dmf/mr/service/ApiKeysService.java delete mode 100644 src/main/java/com/att/dmf/mr/service/EventsService.java delete mode 100644 src/main/java/com/att/dmf/mr/service/MMService.java delete mode 100644 src/main/java/com/att/dmf/mr/service/MetricsService.java delete mode 100644 src/main/java/com/att/dmf/mr/service/TopicService.java delete mode 100644 src/main/java/com/att/dmf/mr/service/TransactionService.java delete mode 100644 src/main/java/com/att/dmf/mr/service/UIService.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java delete mode 100644 src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java delete mode 100644 src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java delete mode 100644 src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java delete mode 100644 src/main/java/com/att/dmf/mr/transaction/TransactionObj.java delete mode 100644 src/main/java/com/att/dmf/mr/transaction/TrnRequest.java delete mode 100644 src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java delete mode 100644 src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java delete mode 100644 src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java delete mode 100644 src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java delete mode 100644 src/main/java/com/att/dmf/mr/utils/Emailer.java delete mode 100644 src/main/java/com/att/dmf/mr/utils/PropertyReader.java delete mode 100644 src/main/java/com/att/dmf/mr/utils/Utils.java delete mode 100644 src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java delete mode 100644 src/main/java/com/att/mr/filter/ContentLengthFilter.java delete mode 100644 src/main/java/com/att/mr/filter/DefaultLength.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/CambriaApiException.java create mode 
100644 src/main/java/org/onap/dmaap/dmf/mr/CambriaApiVersionInfo.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/Consumer.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/ConsumerFactory.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/MetricsSet.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/Publisher.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011Consumer.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumer.txt create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumerCache.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaPublisher.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LiveLockAvoidance.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LockInstructionWatcher.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryConsumerFactory.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryMetaBroker.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueue.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueuePublisher.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MessageLogger.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/ApiKeyBean.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPCambriaLimiter.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPContext.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaConsumerFactory.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaMetaBroker.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPMetricsSet.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPNsaApiDb.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkClient.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkConfigDb.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/LogDetails.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/beans/TopicBean.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPAccessDeniedException.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPCambriaExceptionMapper.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPErrorMessages.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPResponseCode.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPWebExceptionMapper.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/exception/ErrorResponse.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/listener/CambriaServletContextListener.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/listener/DME2EndPointLoader.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker1.java create mode 100644 
src/main/java/org/onap/dmaap/dmf/mr/metabroker/Topic.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaClient.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaConsumer.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisher.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisherUtility.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/Clock.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaEventSet.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaOutboundEventStream.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaStreamReader.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticator.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticator.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticatorImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/AdminService.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/ApiKeysService.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/EventsService.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/MMService.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/MetricsService.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/TopicService.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/TransactionService.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/UIService.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/impl/AdminServiceImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/impl/ApiKeysServiceImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/impl/BaseTransactionDbImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java create mode 100644 
src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionFactory.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObj.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObjDB.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/transaction/TransactionObj.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/transaction/TrnRequest.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/utils/ConfigurationReader.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPCuratorFactory.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPResponseBuilder.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/utils/Emailer.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/utils/PropertyReader.java create mode 100644 src/main/java/org/onap/dmaap/dmf/mr/utils/Utils.java create mode 100644 src/main/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java create mode 100644 src/main/java/org/onap/dmaap/mr/filter/ContentLengthFilter.java create mode 100644 src/main/java/org/onap/dmaap/mr/filter/DefaultLength.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/ApiKeyBean.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/DMaapPubSubTest.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/DMaapTopicTest.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/DmaapAdminTest.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/DmaapApiKeyTest.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/DmaapMetricsTest.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/LoadPropertyFile.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/TestRunner.java delete mode 100644 src/test/java/com/att/mr/test/dmaap/TopicBean.java delete mode 100644 src/test/java/com/att/mr/test/dme2/ApiKeyBean.java delete mode 100644 src/test/java/com/att/mr/test/dme2/DME2AdminTest.java delete mode 100644 src/test/java/com/att/mr/test/dme2/DME2ApiKeyTest.java delete mode 100644 src/test/java/com/att/mr/test/dme2/DME2ConsumerFilterTest.java delete mode 100644 src/test/java/com/att/mr/test/dme2/DME2ConsumerTest.java delete mode 100644 src/test/java/com/att/mr/test/dme2/DME2MetricsTest.java delete mode 100644 src/test/java/com/att/mr/test/dme2/DME2ProducerTest.java delete mode 100644 src/test/java/com/att/mr/test/dme2/DME2TopicTest.java delete mode 100644 src/test/java/com/att/mr/test/dme2/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/mr/test/dme2/LoadPropertyFile.java delete mode 100644 src/test/java/com/att/mr/test/dme2/TestRunner.java delete mode 100644 src/test/java/com/att/mr/test/dme2/TopicBeanDME2.java delete mode 100644 src/test/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java delete mode 100644 src/test/java/com/att/nsa/apiServer/metrics/cambria/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/apiServer/metrics/cambria/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/CambriaApiExceptionTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/CambriaApiTestCase.java delete mode 
100644 src/test/java/com/att/nsa/cambria/CambriaApiVersionInfoTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/CambriaRateLimiterTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/kafka/CuratorFrameworkImpl.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/kafka/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/memory/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactoryTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/memory/MemoryMetaBrokerTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisherTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueueTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/memory/MessageLoggerTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/backends/memory/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest2.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest3.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest4.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest5.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest6.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiterTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest2.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest3.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest4.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest5.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest6.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBrokerTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest10.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest11.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest12.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest13.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest14.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest15.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest16.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest17.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest18.java delete mode 100644 
src/test/java/com/att/nsa/cambria/beans/LogDetailsTest2.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest3.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest4.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest5.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest6.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest7.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest8.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/LogDetailsTest9.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest10.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest2.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest3.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest4.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest5.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest6.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest7.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest8.java delete mode 100644 src/test/java/com/att/nsa/cambria/beans/TopicBeanTest9.java delete mode 100644 src/test/java/com/att/nsa/cambria/embed/EmbedConfigurationReader.java delete mode 100644 src/test/java/com/att/nsa/cambria/embed/KafkaLocal.java delete mode 100644 src/test/java/com/att/nsa/cambria/embed/ZooKeeperLocal.java delete mode 100644 src/test/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapperTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/exception/DMaaPErrorMessagesTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapperTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/exception/ErrorResponseTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/exception/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/exception/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/listener/CambriaServletContextListenerTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/listener/DME2EndPointLoaderTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/listener/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/listener/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/metabroker/BrokerImpl.java delete mode 100644 src/test/java/com/att/nsa/cambria/metabroker/BrokerImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metabroker/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/metabroker/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/metabroker/TopicImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metabroker/TopicImplem.java delete mode 100644 src/test/java/com/att/nsa/cambria/metabroker/TopicImplemTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtilityTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/JUnitTestSuite.java delete mode 100644 
src/test/java/com/att/nsa/cambria/metrics/publisher/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClientTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/impl/ClockTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/impl/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/metrics/publisher/impl/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/CambriaEventSetTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/CambriaOutboundEventStreamTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReaderTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/streamReaders/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/resources/streamReaders/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/security/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/security/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/security/impl/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/security/impl/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/AdminServiceImplemTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/EventsServiceImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/MMServiceImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/MessageTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/MetricsServiceImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/ShowConsumerCacheTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/TopicServiceImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/service/impl/TransactionServiceImplTest.java delete mode 100644 
src/test/java/com/att/nsa/cambria/service/impl/UIServiceImplTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/transaction/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/transaction/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/transaction/TransactionObjTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/transaction/TrnRequestTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/transaction/impl/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/cambria/transaction/impl/TestRunner.java delete mode 100644 src/test/java/com/att/nsa/cambria/utils/ConfigurationReaderTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/utils/DMaaPCuratorFactoryTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/utils/DMaaPResponseBuilderTest.java delete mode 100644 src/test/java/com/att/nsa/cambria/utils/UtilsTest.java delete mode 100644 src/test/java/com/att/nsa/filter/ContentLengthFilterTest.java delete mode 100644 src/test/java/com/att/nsa/filter/DefaultLengthTest.java delete mode 100644 src/test/java/com/att/nsa/filter/JUnitTestSuite.java delete mode 100644 src/test/java/com/att/nsa/filter/TestRunner.java delete mode 100644 src/test/java/com/att/sa/cambria/testClient/SimpleExample.java create mode 100644 src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/CambriaApiExceptionTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/CambriaApiTestCase.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/CambriaApiVersionInfoTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/CambriaRateLimiterTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/CuratorFrameworkImpl.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaConsumerCacheTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/MetricsSetImpl.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/memory/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryConsumerFactoryTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryMetaBrokerTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueuePublisherTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueueTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MessageLoggerTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/backends/memory/TestRunner.java create mode 100644 
src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest2.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest3.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest4.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest5.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest6.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPCambriaLimiterTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest2.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest3.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest4.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest5.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest6.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaMetaBrokerTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest10.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest11.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest12.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest13.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest14.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest15.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest16.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest17.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest18.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest2.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest3.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest4.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest5.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest6.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest7.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest8.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest9.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest10.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest2.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest3.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest4.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest5.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest6.java create mode 100644 
src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest7.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest8.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest9.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPCambriaExceptionMapperTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPErrorMessagesTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPWebExceptionMapperTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/exception/ErrorResponseTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/exception/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/exception/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/listener/CambriaServletContextListenerTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/listener/DME2EndPointLoaderTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/listener/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/listener/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImpl.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metabroker/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metabroker/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplem.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplemTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/CambriaPublisherUtilityTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/CambriaBaseClientTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/ClockTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaEventSetTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaOutboundEventStreamTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/TestRunner.java create mode 100644 
src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaStreamReaderTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAAFAuthenticatorImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAuthenticatorImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/security/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/security/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/security/impl/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/security/impl/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/AdminServiceImplemTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/ApiKeysServiceImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/BaseTransactionDbImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/EventsServiceImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/MMServiceImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/MessageTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/MetricsServiceImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/ShowConsumerCacheTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/TopicServiceImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/TransactionServiceImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/service/impl/UIServiceImplTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/transaction/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/transaction/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/transaction/TransactionObjTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/transaction/TrnRequestTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/utils/ConfigurationReaderTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPCuratorFactoryTest.java create mode 100644 
src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPResponseBuilderTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/filter/ContentLengthFilterTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/filter/DefaultLengthTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/filter/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/filter/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/ApiKeyBean.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapPubSubTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapTopicTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapAdminTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapApiKeyTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapMetricsTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/LoadPropertyFile.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dmaap/TopicBean.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/ApiKeyBean.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/DME2AdminTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/DME2ApiKeyTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerFilterTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/DME2MetricsTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/DME2ProducerTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/DME2TopicTest.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/JUnitTestSuite.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/LoadPropertyFile.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/TestRunner.java create mode 100644 src/test/java/org/onap/dmaap/mr/test/dme2/TopicBeanDME2.java diff --git a/src/main/java/com/att/dmf/mr/CambriaApiException.java b/src/main/java/com/att/dmf/mr/CambriaApiException.java deleted file mode 100644 index cdf95ab..0000000 --- a/src/main/java/com/att/dmf/mr/CambriaApiException.java +++ /dev/null @@ -1,80 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr; - -import org.json.JSONObject; - -import com.att.dmf.mr.exception.ErrorResponse; -import com.att.nsa.apiServer.NsaAppException; - -public class CambriaApiException extends NsaAppException -{ - /* - * defined long type constant serialVersionUID - */ - private static final long serialVersionUID = 1L; - - private transient ErrorResponse errRes; - /** - * Implements constructor CambriaApiException - * @param jsonObject - * - */ - public CambriaApiException ( JSONObject jsonObject ) - { - super ( jsonObject ); - } - - /** - * Implements constructor CambriaApiException - * @param status - * @param msg - */ - public CambriaApiException ( int status, String msg ) - { - super ( status, msg ); - } - - /** - * Implements constructor CambriaApiException - * @param status - * @param jsonObject - */ - public CambriaApiException ( int status, JSONObject jsonObject ) - { - super ( status, jsonObject ); - } - - public CambriaApiException (ErrorResponse errRes) - { - super(errRes.getHttpStatusCode(),errRes.getErrorMessage()); - this.errRes = errRes; - } - - public ErrorResponse getErrRes() { - return errRes; - } - - public void setErrRes(ErrorResponse errRes) { - this.errRes = errRes; - } -} diff --git a/src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java b/src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java deleted file mode 100644 index f0c57b5..0000000 --- a/src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java +++ /dev/null @@ -1,88 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
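Purely as an illustrative aside (hypothetical helper names; package names follow the deleted com.att.dmf.mr sources shown here, which this change recreates under org.onap.dmaap.dmf.mr): the constructors above give callers two ways to raise an API error, either an explicit HTTP status plus message, or a prepared ErrorResponse that the exception keeps for later retrieval through getErrRes().

import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.exception.ErrorResponse;

public final class ApiErrorSketch {
    private ApiErrorSketch() {}

    /** Fail with an explicit HTTP status and message. */
    public static void failNotFound(String topicName) throws CambriaApiException {
        throw new CambriaApiException(404, "No such topic: " + topicName);
    }

    /** Fail from a prepared ErrorResponse; the exception carries it so getErrRes() can surface it later. */
    public static void fail(ErrorResponse error) throws CambriaApiException {
        throw new CambriaApiException(error);
    }
}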
- * - *******************************************************************************/ -package com.att.dmf.mr; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -/** - * CambriaApiVersionInfo will provide the version of cambria code - * - * @author peter - * - */ -public class CambriaApiVersionInfo { - - /** - * 3 constants are defined:- - * PROPS,VERSION and LOG - */ - - private static final Properties PROPS = new Properties(); - private static final String VERSION; - - - private static final EELFLogger LOG = EELFManager.getInstance().getLogger(CambriaApiVersionInfo.class); - - /** - * private constructor created with no argument - * to avoid default constructor - */ - private CambriaApiVersionInfo() - { - - } - - /** - * returns version of String type - */ - public static String getVersion() { - return VERSION; - } - - /** - * - * defines static initialization method - * It initializes VERSION Constant - * it handles exception in try catch block - * and throws IOException - * - */ - - static { - String use = null; - try { - final InputStream is = CambriaApiVersionInfo.class - .getResourceAsStream("/cambriaApiVersion.properties"); - if (is != null) { - PROPS.load(is); - use = PROPS.getProperty("cambriaApiVersion", null); - } - } catch (IOException e) { - LOG.error("Failed due to IO EXception:"+e); - } - VERSION = use; - } -} diff --git a/src/main/java/com/att/dmf/mr/backends/Consumer.java b/src/main/java/com/att/dmf/mr/backends/Consumer.java deleted file mode 100644 index f4a9a80..0000000 --- a/src/main/java/com/att/dmf/mr/backends/Consumer.java +++ /dev/null @@ -1,105 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends; - - -/** - * A consumer interface. Consumers pull the next message from a given topic. 
- * @author peter - */ -public interface Consumer -{ - /** - * A message interface provide the offset and message - * @author nilanjana.maity - * - */ - public interface Message - { - /** - * returning the offset of that particular message - * @return long - */ - long getOffset (); - /** - * returning the message - * @return message - */ - String getMessage (); - } - - /** - * Get this consumer's name - * @return name - */ - String getName (); - - /** - * Get creation time in ms - * @return - */ - long getCreateTimeMs (); - - /** - * Get last access time in ms - * @return - */ - long getLastAccessMs (); - - /** - * Get the next message from this source. This method must not block. - * @return the next message, or null if none are waiting - */ - Message nextMessage (); - - /** - * Get the next message from this source. This method must not block. - * @param atOffset start with the next message at or after atOffset. -1 means next from last request - * @return the next message, or null if none are waiting - */ - - - - /** - * Close/clean up this consumer - * @return - */ - boolean close(); - - /** - * Commit the offset of the last consumed message - * - */ - void commitOffsets(); - - /** - * Get the offset this consumer is currently at - * @return offset - */ - long getOffset(); - - void setOffset(long offset); - - - - -} diff --git a/src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java b/src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java deleted file mode 100644 index 55e0645..0000000 --- a/src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java +++ /dev/null @@ -1,118 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
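As an illustrative sketch (hypothetical class and method names, not part of this change), a caller of the Consumer interface above can drain whatever is currently waiting and then commit, relying on nextMessage() returning null instead of blocking:

import com.att.dmf.mr.backends.Consumer;

public final class ConsumerDrainSketch {
    private ConsumerDrainSketch() {}

    /** Pull every message currently waiting on the consumer, then commit the last consumed offset. */
    public static int drain(Consumer consumer) {
        int count = 0;
        Consumer.Message msg;
        while ((msg = consumer.nextMessage()) != null) {   // non-blocking: null means nothing is waiting
            System.out.println(consumer.getName() + " offset " + msg.getOffset() + ": " + msg.getMessage());
            count++;
        }
        if (count > 0) {
            consumer.commitOffsets();                      // record how far this group has read
        }
        return count;
    }
}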
- * - *******************************************************************************/ -package com.att.dmf.mr.backends; - -import java.util.Collection; -import java.util.HashMap; - -import com.att.dmf.mr.CambriaApiException; - -/** - * This is the factory class to instantiate the consumer - * - * @author nilanjana.maity - * - */ - -public interface ConsumerFactory { - public static final String kSetting_EnableCache = "cambria.consumer.cache.enabled"; - public static boolean kDefault_IsCacheEnabled = true; - - /** - * User defined exception for Unavailable Exception - * - * @author nilanjana.maity - * - */ - public class UnavailableException extends Exception { - /** - * Unavailable Exception with message - * - * @param msg - */ - public UnavailableException(String msg) { - super(msg); - } - - /** - * Unavailable Exception with the throwable object - * - * @param t - */ - public UnavailableException(Throwable t) { - super(t); - } - - /** - * Unavailable Exception with the message and cause - * - * @param msg - * @param cause - */ - public UnavailableException(String msg, Throwable cause) { - super(msg, cause); - } - - private static final long serialVersionUID = 1L; - } - - /** - * For admin use, drop all cached consumers. - */ - public void dropCache(); - - /** - * Get or create a consumer for the given set of info (topic, group, id) - * - * @param topic - * @param consumerGroupId - * @param clientId - * @param timeoutMs - * @return - * @throws UnavailableException - */ - - - /** - * For factories that employ a caching mechanism, this allows callers to - * explicitly destory a consumer that resides in the factory's cache. - * - * @param topic - * @param consumerGroupId - * @param clientId - */ - public void destroyConsumer(String topic, String consumerGroupId, - String clientId); - - /** - * For admin/debug, we provide access to the consumers - * - * @return a collection of consumers - */ - public Collection getConsumers(); - - public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException; - public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException; - - - -} diff --git a/src/main/java/com/att/dmf/mr/backends/MetricsSet.java b/src/main/java/com/att/dmf/mr/backends/MetricsSet.java deleted file mode 100644 index de665b8..0000000 --- a/src/main/java/com/att/dmf/mr/backends/MetricsSet.java +++ /dev/null @@ -1,71 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
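As an illustrative sketch (hypothetical names and example argument values, not part of this change), the factory contract above pairs getConsumerFor() with destroyConsumer() so callers of a caching implementation can explicitly release what they were handed:

import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.backends.Consumer;
import com.att.dmf.mr.backends.ConsumerFactory;

public final class FactorySketch {
    private FactorySketch() {}

    /** Read one pass of messages for (topic, group, client), then drop the factory's cached consumer. */
    public static void readOnce(ConsumerFactory factory, String topic, String group, String clientId)
            throws CambriaApiException {
        try {
            final Consumer consumer = factory.getConsumerFor(topic, group, clientId, 1000, "127.0.0.1");
            for (Consumer.Message m = consumer.nextMessage(); m != null; m = consumer.nextMessage()) {
                System.out.println(m.getOffset() + " " + m.getMessage());
            }
            consumer.commitOffsets();
        } catch (ConsumerFactory.UnavailableException e) {
            System.err.println("no consumer available right now: " + e.getMessage());
        } finally {
            factory.destroyConsumer(topic, group, clientId);   // release the cached instance
        }
    }
}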
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends; - -import com.att.nsa.metrics.CdmMetricsRegistry; -/** - * This interface will help to generate metrics - * @author nilanjana.maity - * - */ -public interface MetricsSet extends CdmMetricsRegistry{ - - /** - * This method will setup cambria sender code - */ - public void setupCambriaSender (); - /** - * This method will define on route complete - * @param name - * @param durationMs - */ - public void onRouteComplete ( String name, long durationMs ); - /** - * This method will help the kafka publisher while publishing the messages - * @param amount - */ - public void publishTick ( int amount ); - /** - * This method will help the kafka consumer while consuming the messages - * @param amount - */ - public void consumeTick ( int amount ); - /** - * This method will call if the kafka consumer cache missed - */ - public void onKafkaConsumerCacheMiss (); - /** - * This method will call if the kafka consumer cache will be hit while publishing/consuming the messages - */ - public void onKafkaConsumerCacheHit (); - /** - * This method will call if the kafka consumer cache claimed - */ - public void onKafkaConsumerClaimed (); - /** - * This method will call if Kafka consumer is timed out - */ - public void onKafkaConsumerTimeout (); - - - -} diff --git a/src/main/java/com/att/dmf/mr/backends/Publisher.java b/src/main/java/com/att/dmf/mr/backends/Publisher.java deleted file mode 100644 index 25022e2..0000000 --- a/src/main/java/com/att/dmf/mr/backends/Publisher.java +++ /dev/null @@ -1,99 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.apache.kafka.clients.producer.ProducerRecord; - -import com.att.dmf.mr.beans.LogDetails; - -/** - * A publisher interface. Publishers receive messages and post them to a topic. - * @author peter - */ -public interface Publisher -{ - /** - * A message interface. 
The message has a key and a body. - * @author peter - */ - public interface message - { - /** - * Get the key for this message. The key is used to partition messages - * into "sub-streams" that have guaranteed order. The key can be null, - * which means the message can be processed without any concern for order. - * - * @return a key, possibly null - */ - String getKey(); - - /** - * Get the message body. - * @return a message body - */ - String getMessage(); - /** - * set the logging params for transaction enabled logging - * @param logDetails - */ - void setLogDetails (LogDetails logDetails); - /** - * Get the log details for transaction enabled logging - * @return LogDetails - */ - LogDetails getLogDetails (); - - /** - * boolean transactionEnabled - * @return true/false - */ - boolean isTransactionEnabled(); - /** - * Set the transaction enabled flag from prop file or topic based implementation - * @param transactionEnabled - */ - void setTransactionEnabled(boolean transactionEnabled); - } - - /** - * Send a single message to a topic. Equivalent to sendMessages with a list of size 1. - * @param topic - * @param msg - * @throws IOException - */ - public void sendMessage ( String topic, message msg ) throws IOException; - - /** - * Send messages to a topic. - * @param topic - * @param msgs - * @throws IOException - */ - public void sendMessages ( String topic, List msgs ) throws IOException; - - public void sendBatchMessageNew(String topic ,ArrayList> kms) throws IOException; - public void sendMessagesNew( String topic, List msgs ) throws IOException; -} diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java deleted file mode 100644 index 9be9073..0000000 --- a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java +++ /dev/null @@ -1,397 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
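As an illustrative sketch (hypothetical class name, not part of this change), the message contract above can be satisfied by a small value object; sendMessage() is then, as its Javadoc says, equivalent to sendMessages() with a single-element list:

import java.io.IOException;

import com.att.dmf.mr.backends.Publisher;
import com.att.dmf.mr.beans.LogDetails;

public final class SimpleMessage implements Publisher.message {
    private final String key;
    private final String body;
    private LogDetails logDetails;
    private boolean transactionEnabled;

    public SimpleMessage(String key, String body) {
        this.key = key;     // a null key means no ordering guarantee is required
        this.body = body;
    }

    @Override public String getKey() { return key; }
    @Override public String getMessage() { return body; }
    @Override public void setLogDetails(LogDetails logDetails) { this.logDetails = logDetails; }
    @Override public LogDetails getLogDetails() { return logDetails; }
    @Override public boolean isTransactionEnabled() { return transactionEnabled; }
    @Override public void setTransactionEnabled(boolean transactionEnabled) { this.transactionEnabled = transactionEnabled; }

    /** Publish a single message to a topic. */
    public static void publishOne(Publisher publisher, String topic, String key, String body) throws IOException {
        publisher.sendMessage(topic, new SimpleMessage(key, body));
    }
}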
- * - *******************************************************************************/ -package com.att.dmf.mr.backends.kafka; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.FutureTask; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.RunnableFuture; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.common.KafkaException; - -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.constants.CambriaConstants; - - - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * A consumer instance that's created per-request. These are stateless so that - * clients can connect to this service as a proxy. - * - * @author Ram - * - */ -public class Kafka011Consumer implements Consumer { - private enum State { - OPENED, CLOSED - } - - - /** - * KafkaConsumer() is constructor. It has following 4 parameters:- - * - * @param topic - * @param group - * @param id - * @param cc - * - */ - - public Kafka011Consumer(String topic, String group, String id, KafkaConsumer cc, - KafkaLiveLockAvoider2 klla) throws Exception { - fTopic = topic; - fGroup = group; - fId = id; - fCreateTimeMs = System.currentTimeMillis(); - fLastTouch = fCreateTimeMs; - fPendingMsgs = new LinkedBlockingQueue>(); - fLogTag = fGroup + "(" + fId + ")/" + fTopic; - offset = 0; - state = Kafka011Consumer.State.OPENED; - kConsumer = cc; - fKafkaLiveLockAvoider = klla; - synchronized (kConsumer) { - kConsumer.subscribe(Arrays.asList(topic)); - } - } - - private Consumer.Message makeMessage(final ConsumerRecord msg) { - return new Consumer.Message() { - @Override - public long getOffset() { - offset = msg.offset(); - return offset; - } - - @Override - public String getMessage() { - return new String(msg.value()); - } - }; - } - - @Override - public synchronized Consumer.Message nextMessage() { - - try { - if (fPendingMsgs.size() > 0) { - return makeMessage(fPendingMsgs.take()); - } - } catch (InterruptedException x) { - log.warn("After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage() + ")", - x); - } - - Callable run = new Callable() { - @Override - public Boolean call() throws Exception { - try { - ConsumerRecords records; - synchronized (kConsumer) { - records = kConsumer.poll(500); - } - for (ConsumerRecord record : records) { - - fPendingMsgs.offer(record); - } - - } catch (KafkaException x) { - log.debug(fLogTag + ": KafkaException " + x.getMessage()); - - } catch (java.lang.IllegalStateException | java.lang.IllegalArgumentException x) { - log.error(fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. " - + x.getMessage()); - - } - - - return true; - } - }; - - @SuppressWarnings({ "rawtypes", "unchecked" }) - RunnableFuture future = new FutureTask(run); - ExecutorService service = Executors.newSingleThreadExecutor(); - service.execute(future); - try { - future.get(5, TimeUnit.SECONDS); // wait 1 - // second - } catch (TimeoutException ex) { - // timed out. Try to stop the code if possible. 
- String apiNodeId = null; - try { - apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port; - } catch (UnknownHostException e1) { - // TODO Auto-generated catch block - log.error("unable to get the localhost address"); - } - - try { - if (fKafkaLiveLockAvoider != null) - fKafkaLiveLockAvoider.unlockConsumerGroup(apiNodeId, fTopic + "::" + fGroup); - } catch (Exception e) { - log.error("unlockConsumerGroup(" + apiNodeId + "," + fTopic + "::" + fGroup); - } - - forcePollOnConsumer(); - future.cancel(true); - } catch (Exception ex) { - // timed out. Try to stop the code if possible. - future.cancel(true); - } - service.shutdown(); - - return null; - - } - - /** - * getName() method returns string type value. returns 3 parameters in - * string:- fTopic,fGroup,fId - * - * @Override - */ - public String getName() { - return fTopic + " : " + fGroup + " : " + fId; - } - - /** - * getCreateTimeMs() method returns long type value. returns fCreateTimeMs - * variable value - * - * @Override - * - */ - public long getCreateTimeMs() { - return fCreateTimeMs; - } - - public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer() { - return kConsumer; - } - - /** - * getLastAccessMs() method returns long type value. returns fLastTouch - * variable value - * - * @Override - * - */ - public long getLastAccessMs() { - return fLastTouch; - } - - /** - * getOffset() method returns long type value. returns offset variable value - * - * @Override - * - */ - public long getOffset() { - return offset; - } - - /** - * commit offsets commitOffsets() method will be called on closed of - * KafkaConsumer. - * - * @Override - * - * - * public void commitOffsets() { if (getState() == - * KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called - * on closed KafkaConsumer " + getName()); return; } - * fConnector.commitOffsets(); } - */ - - /** - * updating fLastTouch with current time in ms - */ - public void touch() { - fLastTouch = System.currentTimeMillis(); - } - - /** - * getLastTouch() method returns long type value. returns fLastTouch - * variable value - * - */ - public long getLastTouch() { - return fLastTouch; - } - - /** - * setting the kafkaConsumer state to closed - */ - - public boolean close() { - if (getState() == Kafka011Consumer.State.CLOSED) { - - log.error("close() called on closed KafkaConsumer " + getName()); - return true; - } - - - boolean retVal = kafkaConnectorshuttask(); - return retVal; - - } - - /* time out if the kafka shutdown fails for some reason */ - - private boolean kafkaConnectorshuttask() { - Callable run = new Callable() { - @Override - public Boolean call() throws Exception { - - try { - - kConsumer.close(); - - } catch (Exception e) { - log.info("@Kafka Stream shutdown erorr occurred " + getName() + " " + e); - throw new Exception("@Kafka Stream shutdown erorr occurred " + getName() + " " + e); - - } - log.info("Kafka connection closure with in 15 seconds by a Executors task"); - - return true; - } - }; - - @SuppressWarnings({ "rawtypes", "unchecked" }) - RunnableFuture future = new FutureTask(run); - ExecutorService service = Executors.newSingleThreadExecutor(); - service.execute(future); - try { - future.get(200, TimeUnit.SECONDS); // wait 1 - // second - } catch (TimeoutException ex) { - // timed out. Try to stop the code if possible. 
- log.info("Timeout Occured - Kafka connection closure with in 300 seconds by a Executors task"); - future.cancel(true); - setState(Kafka011Consumer.State.OPENED); - } catch (Exception ex) { - // timed out. Try to stop the code if possible. - log.error("Exception occured Occured - Kafka connection closure with in 300 seconds by a Executors task" - + ex); - future.cancel(true); - setState(Kafka011Consumer.State.OPENED); - return false; - } - service.shutdown(); - setState(Kafka011Consumer.State.CLOSED); - return true; - } - - public void forcePollOnConsumer() { - Kafka011ConsumerUtil.forcePollOnConsumer(fTopic, fGroup, fId); - - } - - /** - * getConsumerGroup() returns Consumer group - * - * @return - */ - public String getConsumerGroup() { - return fGroup; - } - - /** - * getConsumerId returns Consumer Id - * - * @return - */ - public String getConsumerId() { - return fId; - } - - /** - * getState returns kafkaconsumer state - * - * @return - */ - private Kafka011Consumer.State getState() { - return this.state; - } - - /** - * setState() sets the kafkaConsumer state - * - * @param state - */ - private void setState(Kafka011Consumer.State state) { - this.state = state; - } - - - private final String fTopic; - private final String fGroup; - private final String fId; - private final String fLogTag; - - private KafkaConsumer kConsumer; - private long fCreateTimeMs; - private long fLastTouch; - private long offset; - private Kafka011Consumer.State state; - private KafkaLiveLockAvoider2 fKafkaLiveLockAvoider; - private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011Consumer.class); - private final LinkedBlockingQueue> fPendingMsgs; - - @Override - public void commitOffsets() { - if (getState() == Kafka011Consumer.State.CLOSED) { - log.warn("commitOffsets() called on closed KafkaConsumer " + getName()); - return; - } - kConsumer.commitSync(); - - - } - - @Override - public void setOffset(long offsetval) { - offset = offsetval; - } - - - public void setConsumerCache(KafkaConsumerCache cache) { - } - - -} diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java deleted file mode 100644 index e066df5..0000000 --- a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java +++ /dev/null @@ -1,123 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
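As an illustrative sketch (hypothetical names, not part of this change): nextMessage() above bounds kConsumer.poll(500) by running it as a FutureTask on a throwaway single-thread executor and waiting at most a few seconds, cancelling and recovering (unlock the consumer group, force a poll) on timeout. Stripped of the Kafka specifics, the bounding pattern looks like this:

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public final class BoundedCallSketch {
    private BoundedCallSketch() {}

    /** Run the task on a single-use worker thread; give up (and cancel it) after timeoutSeconds. */
    public static boolean runWithTimeout(Callable<Boolean> task, long timeoutSeconds) {
        final FutureTask<Boolean> future = new FutureTask<>(task);
        final ExecutorService service = Executors.newSingleThreadExecutor();
        service.execute(future);
        try {
            return Boolean.TRUE.equals(future.get(timeoutSeconds, TimeUnit.SECONDS));
        } catch (TimeoutException ex) {
            future.cancel(true);    // the call overran; the caller decides how to recover
            return false;
        } catch (Exception ex) {
            future.cancel(true);
            return false;
        } finally {
            service.shutdown();     // release the worker thread
        }
    }
}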
- * - *******************************************************************************/ -package com.att.dmf.mr.backends.kafka; - -import java.util.ArrayList; - -import org.apache.kafka.clients.consumer.ConsumerRecords; - - - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * A consumer Util class for force polling when a rebalance issue is anticipated - * - * @author Ram - * - */ -public class Kafka011ConsumerUtil { - private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011ConsumerUtil.class); - - /** - * @param fconsumercache - * @param fTopic - * @param fGroup - * @param fId - * @return - */ - public static boolean forcePollOnConsumer(final String fTopic, final String fGroup, final String fId) { - - Thread forcepollThread = new Thread(new Runnable() { - public void run() { - try { - - ArrayList kcsList = null; - - kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(fTopic + "::" + fGroup + "::", fId); - if (null != kcsList) { - for (int counter = 0; counter < kcsList.size(); counter++) { - - Kafka011Consumer kc1 = kcsList.get(counter); - - try { - ConsumerRecords recs = kc1.getConsumer().poll(0); - log.info("soft poll on " + kc1); - } catch (java.util.ConcurrentModificationException e) { - log.error("Error occurs for " + e); - } - - } - - } - - } catch (Exception e) { - log.error("Failed and go to Exception block for " + fGroup + " " + e.getMessage()); - } - } - }); - - forcepollThread.start(); - - return false; - - } - - /** - * @param fconsumercache - * @param group - * @return - */ - public static boolean forcePollOnConsumer(final String group) { - - Thread forcepollThread = new Thread(new Runnable() { - public void run() { - try { - ArrayList kcsList = new ArrayList(); - kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(group); - - if (null != kcsList) { - - for (int counter = 0; counter < kcsList.size(); counter++) { - - Kafka011Consumer kc1 = kcsList.get(counter); - log.info("soft poll on remote nodes " + kc1); - ConsumerRecords recs = kc1.getConsumer().poll(0); - } - - } - - } catch (java.util.ConcurrentModificationException e) { - log.error("Error occurs for " + e); - } catch (Exception e) { - log.error("Failed and go to Exception block for " + group + " " + e.getMessage()); - } - } - }); - - forcepollThread.start(); - return false; - - } - -} diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt deleted file mode 100644 index dd6259f..0000000 --- a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt +++ /dev/null @@ -1,386 +0,0 @@ -package com.att.dmf.mr.backends.kafka; - -import java.util.Arrays; -import java.util.Properties; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.FutureTask; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.RunnableFuture; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.common.KafkaException; - -import com.att.dmf.mr.backends.Consumer; - -//import org.slf4j.Logger; -//import org.slf4j.LoggerFactory; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * A consumer instance that's created 
per-request. These are stateless so that - * clients can connect to this service as a proxy. - * - * @author peter - * - */ -public class KafkaConsumer implements Consumer { - private enum State { - OPENED, CLOSED - } - - /** - * KafkaConsumer() is constructor. It has following 4 parameters:- - * - * @param topic - * @param group - * @param id - * @param cc - * - */ - - public KafkaConsumer(String topic, String group, String id, Properties prop) throws Exception { - fTopic = topic; - fGroup = group; - fId = id; - // fConnector = cc; - - fCreateTimeMs = System.currentTimeMillis(); - fLastTouch = fCreateTimeMs; - fPendingMsgs = new LinkedBlockingQueue> (); - fLogTag = fGroup + "(" + fId + ")/" + fTopic; - offset = 0; - - state = KafkaConsumer.State.OPENED; - - // final Map topicCountMap = new HashMap(); - // topicCountMap.put(fTopic, 1); - // log.info(fLogTag +" kafka Consumer started at " - // +System.currentTimeMillis()); - // final Map>> consumerMap = - // fConnector.createMessageStreams(topicCountMap); - // final List> streams = - // consumerMap.get(fTopic); - - kConsumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(prop); - // System.out.println("I am in Consumer APP " + topic + "-- " + - // fConsumer); - kConsumer.subscribe(Arrays.asList(topic)); - log.info(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs)); - System.out.println("-----id " +id); - - - try { ConsumerRecords records = - kConsumer.poll(500); System.out.println("---" + - records.count()); - - for (ConsumerRecord record : records) { - System.out.printf("offset = %d, key = %s, value = %s", - record.offset(), record.key(), record.value()); String t = - record.value(); - - } - }catch(Exception e){ - System.out.println( e); - } - System.out.println(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs)); - kConsumer.commitSync(); - // fConsumer.close(); - - - /* - * ConsumerRecords records = fConsumer.poll(500); - * System.out.println("---" + records.count()); - * - * for (ConsumerRecord record : records) { - * System.out.printf("offset = %d, key = %s, value = %s", - * record.offset(), record.key(), record.value()); String t = - * record.value(); - * - * } - * - * - * fConsumer.commitSync(); fConsumer.close(); - */ - - // fStream = streams.iterator().next(); - } - - - - private Consumer.Message makeMessage ( final ConsumerRecord msg ) - { - return new Consumer.Message() - { - @Override - public long getOffset () - { - return msg.offset (); - } - - @Override - public String getMessage () - { - return new String ( msg.value () ); - } - }; - } - - @Override - public synchronized Consumer.Message nextMessage () - { - - try - { - if ( fPendingMsgs.size () > 0 ) - { - return makeMessage ( fPendingMsgs.take () ); - } - } - catch ( InterruptedException x ) - { - log.warn ( "After size>0, pending msg take() threw InterruptedException. Ignoring. 
(" + x.getMessage () + ")", x ); - } - - - try - { - boolean foundMsgs = false; - System.out.println("entering into pollingWWWWWWWWWWWWWWWWW"); - final ConsumerRecords records = kConsumer.poll ( 100 ); - System.out.println("polling doneXXXXXXXXXXXXXXXXXXXXXXXXXXX...."); - for ( ConsumerRecord record : records ) - { - foundMsgs = true; - fPendingMsgs.offer ( record ); - } - - } - catch ( KafkaException x ) - { - log.debug ( fLogTag + ": KafkaException " + x.getMessage () ); - - } - catch ( java.lang.IllegalStateException | java.lang.IllegalArgumentException x ) - { - log.error ( fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. " + x.getMessage () ); - - } - - return null; - } - - - - /** - * getName() method returns string type value. returns 3 parameters in - * string:- fTopic,fGroup,fId - * - * @Override - */ - public String getName() { - return fTopic + " : " + fGroup + " : " + fId; - } - - /** - * getCreateTimeMs() method returns long type value. returns fCreateTimeMs - * variable value - * - * @Override - * - */ - public long getCreateTimeMs() { - return fCreateTimeMs; - } - - public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer() { - return kConsumer; - } - - /** - * getLastAccessMs() method returns long type value. returns fLastTouch - * variable value - * - * @Override - * - */ - public long getLastAccessMs() { - return fLastTouch; - } - - - - /** - * getOffset() method returns long type value. returns offset variable value - * - * @Override - * - */ - public long getOffset() { - return offset; - } - - /** - * commit offsets commitOffsets() method will be called on closed of - * KafkaConsumer. - * - * @Override - * - * - * public void commitOffsets() { if (getState() == - * KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called - * on closed KafkaConsumer " + getName()); return; } - * fConnector.commitOffsets(); } - */ - - /** - * updating fLastTouch with current time in ms - */ - public void touch() { - fLastTouch = System.currentTimeMillis(); - } - - /** - * getLastTouch() method returns long type value. returns fLastTouch - * variable value - * - */ - public long getLastTouch() { - return fLastTouch; - } - - /** - * setting the kafkaConsumer state to closed - */ - public synchronized boolean close() { - - if (getState() == KafkaConsumer.State.CLOSED) { - - log.warn("close() called on closed KafkaConsumer " + getName()); - return true; - } - - setState(KafkaConsumer.State.CLOSED); - // fConnector.shutdown(); - boolean retVal = kafkaConnectorshuttask(); - return retVal; - - } - - /* time out if the kafka shutdown fails for some reason */ - - private boolean kafkaConnectorshuttask() { - Callable run = new Callable() { - @Override - public Boolean call() throws Exception { - // your code to be timed - try { - System.out.println("consumer closing....." + kConsumer); - kConsumer.close(); - } catch (Exception e) { - log.info("@@@@@@Kafka Stream shutdown erorr occurred " + getName() + " " + e); - } - log.info("Kafka connection closure with in 15 seconds by a Executors task"); - return true; - } - }; - - RunnableFuture future = new FutureTask(run); - ExecutorService service = Executors.newSingleThreadExecutor(); - service.execute(future); - Boolean result = null; - try { - result = (Boolean) future.get(15, TimeUnit.SECONDS); // wait 1 - // second - } catch (TimeoutException ex) { - // timed out. Try to stop the code if possible. 
- log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task"); - future.cancel(true); - } catch (Exception ex) { - // timed out. Try to stop the code if possible. - log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task" + ex); - future.cancel(true); - return false; - } - service.shutdown(); - return true; - } - - /** - * getConsumerGroup() returns Consumer group - * - * @return - */ - public String getConsumerGroup() { - return fGroup; - } - - /** - * getConsumerId returns Consumer Id - * - * @return - */ - public String getConsumerId() { - return fId; - } - - /** - * getState returns kafkaconsumer state - * - * @return - */ - private KafkaConsumer.State getState() { - return this.state; - } - - /** - * setState() sets the kafkaConsumer state - * - * @param state - */ - private void setState(KafkaConsumer.State state) { - this.state = state; - } - - // private ConsumerConnector fConnector; - private final String fTopic; - private final String fGroup; - private final String fId; - private final String fLogTag; - // private final KafkaStream fStream; - private final org.apache.kafka.clients.consumer.KafkaConsumer kConsumer; - private long fCreateTimeMs; - private long fLastTouch; - private long offset; - private KafkaConsumer.State state; - private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class); - private final LinkedBlockingQueue> fPendingMsgs; - // private static final Logger log = - // LoggerFactory.getLogger(KafkaConsumer.class); - - @Override - public void commitOffsets() { - if (getState() == KafkaConsumer.State.CLOSED) { - log.warn("commitOffsets() called on closed KafkaConsumer " + getName()); - return; - } - kConsumer.commitSync(); - // fConsumer.close(); - - } - - - - @Override - public void setOffset(long offsetval) { - // TODO Auto-generated method stub - offset = offsetval; - } -} diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java deleted file mode 100644 index 83c08ec..0000000 --- a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java +++ /dev/null @@ -1,742 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.backends.kafka; - -import java.io.IOException; -import java.net.InetAddress; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Enumeration; -import java.util.LinkedList; -import java.util.List; -import java.util.Map.Entry; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import javax.annotation.Resource; - -import org.I0Itec.zkclient.exception.ZkException; -import org.I0Itec.zkclient.exception.ZkInterruptedException; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.imps.CuratorFrameworkState; -import org.apache.curator.framework.recipes.cache.ChildData; -import org.apache.curator.framework.recipes.cache.PathChildrenCache; -import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; -import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; -import org.apache.curator.framework.state.ConnectionState; -import org.apache.curator.framework.state.ConnectionStateListener; -import org.apache.curator.utils.EnsurePath; -import org.apache.curator.utils.ZKPaths; -import org.apache.http.annotation.NotThreadSafe; -import org.apache.zookeeper.KeeperException; -import org.apache.zookeeper.KeeperException.NoNodeException; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.context.annotation.ComponentScan; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.backends.MetricsSet; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.utils.ConfigurationReader; - - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.metrics.CdmTimer; - -/** - * @NotThreadSafe but expected to be used within KafkaConsumerFactory, which - * must be - * @author peter - * - */ -@NotThreadSafe -public class KafkaConsumerCache { - - private static KafkaConsumerCache kafkaconscache = null; - - public static KafkaConsumerCache getInstance() { - if (kafkaconscache == null) - kafkaconscache = new KafkaConsumerCache(); - - return kafkaconscache; - } - - private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs"; - private static final int kDefault_ConsumerHandoverWaitMs = 500; - - private static final String kSetting_SweepEverySeconds = "cambria.consumer.cache.sweepFreqSeconds"; - private static final String kSetting_TouchEveryMs = "cambria.consumer.cache.touchFreqMs"; - - private static final String kSetting_ZkBasePath = "cambria.consumer.cache.zkBasePath"; - private static final String kDefault_ZkBasePath = CambriaConstants.kDefault_ZkRoot + "/consumerCache"; - - // kafka defaults to timing out a client after 6 seconds of inactivity, but - // it heartbeats even when the client isn't fetching. Here, we don't - // want to prematurely rebalance the consumer group. Assuming clients are - // hitting - // the server at least every 30 seconds, timing out after 2 minutes should - // be okay. - // FIXME: consider allowing the client to specify its expected call rate? 
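The comment above describes the cache's expiry policy: clients are expected to touch their consumer at least every couple of minutes, and a periodic sweep drops anything older. A rough sketch of that check, using the 2-minute and 15-second defaults quoted in this file (the helper class is illustrative only):

    import java.util.concurrent.TimeUnit;

    final class ExpiryPolicy {
        static final long MUST_TOUCH_EVERY_MS = TimeUnit.MINUTES.toMillis(2);   // default quoted above
        static final long SWEEP_EVERY_SECONDS = 15;

        // A consumer is considered expired when it has not been touched within the window.
        static boolean isExpired(long lastTouchMs, long nowMs) {
            return lastTouchMs < nowMs - MUST_TOUCH_EVERY_MS;
        }
    }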
- private static final long kDefault_MustTouchEveryMs = 1000L*60*2; - - // check for expirations pretty regularly - private static final long kDefault_SweepEverySeconds = 15; - - private enum Status { - NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED - } - - - - - @Autowired - private DMaaPErrorMessages errorMessages; - - - /** - * User defined exception class for kafka consumer cache - * - * @author nilanjana.maity - * - */ - public class KafkaConsumerCacheException extends Exception { - /** - * To throw the exception - * - * @param t - */ - KafkaConsumerCacheException(Throwable t) { - super(t); - } - - /** - * - * @param s - */ - public KafkaConsumerCacheException(String s) { - super(s); - } - - private static final long serialVersionUID = 1L; - } - - /** - * Creates a KafkaConsumerCache object. Before it is used, you must call - * startCache() - * - * @param apiId - * @param s - * @param metrics - */ - public KafkaConsumerCache() { - - String strkSetting_ZkBasePath = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - kSetting_ZkBasePath); - if (null == strkSetting_ZkBasePath) - strkSetting_ZkBasePath = kDefault_ZkBasePath; - fBaseZkPath = strkSetting_ZkBasePath; - - fConsumers = new ConcurrentHashMap<>(); - fSweepScheduler = Executors.newScheduledThreadPool(1); - - curatorConsumerCache = null; - - status = Status.NOT_STARTED; - // Watcher for consumer rebalancing across nodes. Kafka011 rebalancing - // work around - - listener = new ConnectionStateListener() { - public void stateChanged(CuratorFramework client, ConnectionState newState) { - if (newState == ConnectionState.LOST) { - - log.info("ZooKeeper connection expired"); - handleConnectionLoss(); - } else if (newState == ConnectionState.READ_ONLY) { - log.warn("ZooKeeper connection set to read only mode."); - } else if (newState == ConnectionState.RECONNECTED) { - log.info("ZooKeeper connection re-established"); - handleReconnection(); - } else if (newState == ConnectionState.SUSPENDED) { - log.warn("ZooKeeper connection has been suspended."); - handleConnectionSuspended(); - } - } - }; - } - - /** - * Start the cache service. This must be called before any get/put - * operations. 
- * - * @param mode - * DMAAP or cambria - * @param curator - * @throws IOException - * @throws KafkaConsumerCacheException - */ - public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException { - - if (fApiId == null) { - throw new IllegalArgumentException("API Node ID must be specified."); - } - - try { - - if (mode != null && mode.equals(CambriaConstants.DMAAP)) { - curator = getCuratorFramework(curator); - } - curator.getConnectionStateListenable().addListener(listener); - setStatus(Status.CONNECTED); - curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true); - curatorConsumerCache.start(); - curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() { - public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception { - switch (event.getType()) { - case CHILD_ADDED: { - try { - final String apiId = new String(event.getData().getData()); - final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath()); - - log.info(apiId + " started consumer " + consumer); - } catch (Exception ex) { - log.info("#Error Occured during Adding child" + ex); - } - break; - } - case CHILD_UPDATED: { - final String apiId = new String(event.getData().getData()); - final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath()); - - if (fConsumers.containsKey(consumer)) { - log.info(apiId + " claimed consumer " + consumer + " from " + fApiId - + " but wont hand over"); - // Commented so that it dont give the connection - // until the active node is running for this client - // id. - dropClaimedConsumer(consumer); - } - - break; - } - case CHILD_REMOVED: { - final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath()); - - if (fConsumers.containsKey(consumer)) { - log.info("Someone wanted consumer " + consumer - + " gone; but not removing it from the cache"); - dropConsumer(consumer, false); - } - - break; - } - - default: - break; - } - } - }); - - // initialize the ZK path - EnsurePath ensurePath = new EnsurePath(fBaseZkPath); - ensurePath.ensure(curator.getZookeeperClient()); - - - - long freq = kDefault_SweepEverySeconds; - String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - kSetting_SweepEverySeconds); - if (null != strkSetting_SweepEverySeconds) { - freq = Long.parseLong(strkSetting_SweepEverySeconds); - } - - fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS); - log.info("KafkaConsumerCache started"); - log.info("sweeping cached clients every " + freq + " seconds"); - } catch (ZkException e) { - log.error("@@@@@@ ZK Exception occured for " + e); - throw new KafkaConsumerCacheException(e); - } catch (Exception e) { - log.error("@@@@@@ Exception occured for " + e); - throw new KafkaConsumerCacheException(e); - } - } - - /** - * Getting the curator oject to start the zookeeper connection estabished - * - * @param curator - * @return curator object - */ - public static CuratorFramework getCuratorFramework(CuratorFramework curator) { - if (curator.getState() == CuratorFrameworkState.LATENT) { - curator.start(); - - try { - curator.blockUntilConnected(); - } catch (InterruptedException e) { - log.error("error while setting curator framework :",e); - Thread.currentThread().interrupt(); - } - } - - return curator; - } - - /** - * Stop the cache service. 
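startCache() above wires a Curator PathChildrenCache to react to consumer-ownership znodes being added, updated, or removed. A stripped-down sketch of that wiring, assuming Curator's recipes are on the classpath; the connect string and base path are placeholders:

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.framework.recipes.cache.PathChildrenCache;
    import org.apache.curator.retry.ExponentialBackoffRetry;
    import org.apache.curator.utils.ZKPaths;

    public class OwnershipWatchSketch {
        public static void main(String[] args) throws Exception {
            CuratorFramework curator = CuratorFrameworkFactory.newClient(
                    "localhost:2181", new ExponentialBackoffRetry(1000, 3));   // placeholder connect string
            curator.start();
            curator.blockUntilConnected();

            // Cache the children of the consumer-ownership path; 'true' also caches node data.
            PathChildrenCache cache = new PathChildrenCache(curator, "/consumerCache", true);
            cache.getListenable().addListener((client, event) -> {
                String node = event.getData() == null ? "?" : ZKPaths.getNodeFromPath(event.getData().getPath());
                switch (event.getType()) {
                    case CHILD_ADDED:   System.out.println("consumer claimed: " + node); break;
                    case CHILD_UPDATED: System.out.println("consumer re-claimed: " + node); break;
                    case CHILD_REMOVED: System.out.println("consumer released: " + node); break;
                    default: break;
                }
            });
            cache.start();
        }
    }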
- */ - public void stopCache() { - setStatus(Status.DISCONNECTED); - - final CuratorFramework curator = ConfigurationReader.getCurator(); - - if (curator != null) { - try { - curator.getConnectionStateListenable().removeListener(listener); - curatorConsumerCache.close(); - log.info("Curator client closed"); - } catch (ZkInterruptedException e) { - log.warn("Curator client close interrupted: " + e.getMessage()); - } catch (IOException e) { - log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e.getMessage()); - } - - curatorConsumerCache = null; - } - - if (fSweepScheduler != null) { - fSweepScheduler.shutdownNow(); - log.info("cache sweeper stopped"); - } - - if (fConsumers != null) { - fConsumers.clear(); - fConsumers = null; - } - - setStatus(Status.NOT_STARTED); - - log.info("Consumer cache service stopped"); - } - - /** - * Get a cached consumer by topic, group, and id, if it exists (and remains - * valid) In addition, this method waits for all other consumer caches in - * the cluster to release their ownership and delete their version of this - * consumer. - * - * @param topic - * @param consumerGroupId - * @param clientId - * @return a consumer, or null - */ - public Kafka011Consumer getConsumerFor(String topic, String consumerGroupId, String clientId) - throws KafkaConsumerCacheException { - if (getStatus() != KafkaConsumerCache.Status.CONNECTED) - throw new KafkaConsumerCacheException("The cache service is unavailable."); - - final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId); - final Kafka011Consumer kc = fConsumers.get(consumerKey); - - if (kc != null) { - log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch()); - kc.touch(); - fMetrics.onKafkaConsumerCacheHit(); - } else { - log.debug("Consumer cache miss for [" + consumerKey + "]"); - fMetrics.onKafkaConsumerCacheMiss(); - } - - return kc; - } - - /** - * Get a cached consumer by topic, group, and id, if it exists (and remains - * valid) In addition, this method waits for all other consumer caches in - * the cluster to release their ownership and delete their version of this - * consumer. 
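Cached consumers in this class are keyed by topic, group, and client id joined with "::" (the makeConsumerKey format), and every cache hit refreshes the consumer's last-touch time. A toy sketch of that lookup-and-touch pattern over a ConcurrentHashMap, with Entry standing in for Kafka011Consumer:

    import java.util.concurrent.ConcurrentHashMap;

    final class ConsumerCacheSketch {
        static final class Entry {
            volatile long lastTouchMs = System.currentTimeMillis();
        }

        private final ConcurrentHashMap<String, Entry> cache = new ConcurrentHashMap<>();

        // Key format mirrors makeConsumerKey: topic::group::clientId.
        static String key(String topic, String group, String clientId) {
            return topic + "::" + group + "::" + clientId;
        }

        // Returns the cached entry (refreshing its last-touch time) or null on a miss.
        Entry lookup(String topic, String group, String clientId) {
            Entry e = cache.get(key(topic, group, clientId));
            if (e != null) {
                e.lastTouchMs = System.currentTimeMillis();
            }
            return e;
        }

        void put(String topic, String group, String clientId, Entry e) {
            cache.put(key(topic, group, clientId), e);
        }
    }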
- * - * @param topic - * @param consumerGroupId - * @param clientId - * @return a consumer, or null - */ - public ArrayList getConsumerListForCG(String topicgroup, String clientId) - throws KafkaConsumerCacheException { - if (getStatus() != KafkaConsumerCache.Status.CONNECTED) - throw new KafkaConsumerCacheException("The cache service is unavailable."); - ArrayList kcl = new ArrayList<>(); - - - Enumeration strEnum = fConsumers.keys(); - String consumerLocalKey = null; - while (strEnum.hasMoreElements()) { - consumerLocalKey = strEnum.nextElement(); - - if (consumerLocalKey.startsWith(topicgroup) && (!consumerLocalKey.endsWith("::" + clientId))) { - - - - - kcl.add(fConsumers.get(consumerLocalKey)); - - } - } - - return kcl; - } - - public ArrayList getConsumerListForCG(String group) throws KafkaConsumerCacheException { - if (getStatus() != KafkaConsumerCache.Status.CONNECTED) - throw new KafkaConsumerCacheException("The cache service is unavailable."); - ArrayList kcl = new ArrayList<>(); - - Enumeration strEnum = fConsumers.keys(); - String consumerLocalKey = null; - while (strEnum.hasMoreElements()) { - consumerLocalKey = strEnum.nextElement(); - - if (consumerLocalKey.startsWith(group)) { - - - kcl.add(fConsumers.get(consumerLocalKey)); - - } - } - - return kcl; - } - - /** - * Put a consumer into the cache by topic, group and ID - * - * @param topic - * @param consumerGroupId - * @param consumerId - * @param consumer - * @throws KafkaConsumerCacheException - */ - public void putConsumerFor(String topic, String consumerGroupId, String consumerId, Kafka011Consumer consumer) - throws KafkaConsumerCacheException { - if (getStatus() != KafkaConsumerCache.Status.CONNECTED) - throw new KafkaConsumerCacheException("The cache service is unavailable."); - - final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId); - fConsumers.put(consumerKey, consumer); - - - - log.info("^@ Consumer Added to Cache Consumer Key" + consumerKey + " ApiId" + fApiId); - } - - public Collection getConsumers() { - return new LinkedList<>(fConsumers.values()); - } - - /** - * This method is to drop all the consumer - */ - public void dropAllConsumers() { - for (Entry entry : fConsumers.entrySet()) { - dropConsumer(entry.getKey(), true); - } - - // consumers should be empty here - if (fConsumers.size() > 0) { - log.warn("During dropAllConsumers, the consumer map is not empty."); - fConsumers.clear(); - } - } - - /** - * Drop a consumer from our cache due to a timeout - * - * @param key - */ - private void dropTimedOutConsumer(String key) { - fMetrics.onKafkaConsumerTimeout(); - - if (!fConsumers.containsKey(key)) { - log.warn("Attempted to drop a timed out consumer which was not in our cache: " + key); - return; - } - - // First, drop this consumer from our cache - boolean isdrop = dropConsumer(key, true); - if (!isdrop) { - return; - } - final CuratorFramework curator = ConfigurationReader.getCurator(); - - try { - curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key); - log.info(" ^ deleted " + fBaseZkPath + "/" + key); - } catch (NoNodeException e) { - log.warn("A consumer was deleted from " + fApiId - + "'s cache, but no Cambria API node had ownership of it in ZooKeeper"); - } catch (Exception e) { - log.debug("Unexpected exception while deleting consumer: " + e.getMessage()); - log.info(" %%%%%%@# Unexpected exception while deleting consumer: " + e.getMessage()); - } - - try { - int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs; - String strkSetting_ConsumerHandoverWaitMs 
= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - kSetting_ConsumerHandoverWaitMs); - if (strkSetting_ConsumerHandoverWaitMs != null) - consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs); - Thread.sleep(consumerHandoverWaitMs); - } catch (InterruptedException e) { - log.error("InterruptedException in dropTimedOutConsumer",e); - Thread.currentThread().interrupt(); - } - log.info("Dropped " + key + " consumer due to timeout"); - } - - /** - * Drop a consumer from our cache due to another API node claiming it as - * their own. - * - * @param key - */ - private void dropClaimedConsumer(String key) { - // if the consumer is still in our cache, it implies a claim. - if (fConsumers.containsKey(key)) { - fMetrics.onKafkaConsumerClaimed(); - log.info("Consumer [" + key + "] claimed by another node."); - } - log.info("^dropping claimed Kafka consumer " + key); - dropConsumer(key, false); - } - - /** - * Removes the consumer from the cache and closes its connection to the - * kafka broker(s). - * - * @param key - * @param dueToTimeout - */ - private boolean dropConsumer(String key, boolean dueToTimeout) { - final Kafka011Consumer kc = fConsumers.get(key); - log.info("closing Kafka consumer " + key + " object " + kc); - if (kc != null) { - - if (kc.close()) { - fConsumers.remove(key); - - } else { - return false; - } - } - return true; - } - - // private final rrNvReadable fSettings; - private MetricsSet fMetrics; - private final String fBaseZkPath; - private final ScheduledExecutorService fSweepScheduler; - private String fApiId; - - public void setfMetrics(final MetricsSet metrics) { - this.fMetrics = metrics; - } - - public void setfApiId(final String id) { - this.fApiId = id; - } - - private final ConnectionStateListener listener; - - private ConcurrentHashMap fConsumers; - private PathChildrenCache curatorConsumerCache; - - private volatile Status status; - - private void handleReconnection() { - - log.info("Reading current cache data from ZK and synchronizing local cache"); - final List cacheData = curatorConsumerCache.getCurrentData(); - // Remove all the consumers in this API nodes cache that now belong to - // other API nodes. - for (ChildData cachedConsumer : cacheData) { - final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath()); - final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData()) - : "undefined"; - if (!fApiId.equals(owningApiId)) { - fConsumers.remove(consumerId); // Commented to avoid removing - // the value cache hashmap but the lock still exists. 
- // This is not considered in kafka consumer Factory - log.info("@@@ Validating current cache data from ZK and synchronizing local cache" + owningApiId - + " removing " + consumerId); - } - } - - setStatus(Status.CONNECTED); - } - - private void handleConnectionSuspended() { - log.info("Suspending cache until ZK connection is re-established"); - - setStatus(Status.SUSPENDED); - } - - private void handleConnectionLoss() { - log.info("Clearing consumer cache (shutting down all Kafka consumers on this node)"); - - setStatus(Status.DISCONNECTED); - - closeAllCachedConsumers(); - fConsumers.clear(); - } - - private void closeAllCachedConsumers() { - for (Entry entry : fConsumers.entrySet()) { - try { - entry.getValue().close(); - } catch (Exception e) { - log.info("@@@@@@ Error occurd while closing Clearing All cache " + e); - } - } - } - - private static String makeConsumerKey(String topic, String consumerGroupId, String clientId) { - return topic + "::" + consumerGroupId + "::" + clientId; - } - - /** - * This method is to get a lock - * - * @param topic - * @param consumerGroupId - * @param consumerId - * @throws KafkaConsumerCacheException - */ - public void signalOwnership(final String topic, final String consumerGroupId, final String consumerId) - throws KafkaConsumerCacheException { - // get a lock at /:::: - final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId); - - try(final CdmTimer timer = new CdmTimer(fMetrics, "CacheSignalOwnership")) { - final String consumerPath = fBaseZkPath + "/" + consumerKey; - log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey); - final CuratorFramework curator = ConfigurationReader.getCurator(); - - try { - curator.setData().forPath(consumerPath, fApiId.getBytes()); - } catch (KeeperException.NoNodeException e) { - curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes()); - } - log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey); - timer.end(); - } catch (Exception e) { - log.error(fApiId + " failed to claim ownership of consumer " + consumerKey); - throw new KafkaConsumerCacheException(e); - } - - log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer"); - - try { - int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs; - String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - kSetting_ConsumerHandoverWaitMs); - if (strkSetting_ConsumerHandoverWaitMs != null) - consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs); - Thread.sleep(consumerHandoverWaitMs); - } catch (InterruptedException e) { - log.error("InterruptedException in signalOwnership",e); - Thread.currentThread().interrupt(); - } - } - - public KafkaLiveLockAvoider2 getkafkaLiveLockAvoiderObj() { - return null; - } - - public void sweep() { - final LinkedList removals = new LinkedList(); - long mustTouchEveryMs = kDefault_MustTouchEveryMs; - String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - kSetting_TouchEveryMs); - if (null != strkSetting_TouchEveryMs) { - mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs); - } - - - final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs; - - for (Entry e : fConsumers.entrySet()) { - final long lastTouchMs = e.getValue().getLastTouch(); - log.debug("consumer #####1" + e.getKey() + " " + lastTouchMs + " < " + oldestAllowedTouchMs); - - if (lastTouchMs < 
oldestAllowedTouchMs) { - log.info("consumer " + e.getKey() + " has expired"); - removals.add(e.getKey()); - } - } - - for (String key : removals) { - dropTimedOutConsumer(key); - } - } - - /** - * Creating a thread to run the sweep method - * - * @author nilanjana.maity - * - */ - private class sweeper implements Runnable { - /** - * run method - */ - public void run() { - sweep(); - } - } - - /** - * This method is to drop consumer - * - * @param topic - * @param consumerGroup - * @param clientId - */ - public void dropConsumer(String topic, String consumerGroup, String clientId) { - dropConsumer(makeConsumerKey(topic, consumerGroup, clientId), false); - } - - private Status getStatus() { - return this.status; - } - - private void setStatus(Status status) { - this.status = status; - } - - private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class); - -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java deleted file mode 100644 index f521b41..0000000 --- a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java +++ /dev/null @@ -1,159 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.backends.kafka; - - -import java.util.List; -import java.util.concurrent.TimeUnit; - -import javax.annotation.PostConstruct; - -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; -import org.apache.zookeeper.CreateMode; -import org.apache.zookeeper.Watcher; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.stereotype.Component; - -//@ComponentScan(basePackages="com.att.dmf.mr.backends.kafka") -@Component -public class KafkaLiveLockAvoider2 { - - public static final String ZNODE_ROOT = "/live-lock-avoid"; - public static final String ZNODE_LOCKS = "/locks"; - public static final String ZNODE_UNSTICK_TASKS ="/unstick-tasks"; - - private static String locksPath = ZNODE_ROOT+ZNODE_LOCKS; - private static String tasksPath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS; - private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaLiveLockAvoider2.class.getName()); - - @Autowired - @Qualifier("curator") - private CuratorFramework curatorFramework; - - @PostConstruct - public void init() { - log.info("Welcome......................................................................................"); - try { - if (curatorFramework.checkExists().forPath(locksPath) == null) { - curatorFramework.create().creatingParentsIfNeeded().forPath(locksPath); - } - if (curatorFramework.checkExists().forPath(tasksPath) == null) { - curatorFramework.create().creatingParentsIfNeeded().forPath(tasksPath); - } - - } catch (Exception e) { - - log.error("Error during creation of permanent Znodes under /live-lock-avoid ",e); - - } - - - } - public void unlockConsumerGroup(String appId, String groupName) throws Exception { - - log.info("Signalling unlock to all conumsers of in group [{}] now, " , groupName); - - String fullLockPath = String.format("%s/%s", locksPath, groupName ); - String fullTasksPath = null; - - try { - - //Use the Curator recipe for a Mutex lock, only one process can be broadcasting unlock instructions for a group - InterProcessMutex lock = new InterProcessMutex(curatorFramework, fullLockPath); - if ( lock.acquire(100L, TimeUnit.MILLISECONDS) ) - { - try - { - List taskNodes = curatorFramework.getChildren().forPath(tasksPath); - for (String taskNodeName : taskNodes) { - if(!taskNodeName.equals(appId)) { - - fullTasksPath = String.format("%s/%s/%s", tasksPath, taskNodeName, groupName); - log.info("Writing groupName {} to path {}",groupName, fullTasksPath); - - - if(curatorFramework.checkExists().forPath(fullTasksPath) != null) { - curatorFramework.delete().forPath(fullTasksPath); - } - curatorFramework.create().withMode(CreateMode.EPHEMERAL).forPath(fullTasksPath); - } - } - - - } - finally - { - //Curator lock recipe requires a acquire() to be followed by a release() - lock.release(); - } - }else { - log.info("Could not obtain the avoider lock, another process has the avoider lock? {}", !lock.isAcquiredInThisProcess() ); - } - - - } catch (Exception e) { - log.error("Error setting up either lock ZNode {} or task ZNode {}",fullLockPath, fullTasksPath,e); - throw e; - } - - - } - - /* - * Shoud be called once per MR server instance. 
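unlockConsumerGroup() above relies on Curator's InterProcessMutex so that only one node at a time broadcasts unlock tasks for a group. A minimal sketch of that recipe in isolation; the lock path and the work to run under the lock are placeholders:

    import java.util.concurrent.TimeUnit;
    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.recipes.locks.InterProcessMutex;

    public class MutexSketch {
        // Runs 'work' only if the cluster-wide lock can be acquired quickly, as in the avoider above.
        static void withLock(CuratorFramework curator, String lockPath, Runnable work) throws Exception {
            InterProcessMutex lock = new InterProcessMutex(curator, lockPath);
            if (lock.acquire(100L, TimeUnit.MILLISECONDS)) {
                try {
                    work.run();
                } finally {
                    lock.release();   // every successful acquire() must be paired with a release()
                }
            } else {
                System.out.println("another process holds " + lockPath + "; skipping");
            }
        }
    }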
- * - */ - public void startNewWatcherForServer(String appId, LiveLockAvoidance avoidanceCallback) { - LockInstructionWatcher instructionWatcher = new LockInstructionWatcher(curatorFramework,avoidanceCallback,this); - assignNewProcessNode(appId, instructionWatcher); - - } - - - protected void assignNewProcessNode(String appId, Watcher processNodeWatcher ) { - - String taskHolderZnodePath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS+"/"+appId; - - - try { - - if(curatorFramework.checkExists().forPath(taskHolderZnodePath) != null) { - curatorFramework.delete().deletingChildrenIfNeeded().forPath(taskHolderZnodePath); - - } - curatorFramework.create().forPath(taskHolderZnodePath); - //setup the watcher - curatorFramework.getChildren().usingWatcher(processNodeWatcher).inBackground().forPath(taskHolderZnodePath); - log.info("Done creating task holder and watcher for APP name: {}",appId); - - } catch (Exception e) { - log.error("Could not add new processing node for name {}", appId, e); - } - - } - - -} diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java deleted file mode 100644 index 1e78b01..0000000 --- a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java +++ /dev/null @@ -1,228 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends.kafka; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.LinkedList; -import java.util.List; -import java.util.Properties; - -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.Producer; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.json.JSONException; -import org.springframework.beans.factory.annotation.Qualifier; - -import com.att.dmf.mr.backends.Publisher; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.utils.Utils; -//import org.slf4j.Logger; -//import org.slf4j.LoggerFactory; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.drumlin.till.nv.rrNvReadable; - - - -/** - * Sends raw JSON objects into Kafka. - * - * Could improve space: BSON rather than JSON? 
- * - * @author peter - * - */ - -public class KafkaPublisher implements Publisher { - /** - * constructor initializing - * - * @param settings - * @throws rrNvReadable.missingReqdSetting - */ - public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting { - //fSettings = settings; - - final Properties props = new Properties(); - /*transferSetting(fSettings, props, "metadata.broker.list", "localhost:9092"); - transferSetting(fSettings, props, "request.required.acks", "1"); - transferSetting(fSettings, props, "message.send.max.retries", "5"); - transferSetting(fSettings, props, "retry.backoff.ms", "150"); */ - String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list"); - if(null==kafkaConnUrl){ - - kafkaConnUrl="localhost:9092"; - } - - - if(Utils.isCadiEnabled()){ - transferSetting( props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';"); - transferSetting( props, "security.protocol", "SASL_PLAINTEXT"); - transferSetting( props, "sasl.mechanism", "PLAIN"); - } - transferSetting( props, "bootstrap.servers",kafkaConnUrl); - - transferSetting( props, "request.required.acks", "1"); - transferSetting( props, "message.send.max.retries", "5"); - transferSetting(props, "retry.backoff.ms", "150"); - - - - props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - - - - fProducer = new KafkaProducer<>(props); - } - - /** - * Send a message with a given topic and key. - * - * @param msg - * @throws FailedToSendMessageException - * @throws JSONException - */ - @Override - public void sendMessage(String topic, message msg) throws IOException{ - final List msgs = new LinkedList(); - msgs.add(msg); - sendMessages(topic, msgs); - } - - /** - * method publishing batch messages - * This method is commented from 0.8 to 0.11 upgrade - * @param topic - * @param kms - * throws IOException - * - public void sendBatchMessage(String topic, ArrayList> kms) throws IOException { - try { - fProducer.send(kms); - - } catch (FailedToSendMessageException excp) { - log.error("Failed to send message(s) to topic [" + topic + "].", excp); - throw new FailedToSendMessageException(excp.getMessage(), excp); - } - - } */ - - - /* - * Kafka 11.0 Interface - * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList) - */ - public void sendBatchMessageNew(String topic, ArrayList > kms) throws IOException { - try { - for (ProducerRecord km : kms) { - fProducer.send(km); - } - - } catch (Exception excp) { - log.error("Failed to send message(s) to topic [" + topic + "].", excp); - throw new IOException(excp.getMessage(), excp); - } - - } - - /** - * Send a set of messages. Each must have a "key" string value. 
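The constructor below builds a KafkaProducer from message-router properties and optional SASL settings. A self-contained sketch of the equivalent plain-Kafka setup; the broker address and topic are placeholders, and "acks"/"retries" are the new-producer names for the legacy settings transferred below:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.Producer;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public class ProducerSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092");   // placeholder broker list
            props.put("acks", "1");
            props.put("retries", "5");
            props.put("retry.backoff.ms", "150");
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

            try (Producer<String, String> producer = new KafkaProducer<>(props)) {
                // Key and value mirror the (key, message-body) pairs the publisher sends per record.
                producer.send(new ProducerRecord<>("example-topic", "example-key", "{\"event\":\"hello\"}"));
            }
        }
    }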
- * - * @param topic - * @param msg - * @throws FailedToSendMessageException - * @throws JSONException - * - @Override - public void sendMessages(String topic, List msgs) - throws IOException, FailedToSendMessageException { - log.info("sending " + msgs.size() + " events to [" + topic + "]"); - - final List> kms = new ArrayList>(msgs.size()); - for (message o : msgs) { - final KeyedMessage data = new KeyedMessage(topic, o.getKey(), o.toString()); - kms.add(data); - } - try { - fProducer.send(kms); - - } catch (FailedToSendMessageException excp) { - log.error("Failed to send message(s) to topic [" + topic + "].", excp); - throw new FailedToSendMessageException(excp.getMessage(), excp); - } - } */ - @Override - public void sendMessagesNew(String topic, List msgs) - throws IOException { - log.info("sending " + msgs.size() + " events to [" + topic + "]"); -try{ - final List> kms = new ArrayList<>(msgs.size()); - for (message o : msgs) { - - final ProducerRecord data = new ProducerRecord<>(topic, o.getKey(), o.toString()); - - - try { - - fProducer.send(data); - - } catch (Exception excp) { - log.error("Failed to send message(s) to topic [" + topic + "].", excp); - throw new Exception(excp.getMessage(), excp); - } - } - - }catch(Exception e){} -} - //private final rrNvReadable fSettings; - - - private Producer fProducer; - - /** - * It sets the key value pair - * @param topic - * @param msg - * @param key - * @param defVal - */ - private void transferSetting(Properties props, String key, String defVal) { - String kafka_prop= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka." + key); - if (null==kafka_prop) kafka_prop=defVal; - //props.put(key, settings.getString("kafka." + key, defVal)); - props.put(key, kafka_prop); - } - - //private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class); - - private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class); - - @Override - public void sendMessages(String topic, List msgs) throws IOException { - // TODO Auto-generated method stub - - } - - -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java b/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java deleted file mode 100644 index a13ecea..0000000 --- a/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java +++ /dev/null @@ -1,45 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends.kafka; - - - -/** - * Live Lock Avoidance interface. To be implemented by the main message router client - * - */ -public interface LiveLockAvoidance { - - /** - * Gets the unique id - * @return the unique id for the Message Router server instance - */ - String getAppId(); - - - /** - * Main callback to inform the local MR server instance that all consumers in a group need to soft poll - * @param groupName name of the Kafka consumer group needed a soft poll - */ - void handleRebalanceUnlock( String groupName); - -} diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java b/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java deleted file mode 100644 index cc3338b..0000000 --- a/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java +++ /dev/null @@ -1,100 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends.kafka; - -import java.util.List; - -import org.apache.curator.framework.CuratorFramework; -import org.apache.zookeeper.WatchedEvent; -import org.apache.zookeeper.Watcher; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * - * LockInstructionWatcher - * A package-private class used internally by the KafkaLiveLockAvoider. - * - * This class implements the zookeeper Watcher callback and listens for changes on child nodes changing. - * Each child node is actually a Kafka group name that needs to be soft polled. Deletion of the child nodes - * after soft poll unlocking is finished. 
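The LiveLockAvoidance callback defined above has two methods that a server node supplies when registering with the avoider. A small sketch of such an implementation, assuming the interface shown above is on the classpath (the pre-rename package path from this hunk is used, and the wake-up action is a placeholder for whatever soft-poll signal the server issues):

    import com.att.dmf.mr.backends.kafka.LiveLockAvoidance;

    public class AvoidanceCallbackSketch {
        public static LiveLockAvoidance forNode(String nodeId) {
            return new LiveLockAvoidance() {
                @Override
                public String getAppId() {
                    return nodeId;   // unique id of this Message Router instance
                }

                @Override
                public void handleRebalanceUnlock(String groupName) {
                    // Placeholder: signal local consumers of this group to wake up and soft poll.
                    System.out.println("soft-poll requested for consumer group " + groupName);
                }
            };
        }
    }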
- * - * - */ -public class LockInstructionWatcher implements Watcher { - - private CuratorFramework curatorFramework; - private LiveLockAvoidance avoidanceCallback; - private KafkaLiveLockAvoider2 avoider; - - private static final EELFLogger log = EELFManager.getInstance().getLogger(LockInstructionWatcher.class.getName()); - - - public LockInstructionWatcher(CuratorFramework curatorFramework, LiveLockAvoidance avoidanceCallback, - KafkaLiveLockAvoider2 avoider) { - super(); - this.curatorFramework = curatorFramework; - this.avoidanceCallback = avoidanceCallback; - this.avoider = avoider; - } - - - @Override - public void process(WatchedEvent event) { - - switch (event.getType()) { - case NodeChildrenChanged: - - - try { - - log.info("node children changed at path: {}", event.getPath()); - - List children = curatorFramework.getChildren().forPath(event.getPath()); - - log.info("found children nodes prodcessing now"); - for (String child : children) { - String childPath = String.format("%s/%s", event.getPath(), child); - log.info("Processing child task at node {}",childPath); - avoidanceCallback.handleRebalanceUnlock( child); - log.info("Deleting child task at node {}",childPath); - curatorFramework.delete().forPath(childPath); - } - //reset the watch with the avoider - avoider.assignNewProcessNode(avoidanceCallback.getAppId(), this); - - - } catch (Exception e) { - log.error("Error manipulating ZNode data in watcher",e); - } - - break; - - default: - log.info("Listner fired on path: {}, with event: {}", event.getPath(), event.getType()); - break; - } - } - - -} diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java deleted file mode 100644 index 237cac8..0000000 --- a/src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java +++ /dev/null @@ -1,184 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.backends.memory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.backends.ConsumerFactory; -/** - * - * @author anowarul.islam - * - */ -public class MemoryConsumerFactory implements ConsumerFactory -{ - - private final MemoryQueue fQueue; - - /** - * - * Initializing constructor - * @param q - */ - public MemoryConsumerFactory ( MemoryQueue q ) - { - fQueue = q; - } - - /** - * - * @param topic - * @param consumerGroupId - * @param clientId - * @param timeoutMs - * @return Consumer - */ - @Override - public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs, String remotehost ) - { - return new MemoryConsumer ( topic, consumerGroupId ); - } - - /** - * - * Define nested inner class - * - */ - private class MemoryConsumer implements Consumer - { - - private final String fTopic; - private final String fConsumer; - private final long fCreateMs; - private long fLastAccessMs; - - /** - * - * Initializing MemoryConsumer constructor - * @param topic - * @param consumer - * - */ - public MemoryConsumer ( String topic, String consumer ) - { - fTopic = topic; - fConsumer = consumer; - fCreateMs = System.currentTimeMillis (); - fLastAccessMs = fCreateMs; - } - - @Override - /** - * - * return consumer details - */ - public Message nextMessage () - { - return fQueue.get ( fTopic, fConsumer ); - } - - @Override - public boolean close() { - //Nothing to close/clean up. - return true; - } - /** - * - */ - public void commitOffsets() - { - // ignoring this aspect - } - /** - * get offset - */ - public long getOffset() - { - return 0; - } - - @Override - /** - * get consumer topic name - */ - public String getName () - { - return fTopic + "/" + fConsumer; - } - - @Override - public long getCreateTimeMs () - { - return fCreateMs; - } - - @Override - public long getLastAccessMs () - { - return fLastAccessMs; - } - - - - @Override - public void setOffset(long offset) { - // TODO Auto-generated method stub - - } - - - } - - @Override - public void destroyConsumer(String topic, String consumerGroupId, - String clientId) { - //No cache for memory consumers, so NOOP - } - - @Override - public void dropCache () - { - // nothing to do - there's no cache here - } - - @Override - /** - * @return ArrayList - */ - public Collection getConsumers () - { - return new ArrayList<> (); - } - - @Override - public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, - String remotehost) throws UnavailableException, CambriaApiException { - // TODO Auto-generated method stub - return null; - } - - -} diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java deleted file mode 100644 index e0c80bd..0000000 --- a/src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java +++ /dev/null @@ -1,201 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
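The memory backend above is the no-Kafka path: MemoryConsumerFactory hands out consumers that read straight from a MemoryQueue. A short usage sketch, assuming the classes from this hunk are on the classpath (pre-rename package path shown; topic, group, client id, timeout, and host are illustrative values):

    import com.att.dmf.mr.backends.Consumer;
    import com.att.dmf.mr.backends.memory.MemoryConsumerFactory;
    import com.att.dmf.mr.backends.memory.MemoryQueue;

    public class MemoryBackendSketch {
        public static void main(String[] args) {
            MemoryQueue queue = new MemoryQueue();
            MemoryConsumerFactory factory = new MemoryConsumerFactory(queue);

            Consumer consumer = factory.getConsumerFor("example-topic", "group-1", "client-1", 1000, "localhost");

            Consumer.Message msg = consumer.nextMessage();   // null until something is published to the topic
            System.out.println(msg == null ? "queue is empty" : msg.getMessage());
        }
    }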
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends.memory; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; - -import com.att.dmf.mr.metabroker.Broker; -import com.att.dmf.mr.metabroker.Topic; -import com.att.nsa.configs.ConfigDb; -import com.att.nsa.security.NsaAcl; -import com.att.nsa.security.NsaApiKey; - -/** - * - * @author anowarul.islam - * - */ -public class MemoryMetaBroker implements Broker { - - private final MemoryQueue fQueue; - private final HashMap fTopics; - - /** - * - * @param mq - * @param configDb - * @param settings - */ - public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb) { - - fQueue = mq; - fTopics = new HashMap<>(); - } - - @Override - public List getAllTopics() { - return new LinkedList(fTopics.values()); - } - - @Override - public Topic getTopic(String topic) { - return fTopics.get(topic); - } - - @Override - public Topic createTopic(String topic, String desc, String ownerApiId, int partitions, int replicas, - boolean transactionEnabled) throws TopicExistsException { - if (getTopic(topic) != null) { - throw new TopicExistsException(topic); - } - fQueue.createTopic(topic); - fTopics.put(topic, new MemTopic(topic, desc, ownerApiId, transactionEnabled)); - return getTopic(topic); - } - - @Override - public void deleteTopic(String topic) { - fTopics.remove(topic); - fQueue.removeTopic(topic); - } - - private static class MemTopic implements Topic { - - private final String fName; - private final String fDesc; - private final String fOwner; - private NsaAcl fReaders; - private NsaAcl fWriters; - private boolean ftransactionEnabled; - private String accessDenied = "User does not own this topic "; - - /** - * constructor initialization - * - * @param name - * @param desc - * @param owner - * @param transactionEnabled - */ - public MemTopic(String name, String desc, String owner, boolean transactionEnabled) { - fName = name; - fDesc = desc; - fOwner = owner; - ftransactionEnabled = transactionEnabled; - fReaders = null; - fWriters = null; - } - - @Override - public String getOwner() { - return fOwner; - } - - @Override - public NsaAcl getReaderAcl() { - return fReaders; - } - - @Override - public NsaAcl getWriterAcl() { - return fWriters; - } - - @Override - public void checkUserRead(NsaApiKey user) throws AccessDeniedException { - if (fReaders != null && (user == null || !fReaders.canUser(user.getKey()))) { - throw new AccessDeniedException(user == null ? 
"" : user.getKey()); - } - } - - @Override - public void checkUserWrite(NsaApiKey user) throws AccessDeniedException { - if (fWriters != null && (user == null || !fWriters.canUser(user.getKey()))) { - throw new AccessDeniedException(user == null ? "" : user.getKey()); - } - } - - @Override - public String getName() { - return fName; - } - - @Override - public String getDescription() { - return fDesc; - } - - @Override - public void permitWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException { - if (!fOwner.equals(asUser.getKey())) { - throw new AccessDeniedException(accessDenied + fName); - } - if (fWriters == null) { - fWriters = new NsaAcl(); - } - fWriters.add(publisherId); - } - - @Override - public void denyWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException { - if (!fOwner.equals(asUser.getKey())) { - throw new AccessDeniedException(accessDenied + fName); - } - fWriters.remove(publisherId); - } - - @Override - public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException { - if (!fOwner.equals(asUser.getKey())) { - throw new AccessDeniedException(accessDenied + fName); - } - if (fReaders == null) { - fReaders = new NsaAcl(); - } - fReaders.add(consumerId); - } - - @Override - public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException { - if (!fOwner.equals(asUser.getKey())) { - throw new AccessDeniedException(accessDenied + fName); - } - fReaders.remove(consumerId); - } - - @Override - public boolean isTransactionEnabled() { - return ftransactionEnabled; - } - - @Override - public Set getOwners() { - final TreeSet set = new TreeSet<> (); - set.add ( fOwner ); - return set; - } - } -} diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java deleted file mode 100644 index 25cb2df..0000000 --- a/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java +++ /dev/null @@ -1,207 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.backends.memory; - -import java.util.ArrayList; -import java.util.HashMap; - -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.backends.Publisher.message; - -/** - * When broker type is memory, then this class is doing all the topic related - * operations - * - * @author anowarul.islam - * - */ -public class MemoryQueue { - // map from topic to list of msgs - private HashMap fQueue; - private HashMap> fOffsets; - - /** - * constructor storing hashMap objects in Queue and Offsets object - */ - public MemoryQueue() { - fQueue = new HashMap<>(); - fOffsets = new HashMap<>(); - } - - /** - * method used to create topic - * - * @param topic - */ - public synchronized void createTopic(String topic) { - LogBuffer q = fQueue.get(topic); - if (q == null) { - q = new LogBuffer(1024 * 1024); - fQueue.put(topic, q); - } - } - - /** - * method used to remove topic - * - * @param topic - */ - public synchronized void removeTopic(String topic) { - LogBuffer q = fQueue.get(topic); - if (q != null) { - fQueue.remove(topic); - } - } - - /** - * method to write message on topic - * - * @param topic - * @param m - */ - public synchronized void put(String topic, message m) { - LogBuffer q = fQueue.get(topic); - if (q == null) { - createTopic(topic); - q = fQueue.get(topic); - } - q.push(m.getMessage()); - } - - /** - * method to read consumer messages - * - * @param topic - * @param consumerName - * @return - */ - public synchronized Consumer.Message get(String topic, String consumerName) { - final LogBuffer q = fQueue.get(topic); - if (q == null) { - return null; - } - - HashMap offsetMap = fOffsets.get(consumerName); - if (offsetMap == null) { - offsetMap = new HashMap<>(); - fOffsets.put(consumerName, offsetMap); - } - Integer offset = offsetMap.get(topic); - if (offset == null) { - offset = 0; - } - - final msgInfo result = q.read(offset); - if (result != null && result.msg != null) { - offsetMap.put(topic, result.offset + 1); - } - return result; - } - - /** - * static inner class used to details about consumed messages - * - * @author anowarul.islam - * - */ - private static class msgInfo implements Consumer.Message { - /** - * published message which is consumed - */ - public String msg; - /** - * offset associated with message - */ - public int offset; - - /** - * get offset of messages - */ - @Override - public long getOffset() { - return offset; - } - - /** - * get consumed message - */ - @Override - public String getMessage() { - return msg; - } - } - - /** - * - * @author sneha.d.desai - * - * private LogBuffer class has synchronized push and read method - */ - private class LogBuffer { - private int fBaseOffset; - private final int fMaxSize; - private final ArrayList fList; - - /** - * constructor initializing the offset, maxsize and list - * - * @param maxSize - */ - public LogBuffer(int maxSize) { - fBaseOffset = 0; - fMaxSize = maxSize; - fList = new ArrayList<>(); - } - - /** - * pushing message - * - * @param msg - */ - public synchronized void push(String msg) { - fList.add(msg); - while (fList.size() > fMaxSize) { - fList.remove(0); - fBaseOffset++; - } - } - - /** - * reading messages - * - * @param offset - * @return - */ - public synchronized msgInfo read(int offset) { - final int actual = Math.max(0, offset - fBaseOffset); - - final msgInfo mi = new msgInfo(); - mi.msg = (actual >= fList.size()) ? 
null : fList.get(actual); - if (mi.msg == null) - return null; - - mi.offset = actual + fBaseOffset; - return mi; - } - - } -} diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java deleted file mode 100644 index 2b43ed3..0000000 --- a/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java +++ /dev/null @@ -1,92 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends.memory; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.apache.kafka.clients.producer.ProducerRecord; - -import com.att.dmf.mr.backends.Publisher; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; - - - -/** - * - * @author anowarul.islam - * - */ -public class MemoryQueuePublisher implements Publisher { - /** - * - * @param q - * @param b - */ - public MemoryQueuePublisher(MemoryQueue q, MemoryMetaBroker b) { - fBroker = b; - fQueue = q; - } - - - /** - * - * @param topic - * @param msg - * @throws IOException - */ - @Override - public void sendMessage(String topic, message msg) throws IOException { - if (null == fBroker.getTopic(topic)) { - try { - fBroker.createTopic(topic, topic, null, 8, 3, false); - } catch (TopicExistsException e) { - throw new RuntimeException(e); - } - } - fQueue.put(topic, msg); - } - - @Override - /** - * @param topic - * @param msgs - * @throws IOException - */ - - public void sendBatchMessageNew(String topic, ArrayList> kms) throws IOException { - - } - - public void sendMessagesNew(String topic, List msgs) throws IOException { - } - - public void sendMessages(String topic, List msgs) throws IOException { - for (message m : msgs) { - sendMessage(topic, m); - } - } - - private final MemoryMetaBroker fBroker; - private final MemoryQueue fQueue; -} diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java b/src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java deleted file mode 100644 index eb77dc2..0000000 --- a/src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java +++ /dev/null @@ -1,109 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * 
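The LogBuffer above keeps at most a fixed number of messages and advances a base offset as old entries fall off, so reader offsets stay stable. A compact, dependency-free sketch of the same bounded-buffer bookkeeping (names here are illustrative):

    import java.util.ArrayList;
    import java.util.List;

    final class BoundedLog {
        private final int maxSize;
        private final List<String> entries = new ArrayList<>();
        private int baseOffset = 0;   // logical offset of entries.get(0)

        BoundedLog(int maxSize) { this.maxSize = maxSize; }

        synchronized void push(String msg) {
            entries.add(msg);
            while (entries.size() > maxSize) {   // drop the oldest entry and shift the base offset
                entries.remove(0);
                baseOffset++;
            }
        }

        // Returns the entry at 'offset', or null when the reader has caught up.
        synchronized String read(int offset) {
            int index = Math.max(0, offset - baseOffset);
            return index >= entries.size() ? null : entries.get(index);
        }
    }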
================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.backends.memory; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.apache.kafka.clients.producer.ProducerRecord; - -import com.att.dmf.mr.backends.Publisher; - - - -/** - * class used for logging perspective - * - * @author anowarul.islam - * - */ -public class MessageLogger implements Publisher { - public MessageLogger() { - } - - public void setFile(File f) throws FileNotFoundException { - fStream = new FileOutputStream(f, true); - } - - /** - * - * @param topic - * @param msg - * @throws IOException - */ - @Override - public void sendMessage(String topic, message msg) throws IOException { - logMsg(msg); - } - - /** - * @param topic - * @param msgs - * @throws IOException - */ - @Override - public void sendMessages(String topic, List msgs) throws IOException { - for (message m : msgs) { - logMsg(m); - } - } - - /** - * @param topic - * @param kms - * @throws IOException - - @Override - public void sendBatchMessage(String topic, ArrayList> kms) throws - - IOException { - } - */ - private FileOutputStream fStream; - - /** - * - * @param msg - * @throws IOException - */ - private void logMsg(message msg) throws IOException { - String key = msg.getKey(); - if (key == null) - key = ""; - - fStream.write('['); - fStream.write(key.getBytes()); - fStream.write("] ".getBytes()); - fStream.write(msg.getMessage().getBytes()); - fStream.write('\n'); - } - public void sendBatchMessageNew(String topic, ArrayList> kms) throws IOException { - - } - - public void sendMessagesNew(String topic, List msgs) throws IOException { - } -} diff --git a/src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java b/src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java deleted file mode 100644 index 4f0108f..0000000 --- a/src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java +++ /dev/null @@ -1,88 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
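
Reviewer note: MessageLogger above appends each event to a file as "[key] message" followed by a newline, writing an empty key when none is set. A tiny self-contained sketch of that framing (the file name is a placeholder):

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Minimal sketch of the "[key] message" framing MessageLogger appends per event.
public class FramedLogSketch {
    public static void main(String[] args) throws IOException {
        try (FileOutputStream out = new FileOutputStream("events.log", true)) {  // append mode
            String key = "";                                   // null keys are written as empty
            String message = "{\"event\":\"demo\"}";
            out.write('[');
            out.write(key.getBytes(StandardCharsets.UTF_8));
            out.write("] ".getBytes(StandardCharsets.UTF_8));
            out.write(message.getBytes(StandardCharsets.UTF_8));
            out.write('\n');
        }
    }
}
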
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.io.Serializable; - -import javax.xml.bind.annotation.XmlRootElement; - -import com.att.nsa.drumlin.till.data.uniqueStringGenerator; -/** - * - * @author anowarul.islam - * - */ -@XmlRootElement -public class ApiKeyBean implements Serializable { - - private static final long serialVersionUID = -8219849086890567740L; - - private static final String KEY_CHARS = "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - - private String email; - private String description; - /** - * constructor - */ - public ApiKeyBean() { - super(); - } -/** - * - * @param email - * @param description - */ - public ApiKeyBean(String email, String description) { - super(); - this.email = email; - this.description = description; - } - - public String getEmail() { - return email; - } - - public void setEmail(String email) { - this.email = email; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public String getKey() { - return generateKey(16); - } - - public String getSharedSecret() { - return generateKey(24); - } - - private static String generateKey ( int length ) { - return uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); - } - -} diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java b/src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java deleted file mode 100644 index 8cbf64f..0000000 --- a/src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java +++ /dev/null @@ -1,288 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
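
Reviewer note: ApiKeyBean.getKey() and getSharedSecret() draw 16- and 24-character strings from the KEY_CHARS alphabet via the NSA uniqueStringGenerator. A sketch of the same idea using only the JDK; the real utility's exact behaviour may differ, this is just the shape of alphabet-based key generation:

import java.security.SecureRandom;

// Sketch of alphabet-based key generation in the spirit of ApiKeyBean.
public class ApiKeySketch {
    private static final String KEY_CHARS =
            "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    private static final SecureRandom RANDOM = new SecureRandom();

    static String generateKey(int length) {
        StringBuilder sb = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            sb.append(KEY_CHARS.charAt(RANDOM.nextInt(KEY_CHARS.length())));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println("api key:       " + generateKey(16));  // same length as getKey()
        System.out.println("shared secret: " + generateKey(24));  // same length as getSharedSecret()
    }
}
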
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.util.Date; -import java.util.HashMap; -import java.util.concurrent.TimeUnit; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.stereotype.Component; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPResponseCode; -import com.att.dmf.mr.exception.ErrorResponse; -import com.att.dmf.mr.utils.Utils; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.drumlin.service.standards.HttpStatusCodes; -import com.att.nsa.drumlin.till.nv.rrNvReadable; -import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.metrics.impl.CdmRateTicker; - -/** - * class provide rate information - * - * @author anowarul.islam - * - */ -@Component -public class DMaaPCambriaLimiter { - private final HashMap fRateInfo; - private final HashMap fRateInfoCheck; - private final double fMaxEmptyPollsPerMinute; - private final double fMaxPollsPerMinute; - private final int fWindowLengthMins; - private final long fSleepMs; - private final long fSleepMs1; - private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class); - - /** - * constructor initializes - * - * @param settings - * @throws missingReqdSetting - * @throws invalidSettingValue - */ - @Autowired - public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings) { - fRateInfo = new HashMap<>(); - fRateInfoCheck = new HashMap<>(); - fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute, - CambriaConstants.kDefault_MaxEmptyPollsPerMinute); - fMaxPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxPollsPerMinute, - 30); - fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength, - CambriaConstants.kDefault_RateLimitWindowLength); - fSleepMs = settings.getLong(CambriaConstants.kSetting_SleepMsOnRateLimit, - CambriaConstants.kDefault_SleepMsOnRateLimit); - fSleepMs1 = settings.getLong(CambriaConstants.kSetting_SleepMsRealOnRateLimit, - 5000); - - } - - /** - * Construct a rate limiter. - * - * @param maxEmptyPollsPerMinute - * Pass <= 0 to deactivate rate limiting. - * @param windowLengthMins - */ - public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, double maxPollsPerMinute,int windowLengthMins) { - this(maxEmptyPollsPerMinute,maxPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute),getSleepMsForRate(1)); - } - - /** - * Construct a rate limiter - * - * @param maxEmptyPollsPerMinute - * Pass <= 0 to deactivate rate limiting. 
- * @param sleepMs - * @param windowLengthMins - */ - public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute,double maxPollsPerMinute, int windowLengthMins, long sleepMs ,long sleepMS1) { - fRateInfo = new HashMap<>(); - fRateInfoCheck = new HashMap<>(); - fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute); - fMaxPollsPerMinute = Math.max(0, maxPollsPerMinute); - fWindowLengthMins = windowLengthMins; - fSleepMs = Math.max(0, sleepMs); - fSleepMs1 = Math.max(0, sleepMS1); - } - - /** - * static method provide the sleep time - * - * @param ratePerMinute - * @return - */ - public static long getSleepMsForRate(double ratePerMinute) { - if (ratePerMinute <= 0.0) - return 0; - return Math.max(1000, Math.round(60 * 1000 / ratePerMinute)); - } - - /** - * Tell the rate limiter about a call to a topic/group/id. If the rate is - * too high, this call delays its return and throws an exception. - * - * @param topic - * @param consumerGroup - * @param clientId - * @throws CambriaApiException - */ - public void onCall(String topic, String consumerGroup, String clientId,String remoteHost) throws CambriaApiException { - // do nothing if rate is configured 0 or less - if (fMaxEmptyPollsPerMinute <= 0) { - return; - } - // setup rate info for this tuple - final RateInfo ri = getRateInfo(topic, consumerGroup, clientId); - final double rate = ri.onCall(); - log.info(ri.getLabel() + ": " + rate + " empty replies/minute."); - if (rate > fMaxEmptyPollsPerMinute) { - try { - log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxPollsPerMinute - + "."); - if (fSleepMs > 0) { - log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs - + " ms sleep, then responding in error."); - Thread.sleep(fSleepMs); - - } else { - log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error."); - } - } catch (InterruptedException e) { - log.error("Exception "+ e); - // ignore - } - - - ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests, - DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(), - "This client is making too many requests. Please use a long poll " - + "setting to decrease the number of requests that result in empty responses. 
","",Utils.getFormattedDate(new Date()),topic,"","",consumerGroup+"/"+clientId,remoteHost); - - log.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - - - } - - /** - * - * @param topic - * @param consumerGroup - * @param clientId - * @param sentCount - */ - public void onSend(String topic, String consumerGroup, String clientId, long sentCount) { - // check for good replies - if (sentCount > 0) { - // that was a good send, reset the metric - getRateInfo(topic, consumerGroup, clientId).reset(); - } - } - - private static class RateInfo { - private final String fLabel; - private final CdmRateTicker fCallRateSinceLastMsgSend; - /** - * constructor initialzes - * - * @param label - * @param windowLengthMinutes - */ - public RateInfo(String label, int windowLengthMinutes) { - fLabel = label; - fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES, - windowLengthMinutes, TimeUnit.MINUTES); - } - - public String getLabel() { - return fLabel; - } - - /** - * CdmRateTicker is reset - */ - public void reset() { - fCallRateSinceLastMsgSend.reset(); - } - - /** - * - * @return - */ - public double onCall() { - fCallRateSinceLastMsgSend.tick(); - return fCallRateSinceLastMsgSend.getRate(); - } - } - - - - private static class RateInfoCheck { - - private final String fLabel; - private final CdmRateTicker fCallRateSinceLastMsgSend; - /** - * constructor initialzes - * - * @param label - * @param windowLengthMinutes - */ - public RateInfoCheck(String label, int windowLengthMinutes) { - fLabel = label; - fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES, - windowLengthMinutes, TimeUnit.MINUTES); - } - - public String getLabel() { - return fLabel; - } - - /** - * CdmRateTicker is reset - */ - public void reset() { - fCallRateSinceLastMsgSend.reset(); - } - - /** - * - * @return - */ - public double onCall() { - fCallRateSinceLastMsgSend.tick(); - return fCallRateSinceLastMsgSend.getRate(); - } - } - - - - - private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) { - final String key = makeKey(topic, consumerGroup, clientId); - RateInfo ri = fRateInfo.get(key); - if (ri == null) { - ri = new RateInfo(key, fWindowLengthMins); - fRateInfo.put(key, ri); - } - return ri; - } - - - - - - - - private String makeKey(String topic, String group, String id) { - return topic + "::" + group + "::" + id; - } -} diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPContext.java b/src/main/java/com/att/dmf/mr/beans/DMaaPContext.java deleted file mode 100644 index a880877..0000000 --- a/src/main/java/com/att/dmf/mr/beans/DMaaPContext.java +++ /dev/null @@ -1,104 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.text.SimpleDateFormat; -import java.util.Date; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import javax.servlet.http.HttpSession; - -import com.att.dmf.mr.utils.ConfigurationReader; - -/** - * DMaaPContext provide and maintain all the configurations , Http request/response - * Session and consumer Request Time - * @author nilanjana.maity - * - */ -public class DMaaPContext { - - private ConfigurationReader configReader; - private HttpServletRequest request; - private HttpServletResponse response; - private HttpSession session; - private String consumerRequestTime; - static int i=0; - - public synchronized static long getBatchID() { - try{ - final long metricsSendTime = System.currentTimeMillis(); - final Date d = new Date(metricsSendTime); - final String text = new SimpleDateFormat("ddMMyyyyHHmmss").format(d); - long dt= Long.valueOf(text)+i; - i++; - return dt; - } - catch(NumberFormatException ex){ - return 0; - } - } - - public HttpServletRequest getRequest() { - return request; - } - - public void setRequest(HttpServletRequest request) { - this.request = request; - } - - public HttpServletResponse getResponse() { - return response; - } - - public void setResponse(HttpServletResponse response) { - this.response = response; - } - - public HttpSession getSession() { - this.session = request.getSession(); - return session; - } - - public void setSession(HttpSession session) { - this.session = session; - } - - public ConfigurationReader getConfigReader() { - return configReader; - } - - public void setConfigReader(ConfigurationReader configReader) { - this.configReader = configReader; - } - - public String getConsumerRequestTime() { - return consumerRequestTime; - } - - public void setConsumerRequestTime(String consumerRequestTime) { - this.consumerRequestTime = consumerRequestTime; - } - - -} diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java deleted file mode 100644 index fb0ace0..0000000 --- a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java +++ /dev/null @@ -1,361 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
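
Reviewer note: DMaaPContext.getBatchID() builds a batch id by formatting the current time as ddMMyyyyHHmmss, parsing it to a long and adding a process-local counter so ids issued within the same second still differ. A standalone sketch of that scheme (AtomicInteger is used here only to keep the sketch self-contained; the original relies on a synchronized static method and a plain int):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.concurrent.atomic.AtomicInteger;

// Sketch of the timestamp-plus-counter batch id used by DMaaPContext.getBatchID().
public class BatchIdSketch {
    private static final AtomicInteger COUNTER = new AtomicInteger();

    public static long nextBatchId() {
        String stamp = new SimpleDateFormat("ddMMyyyyHHmmss").format(new Date());
        return Long.parseLong(stamp) + COUNTER.getAndIncrement();
    }

    public static void main(String[] args) {
        System.out.println(nextBatchId());
        System.out.println(nextBatchId());   // differs even within the same second
    }
}
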
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.Collection; -import java.util.HashMap; -import java.util.Properties; -import java.util.concurrent.TimeUnit; - -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.springframework.beans.factory.annotation.Qualifier; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.backends.MetricsSet; -import com.att.dmf.mr.backends.kafka.Kafka011Consumer; -import com.att.dmf.mr.backends.kafka.Kafka011ConsumerUtil; -import com.att.dmf.mr.backends.kafka.KafkaConsumerCache; -import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException; -import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2; -import com.att.dmf.mr.backends.kafka.LiveLockAvoidance; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.Utils; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; - -/** - * @author nilanjana.maity - * - */ -public class DMaaPKafkaConsumerFactory implements ConsumerFactory { - - - private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class); - - - /** - * constructor initialization - * - * @param settings - * @param metrics - * @param curator - * @throws missingReqdSetting - * @throws KafkaConsumerCacheException - * @throws UnknownHostException - */ - - public DMaaPKafkaConsumerFactory(@Qualifier("dMaaPMetricsSet") MetricsSet metrics, - @Qualifier("curator") CuratorFramework curator, - @Qualifier("kafkalockavoid") KafkaLiveLockAvoider2 kafkaLiveLockAvoider) - throws missingReqdSetting, KafkaConsumerCacheException, UnknownHostException { - - String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - CambriaConstants.kSetting_ApiNodeIdentifier); - if (apiNodeId == null) { - - apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port; - } - - log.info("This Cambria API Node identifies itself as [" + apiNodeId + "]."); - final String mode = CambriaConstants.DMAAP; - - fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "kafka.metadata.broker.list"); - if (null == fkafkaBrokers) { - - fkafkaBrokers = "localhost:9092"; - } - - boolean kSetting_EnableCache = kDefault_IsCacheEnabled; - String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - 
"cambria.consumer.cache.enabled"); - if (null != strkSetting_EnableCache) - kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache); - - final boolean isCacheEnabled = kSetting_EnableCache; - - - fCache = null; - if (isCacheEnabled) { - fCache = KafkaConsumerCache.getInstance(); - - } - if (fCache != null) { - fCache.setfMetrics(metrics); - fCache.setfApiId(apiNodeId); - fCache.startCache(mode, curator); - if(kafkaLiveLockAvoider!=null){ - kafkaLiveLockAvoider.startNewWatcherForServer(apiNodeId, makeAvoidanceCallback(apiNodeId)); - fkafkaLiveLockAvoider = kafkaLiveLockAvoider; - } - } - } - - /* - * getConsumerFor - * - * @see - * com.att.dmf.mr.backends.ConsumerFactory#getConsumerFor(java.lang.String, - * java.lang.String, java.lang.String, int, java.lang.String) This method is - * used by EventServiceImpl.getEvents() method to get a Kakfa consumer - * either from kafkaconsumer cache or create a new connection This also get - * the list of other consumer objects for the same consumer group and set to - * KafkaConsumer object. This list may be used during poll-rebalancing - * issue. - */ - @Override - public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs, - String remotehost) throws UnavailableException, CambriaApiException { - Kafka011Consumer kc; - - // To synchronize based on the consumer group. - - Object syncObject = synchash.get(topic + consumerGroupName); - if (null == syncObject) { - syncObject = new Object(); - synchash.put(topic + consumerGroupName, syncObject); - } - - synchronized (syncObject) { - try { - kc = (fCache != null) ? fCache.getConsumerFor(topic, consumerGroupName, consumerId) : null; // consumerId - - } catch (KafkaConsumerCacheException e) { - log.info("######@@@@### Error occured in Kafka Caching" + e + " " + topic + "::" + consumerGroupName - + "::" + consumerId); - log.error("####@@@@## Error occured in Kafka Caching" + e + " " + topic + "::" + consumerGroupName - + "::" + consumerId); - throw new UnavailableException(e); - } - - // Ideally if cache exists below flow should be skipped. If cache - // didnt - // exist, then create this first time on this node. - if (kc == null) { - - log.info("^Kafka consumer cache value " + topic + "::" + consumerGroupName + "::" + consumerId + " =>" - + kc); - - final InterProcessMutex ipLock = new InterProcessMutex(ConfigurationReader.getCurator(), - "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId); - boolean locked = false; - - try { - - locked = ipLock.acquire(30, TimeUnit.SECONDS); - if (!locked) { - - log.info("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic - + ", " + consumerGroupName + ", " + consumerId + ") from " + remotehost); - throw new UnavailableException( - "Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic - + ", " + consumerGroupName + ", " + consumerId + ") " + remotehost); - } - - // ConfigurationReader.getCurator().checkExists().forPath("S"). 
- - log.info("Creating Kafka consumer for group [" + consumerGroupName + "], consumer [" + consumerId - + "], on topic [" + topic + "]."); - - if (fCache != null) { - fCache.signalOwnership(topic, consumerGroupName, consumerId); - } - - final Properties props = createConsumerConfig(topic,consumerGroupName, consumerId); - long fCreateTimeMs = System.currentTimeMillis(); - KafkaConsumer cc = new KafkaConsumer<>(props); - kc = new Kafka011Consumer(topic, consumerGroupName, consumerId, cc, fkafkaLiveLockAvoider); - log.info(" kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs)); - - if (fCache != null) { - fCache.putConsumerFor(topic, consumerGroupName, consumerId, kc); // - } - - } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) { - log.info( - "Kafka consumer couldn't connect to ZK. " + x + " " + consumerGroupName + "/" + consumerId); - throw new UnavailableException("Couldn't connect to ZK."); - } catch (KafkaConsumerCacheException e) { - log.info("Failed to cache consumer (this may have performance implications): " + e.getMessage() - + " " + consumerGroupName + "/" + consumerId); - } catch (UnavailableException u) { - log.info("Failed and in UnavailableException block " + u.getMessage() + " " + consumerGroupName - + "/" + consumerId); - throw new UnavailableException("Error while acquiring consumer factory lock " + u.getMessage(), u); - } catch (Exception e) { - log.info("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/" - + consumerId); - log.error("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/" - + consumerId); - - } finally { - if (locked) { - try { - ipLock.release(); - } catch (Exception e) { - throw new UnavailableException("Error while releasing consumer factory lock" + e, e); - } - } - } - } - } - return kc; - } - - @Override - public synchronized void destroyConsumer(String topic, String consumerGroup, String clientId) { - if (fCache != null) { - fCache.dropConsumer(topic, consumerGroup, clientId); - } - } - - @Override - public synchronized Collection getConsumers() { - return fCache.getConsumers(); - } - - @Override - public synchronized void dropCache() { - fCache.dropAllConsumers(); - } - - - private KafkaConsumerCache fCache; - private KafkaLiveLockAvoider2 fkafkaLiveLockAvoider; - private String fkafkaBrokers; - - - - private static String makeLongKey(String key, String prefix) { - return prefix + "." + key; - } - - private void transferSettingIfProvided(Properties target, String key, String prefix) { - String keyVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, makeLongKey(key, prefix)); - - - if (null != keyVal) { - - log.info("Setting [" + key + "] to " + keyVal + "."); - target.put(key, keyVal); - } - } - - /** - * Name CreateConsumerconfig - * @param topic - * @param groupId - * @param consumerId - * @return Properties - * - * This method is to create Properties required to create kafka connection - * Group name is replaced with different format groupid--topic to address same - * groupids for multiple topics. Same groupid with multiple topics - * may start frequent consumer rebalancing on all the topics . 
Replacing them makes it unique - */ - private Properties createConsumerConfig(String topic ,String groupId, String consumerId) { - final Properties props = new Properties(); - //fakeGroupName is added to avoid multiple consumer group for multiple topics.Donot Change this logic - //Fix for CPFMF-644 : - final String fakeGroupName = groupId + "--" + topic; - props.put("group.id", fakeGroupName); - props.put("enable.auto.commit", "false"); // 0.11 - props.put("bootstrap.servers", fkafkaBrokers); - if(Utils.isCadiEnabled()){ - props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';"); - props.put("security.protocol", "SASL_PLAINTEXT"); - props.put("sasl.mechanism", "PLAIN"); - } - props.put("client.id", consumerId); - - // additional settings: start with our defaults, then pull in configured - // overrides - populateKafkaInternalDefaultsMap(); - for (String key : KafkaConsumerKeys) { - transferSettingIfProvided(props, key, "kafka"); - } - - props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); - props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); - - return props; - } - - - private static final String KafkaConsumerKeys[] = { "bootstrap.servers", "heartbeat.interval.ms", - "auto.offset.reset", "exclude.internal.topics", "session.timeout.ms", "fetch.max.bytes", - "auto.commit.interval.ms", "connections.max.idle.ms", "fetch.min.bytes", "isolation.level", - "fetch.max.bytes", "request.timeout.ms", "fetch.max.wait.bytes", "reconnect.backoff.max.ms", - "max.partition.fetch.bytes", "reconnect.backoff.max.ms", "reconnect.backoff.ms", "retry.backoff.ms", - "max.poll.interval.ms", "max.poll.records", "receive.buffer.bytes", "metadata.max.age.ms" }; - - /** - * putting values in hashmap like consumer timeout, zookeeper time out, etc - * - * @param setting - */ - private static void populateKafkaInternalDefaultsMap() { } - - /* - * The starterIncremnt value is just to emulate calling certain consumers, - * in this test app all the consumers are local - * - */ - private LiveLockAvoidance makeAvoidanceCallback(final String appId) { - - return new LiveLockAvoidance() { - - @Override - public String getAppId() { - return appId; - } - - @Override - public void handleRebalanceUnlock(String groupName) { - log.info("FORCE A POLL NOW FOR appId: [{}] group: [{}]", getAppId(), groupName); - Kafka011ConsumerUtil.forcePollOnConsumer(groupName + "::"); - } - - }; - - } - - @SuppressWarnings("rawtypes") - @Override - public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, - String remotehost) throws UnavailableException, CambriaApiException { - // TODO Auto-generated method stub - return null; - } - - private HashMap synchash = new HashMap(); - -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java deleted file mode 100644 index acf4824..0000000 --- a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java +++ /dev/null @@ -1,495 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
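
Reviewer note: createConsumerConfig() above rewrites the group id as "<groupId>--<topic>" so the same group name used on several topics does not trigger constant rebalancing, disables auto-commit, and switches to SASL/PLAIN when CADI is enabled. A hedged sketch of building such a consumer; the broker address and credentials are placeholders:

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.KafkaConsumer;

// Sketch of the consumer configuration assembled by createConsumerConfig().
public class ConsumerConfigSketch {
    public static KafkaConsumer<String, String> buildConsumer(
            String topic, String groupId, String consumerId, boolean cadiEnabled) {
        Properties props = new Properties();
        props.put("group.id", groupId + "--" + topic);   // per-topic group, cf. CPFMF-644
        props.put("enable.auto.commit", "false");
        props.put("bootstrap.servers", "localhost:9092");
        props.put("client.id", consumerId);
        props.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        if (cadiEnabled) {                               // SASL/PLAIN, as in the original
            props.put("security.protocol", "SASL_PLAINTEXT");
            props.put("sasl.mechanism", "PLAIN");
            props.put("sasl.jaas.config",
                    "org.apache.kafka.common.security.plain.PlainLoginModule required "
                    + "username='admin' password='<secret>';");
        }
        return new KafkaConsumer<>(props);
    }

    public static void main(String[] args) {
        try (KafkaConsumer<String, String> consumer =
                     buildConsumer("myTopic", "myGroup", "consumer-1", false)) {
            consumer.subscribe(Collections.singletonList("myTopic"));
        }
    }
}
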
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; -import java.util.Properties; -import java.util.Set; -import java.util.TreeSet; -import java.util.concurrent.ExecutionException; - -import org.I0Itec.zkclient.ZkClient; -import org.I0Itec.zkclient.exception.ZkNoNodeException; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.AdminClientConfig; -import org.apache.kafka.clients.admin.CreateTopicsResult; -import org.apache.kafka.clients.admin.NewTopic; -import org.apache.kafka.common.KafkaFuture; -import org.json.JSONObject; -import org.json.JSONArray; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.stereotype.Component; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.metabroker.Broker; -import com.att.dmf.mr.metabroker.Broker1; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.Utils; -//import org.apache.log4-j.Logger; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -import com.att.nsa.configs.ConfigDb; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.configs.ConfigPath; -import com.att.nsa.drumlin.service.standards.HttpStatusCodes; -import com.att.nsa.drumlin.till.nv.rrNvReadable; -import com.att.nsa.security.NsaAcl; -import com.att.nsa.security.NsaAclUtils; -import com.att.nsa.security.NsaApiKey; - - -/** - * class performing all topic operations - * - * @author anowarul.islam - * - */ -//@Component -public class DMaaPKafkaMetaBroker implements Broker1 { - - public DMaaPKafkaMetaBroker() { - fZk = null; - fCambriaConfig = null; - fBaseTopicData = null; - final Properties props = new Properties (); - String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "kafka.metadata.broker.list"); - if (null == fkafkaBrokers) { - - fkafkaBrokers = "localhost:9092"; - } - - props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers ); - if(Utils.isCadiEnabled()){ - props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';"); - props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); - props.put("sasl.mechanism", "PLAIN"); - } - - fKafkaAdminClient=AdminClient.create ( props ); - - } - - //private static final Logger log = Logger.getLogger(DMaaPKafkaMetaBroker.class); - private static final EELFLogger log = 
EELFManager.getInstance().getLogger(ConfigurationReader.class); - private final AdminClient fKafkaAdminClient; - - - - /** - * DMaaPKafkaMetaBroker constructor initializing - * - * @param settings - * @param zk - * @param configDb - */ - public DMaaPKafkaMetaBroker(@Qualifier("propertyReader") rrNvReadable settings, - @Qualifier("dMaaPZkClient") ZkClient zk, @Qualifier("dMaaPZkConfigDb") ConfigDb configDb) { - //fSettings = settings; - fZk = zk; - fCambriaConfig = configDb; - fBaseTopicData = configDb.parse("/topics"); - final Properties props = new Properties (); - String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "kafka.metadata.broker.list"); - if (null == fkafkaBrokers) { - - fkafkaBrokers = "localhost:9092"; - } - - if(Utils.isCadiEnabled()){ - props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';"); - props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); - props.put("sasl.mechanism", "PLAIN"); - } - props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers ); - - fKafkaAdminClient=AdminClient.create ( props ); - - - - } - - public DMaaPKafkaMetaBroker( rrNvReadable settings, - ZkClient zk, ConfigDb configDb,AdminClient client) { - - fZk = zk; - fCambriaConfig = configDb; - fBaseTopicData = configDb.parse("/topics"); - fKafkaAdminClient= client; - - - - } - - @Override - public List getAllTopics() throws ConfigDbException { - log.info("Retrieving list of all the topics."); - final LinkedList result = new LinkedList(); - try { - log.info("Retrieving all topics from root: " + zkTopicsRoot); - final List topics = fZk.getChildren(zkTopicsRoot); - for (String topic : topics) { - result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData)); - } - JSONObject dataObj = new JSONObject(); - dataObj.put("topics", new JSONObject()); - - for (String topic : topics) { - dataObj.getJSONObject("topics").put(topic, new JSONObject()); - } - } catch (ZkNoNodeException excp) { - // very fresh kafka doesn't have any topics or a topics node - log.error("ZK doesn't have a Kakfa topics node at " + zkTopicsRoot, excp); - } - return result; - } - - @Override - public Topic getTopic(String topic) throws ConfigDbException { - if (fZk.exists(zkTopicsRoot + "/" + topic)) { - return getKafkaTopicConfig(fCambriaConfig, fBaseTopicData, topic); - } - // else: no such topic in kafka - return null; - } - - /** - * static method get KafkaTopic object - * - * @param db - * @param base - * @param topic - * @return - * @throws ConfigDbException - */ - public static KafkaTopic getKafkaTopicConfig(ConfigDb db, ConfigPath base, String topic) throws ConfigDbException { - return new KafkaTopic(topic, db, base); - } - - /** - * creating topic - */ - @Override - public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas, - boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException { - log.info("Creating topic: " + topic); - try { - log.info("Check if topic [" + topic + "] exist."); - // first check for existence "our way" - final Topic t = getTopic(topic); - if (t != null) { - log.info("Could not create topic [" + topic + "]. Topic Already exists."); - throw new TopicExistsException("Could not create topic [" + topic + "]. Topic Alreay exists."); - } - } catch (ConfigDbException e1) { - log.error("Topic [" + topic + "] could not be created. 
Couldn't check topic data in config db.", e1); - throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable, - "Couldn't check topic data in config db."); - } - - // we only allow 3 replicas. (If we don't test this, we get weird - // results from the cluster, - // so explicit test and fail.) - if (replicas < 1 || replicas > 3) { - log.info("Topic [" + topic + "] could not be created. The replica count must be between 1 and 3."); - throw new CambriaApiException(HttpStatusCodes.k400_badRequest, - "The replica count must be between 1 and 3."); - } - if (partitions < 1) { - log.info("Topic [" + topic + "] could not be created. The partition count must be at least 1."); - throw new CambriaApiException(HttpStatusCodes.k400_badRequest, "The partition count must be at least 1."); - } - - // create via kafka - - try - { - final NewTopic topicRequest = new NewTopic ( topic, partitions, new Integer(replicas).shortValue () ); - final CreateTopicsResult ctr = fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) ); - final KafkaFuture ctrResult = ctr.all (); - ctrResult.get (); - // underlying Kafka topic created. now setup our API info - return createTopicEntry ( topic, desc, ownerApiKey, transactionEnabled ); - } - catch ( InterruptedException e ) - { - - log.warn ( "Execution of describeTopics timed out." ); - throw new ConfigDbException ( e ); - } - catch ( ExecutionException e ) - { - - log.warn ( "Execution of describeTopics failed: " + e.getCause ().getMessage (), e.getCause () ); - throw new ConfigDbException ( e.getCause () ); - } - - } - - @Override - public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException,ConfigDbException { - log.info("Deleting topic: " + topic); - ZkClient zkClient = null; - try { - log.info("Loading zookeeper client for topic deletion."); - // topic creation. (Otherwise, the topic is only partially created - // in ZK.) - - - fKafkaAdminClient.deleteTopics(Arrays.asList(topic)); - log.info("Zookeeper client loaded successfully. Deleting topic."); - - } catch (Exception e) { - log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e); - throw new ConfigDbException(e); - } finally { - log.info("Closing zookeeper connection."); - if (zkClient != null) - zkClient.close(); - } - - // throw new UnsupportedOperationException ( "We can't programmatically - // delete Kafka topics yet." 
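
Reviewer note: the createTopic() path above validates the requested partition and replica counts (replicas limited to 1-3), then creates the topic through the Kafka AdminClient and blocks on the result before recording the owner/description/txenabled metadata. A minimal sketch of the AdminClient part; the broker address is a placeholder and the config-DB step is omitted:

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

// Sketch of topic creation via the Kafka AdminClient as in DMaaPKafkaMetaBroker.
public class TopicCreationSketch {
    public static void createTopic(String topic, int partitions, int replicas)
            throws InterruptedException, ExecutionException {
        if (replicas < 1 || replicas > 3) {
            throw new IllegalArgumentException("The replica count must be between 1 and 3.");
        }
        if (partitions < 1) {
            throw new IllegalArgumentException("The partition count must be at least 1.");
        }
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        try (AdminClient admin = AdminClient.create(props)) {
            NewTopic request = new NewTopic(topic, partitions, (short) replicas);
            admin.createTopics(Collections.singletonList(request)).all().get();  // block until done
        }
    }

    public static void main(String[] args) throws Exception {
        createTopic("demo-topic", 8, 3);   // same counts the memory publisher defaults to
    }
}
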
); - } - - //private final rrNvReadable fSettings; - private final ZkClient fZk; - private final ConfigDb fCambriaConfig; - private final ConfigPath fBaseTopicData; - - private static final String zkTopicsRoot = "/brokers/topics"; - private static final JSONObject kEmptyAcl = new JSONObject(); - - /** - * method Providing KafkaTopic Object associated with owner and - * transactionenabled or not - * - * @param name - * @param desc - * @param owner - * @param transactionEnabled - * @return - * @throws ConfigDbException - */ - public KafkaTopic createTopicEntry(String name, String desc, String owner, boolean transactionEnabled) - throws ConfigDbException { - return createTopicEntry(fCambriaConfig, fBaseTopicData, name, desc, owner, transactionEnabled); - } - - /** - * static method giving kafka topic object - * - * @param db - * @param basePath - * @param name - * @param desc - * @param owner - * @param transactionEnabled - * @return - * @throws ConfigDbException - */ - public static KafkaTopic createTopicEntry(ConfigDb db, ConfigPath basePath, String name, String desc, String owner, - boolean transactionEnabled) throws ConfigDbException { - final JSONObject o = new JSONObject(); - o.put("owner", owner); - o.put("description", desc); - o.put("txenabled", transactionEnabled); - db.store(basePath.getChild(name), o.toString()); - return new KafkaTopic(name, db, basePath); - } - - /** - * class performing all user opearation like user is eligible to read, - * write. permitting a user to write and read, - * - * @author anowarul.islam - * - */ - public static class KafkaTopic implements Topic { - /** - * constructor initializes - * - * @param name - * @param configdb - * @param baseTopic - * @throws ConfigDbException - */ - public KafkaTopic(String name, ConfigDb configdb, ConfigPath baseTopic) throws ConfigDbException { - fName = name; - fConfigDb = configdb; - fBaseTopicData = baseTopic; - - String data = fConfigDb.load(fBaseTopicData.getChild(fName)); - if (data == null) { - data = "{}"; - } - - final JSONObject o = new JSONObject(data); - fOwner = o.optString("owner", ""); - fDesc = o.optString("description", ""); - fTransactionEnabled = o.optBoolean("txenabled", false);// default - // value is - // false - // if this topic has an owner, it needs both read/write ACLs. If there's no - // owner (or it's empty), null is okay -- this is for existing or implicitly - // created topics. 
- JSONObject readers = o.optJSONObject ( "readers" ); - if ( readers == null && fOwner.length () > 0 ) readers = kEmptyAcl; - fReaders = fromJson ( readers ); - - JSONObject writers = o.optJSONObject ( "writers" ); - if ( writers == null && fOwner.length () > 0 ) writers = kEmptyAcl; - fWriters = fromJson ( writers ); - } - - private NsaAcl fromJson(JSONObject o) { - NsaAcl acl = new NsaAcl(); - if (o != null) { - JSONArray a = o.optJSONArray("allowed"); - if (a != null) { - for (int i = 0; i < a.length(); ++i) { - String user = a.getString(i); - acl.add(user); - } - } - } - return acl; - } - - @Override - public String getName() { - return fName; - } - - @Override - public String getOwner() { - return fOwner; - } - - @Override - public String getDescription() { - return fDesc; - } - - @Override - public NsaAcl getReaderAcl() { - return fReaders; - } - - @Override - public NsaAcl getWriterAcl() { - return fWriters; - } - - @Override - public void checkUserRead(NsaApiKey user) throws AccessDeniedException { - NsaAclUtils.checkUserAccess ( fOwner, getReaderAcl(), user ); - } - - @Override - public void checkUserWrite(NsaApiKey user) throws AccessDeniedException { - NsaAclUtils.checkUserAccess ( fOwner, getWriterAcl(), user ); - } - - @Override - public void permitWritesFromUser(String pubId, NsaApiKey asUser) - throws ConfigDbException, AccessDeniedException { - updateAcl(asUser, false, true, pubId); - } - - @Override - public void denyWritesFromUser(String pubId, NsaApiKey asUser) throws ConfigDbException, AccessDeniedException { - updateAcl(asUser, false, false, pubId); - } - - @Override - public void permitReadsByUser(String consumerId, NsaApiKey asUser) - throws ConfigDbException, AccessDeniedException { - updateAcl(asUser, true, true, consumerId); - } - - @Override - public void denyReadsByUser(String consumerId, NsaApiKey asUser) - throws ConfigDbException, AccessDeniedException { - updateAcl(asUser, true, false, consumerId); - } - - private void updateAcl(NsaApiKey asUser, boolean reader, boolean add, String key) - throws ConfigDbException, AccessDeniedException{ - try - { - final NsaAcl acl = NsaAclUtils.updateAcl ( this, asUser, key, reader, add ); - - // we have to assume we have current data, or load it again. for the expected use - // case, assuming we can overwrite the data is fine. - final JSONObject o = new JSONObject (); - o.put ( "owner", fOwner ); - o.put ( "readers", safeSerialize ( reader ? acl : fReaders ) ); - o.put ( "writers", safeSerialize ( reader ? fWriters : acl ) ); - fConfigDb.store ( fBaseTopicData.getChild ( fName ), o.toString () ); - - log.info ( "ACL_UPDATE: " + asUser.getKey () + " " + ( add ? "added" : "removed" ) + ( reader?"subscriber":"publisher" ) + " " + key + " on " + fName ); - - } - catch ( ConfigDbException x ) - { - throw x; - } - catch ( AccessDeniedException x ) - { - throw x; - } - - } - - private JSONObject safeSerialize(NsaAcl acl) { - return acl == null ? 
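
Reviewer note: KafkaTopic above persists each topic's metadata as a JSON record holding owner, description, the transaction flag, and reader/writer ACLs as {"allowed": [...]} arrays, stored under the /topics config path. A sketch of writing and reading that record with org.json; the identifiers and values are illustrative only:

import org.json.JSONArray;
import org.json.JSONObject;

// Sketch of the per-topic JSON record read and written by KafkaTopic/updateAcl().
public class TopicMetadataSketch {
    public static void main(String[] args) {
        // What createTopicEntry()/updateAcl() store:
        JSONObject readers = new JSONObject().put("allowed", new JSONArray().put("subscriber-id"));
        JSONObject writers = new JSONObject().put("allowed", new JSONArray().put("publisher-id"));
        JSONObject record = new JSONObject()
                .put("owner", "owner-api-key")
                .put("description", "example topic")
                .put("txenabled", false)
                .put("readers", readers)
                .put("writers", writers);
        String stored = record.toString();   // persisted under /topics/<name>

        // What the KafkaTopic constructor reads back:
        JSONObject parsed = new JSONObject(stored);
        String owner = parsed.optString("owner", "");
        boolean txEnabled = parsed.optBoolean("txenabled", false);
        JSONArray allowedReaders = parsed.optJSONObject("readers").optJSONArray("allowed");
        System.out.println(owner + " tx=" + txEnabled + " readers=" + allowedReaders);
    }
}
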
null : acl.serialize(); - } - - private final String fName; - private final ConfigDb fConfigDb; - private final ConfigPath fBaseTopicData; - private final String fOwner; - private final String fDesc; - private final NsaAcl fReaders; - private final NsaAcl fWriters; - private boolean fTransactionEnabled; - - public boolean isTransactionEnabled() { - return fTransactionEnabled; - } - - @Override - public Set getOwners() { - final TreeSet owners = new TreeSet (); - owners.add ( fOwner ); - return owners; - } - } - -} diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java b/src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java deleted file mode 100644 index 4c9532b..0000000 --- a/src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java +++ /dev/null @@ -1,231 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.HashMap; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import com.att.dmf.mr.CambriaApiVersionInfo; -import com.att.dmf.mr.backends.MetricsSet; -import com.att.mr.apiServer.metrics.cambria.DMaaPMetricsSender; -import com.att.nsa.drumlin.till.nv.rrNvReadable; -import com.att.nsa.metrics.impl.CdmConstant; -import com.att.nsa.metrics.impl.CdmCounter; -import com.att.nsa.metrics.impl.CdmMetricsRegistryImpl; -import com.att.nsa.metrics.impl.CdmMovingAverage; -import com.att.nsa.metrics.impl.CdmRateTicker; -import com.att.nsa.metrics.impl.CdmSimpleMetric; -import com.att.nsa.metrics.impl.CdmStringConstant; -import com.att.nsa.metrics.impl.CdmTimeSince; - -/*@Component("dMaaPMetricsSet")*/ -/** - * Metrics related information - * - * @author anowarul.islam - * - */ -public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet { - - private final CdmStringConstant fVersion; - private final CdmConstant fStartTime; - private final CdmTimeSince fUpTime; - - private final CdmCounter fRecvTotal; - private final CdmRateTicker fRecvEpsInstant; - private final CdmRateTicker fRecvEpsShort; - private final CdmRateTicker fRecvEpsLong; - - private final CdmCounter fSendTotal; - private final CdmRateTicker fSendEpsInstant; - private final CdmRateTicker fSendEpsShort; - private final CdmRateTicker fSendEpsLong; - - private final CdmCounter fKafkaConsumerCacheMiss; - private final CdmCounter fKafkaConsumerCacheHit; - - private final CdmCounter fKafkaConsumerClaimed; - private final CdmCounter fKafkaConsumerTimeout; - - private final CdmSimpleMetric fFanOutRatio; - - private final HashMap fPathUseRates; - private final HashMap fPathAvgs; - - private rrNvReadable fSettings; - - private final ScheduledExecutorService fScheduler; - - /** - * Constructor initialization - * - * @param cs - */ - - public DMaaPMetricsSet(rrNvReadable cs) { - - fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion()); - super.putItem("version", fVersion); - - final long startTime = System.currentTimeMillis(); - final Date d = new Date(startTime); - final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d); - fStartTime = new CdmConstant(startTime / 1000, "Start Time (epoch); " + text); - super.putItem("startTime", fStartTime); - - fUpTime = new CdmTimeSince("seconds since start"); - super.putItem("upTime", fUpTime); - - fRecvTotal = new CdmCounter("Total events received since start"); - super.putItem("recvTotalEvents", fRecvTotal); - - fRecvEpsInstant = new CdmRateTicker("recv eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES); - super.putItem("recvEpsInstant", fRecvEpsInstant); - - fRecvEpsShort = new CdmRateTicker("recv eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES); - super.putItem("recvEpsShort", fRecvEpsShort); - - fRecvEpsLong = new CdmRateTicker("recv eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS); - super.putItem("recvEpsLong", fRecvEpsLong); - - fSendTotal = new CdmCounter("Total events sent since start"); - super.putItem("sendTotalEvents", fSendTotal); - - fSendEpsInstant = new CdmRateTicker("send eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES); - super.putItem("sendEpsInstant", fSendEpsInstant); - - fSendEpsShort = new CdmRateTicker("send eps (10 
mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES); - super.putItem("sendEpsShort", fSendEpsShort); - - fSendEpsLong = new CdmRateTicker("send eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS); - super.putItem("sendEpsLong", fSendEpsLong); - - fKafkaConsumerCacheMiss = new CdmCounter("Kafka Consumer Cache Misses"); - super.putItem("kafkaConsumerCacheMiss", fKafkaConsumerCacheMiss); - - fKafkaConsumerCacheHit = new CdmCounter("Kafka Consumer Cache Hits"); - super.putItem("kafkaConsumerCacheHit", fKafkaConsumerCacheHit); - - fKafkaConsumerClaimed = new CdmCounter("Kafka Consumers Claimed"); - super.putItem("kafkaConsumerClaims", fKafkaConsumerClaimed); - - fKafkaConsumerTimeout = new CdmCounter("Kafka Consumers Timedout"); - super.putItem("kafkaConsumerTimeouts", fKafkaConsumerTimeout); - - // FIXME: CdmLevel is not exactly a great choice - fFanOutRatio = new CdmSimpleMetric() { - @Override - public String getRawValueString() { - return getRawValue().toString(); - } - - @Override - public Number getRawValue() { - final double s = fSendTotal.getValue(); - final double r = fRecvTotal.getValue(); - return r == 0.0 ? 0.0 : s / r; - } - - @Override - public String summarize() { - return getRawValueString() + " sends per recv"; - } - - }; - super.putItem("fanOut", fFanOutRatio); - - // these are added to the metrics catalog as they're discovered - fPathUseRates = new HashMap(); - fPathAvgs = new HashMap(); - - fScheduler = Executors.newScheduledThreadPool(1); - } - - @Override - public void setupCambriaSender() { - DMaaPMetricsSender.sendPeriodically(fScheduler, this, "cambria.apinode.metrics.dmaap"); - } - - @Override - public void onRouteComplete(String name, long durationMs) { - CdmRateTicker ticker = fPathUseRates.get(name); - if (ticker == null) { - ticker = new CdmRateTicker("calls/min on path " + name + "", 1, TimeUnit.MINUTES, 1, TimeUnit.HOURS); - fPathUseRates.put(name, ticker); - super.putItem("pathUse_" + name, ticker); - } - ticker.tick(); - - CdmMovingAverage durs = fPathAvgs.get(name); - if (durs == null) { - durs = new CdmMovingAverage("ms avg duration on path " + name + ", last 10 minutes", 10, TimeUnit.MINUTES); - fPathAvgs.put(name, durs); - super.putItem("pathDurationMs_" + name, durs); - } - durs.tick(durationMs); - } - - @Override - public void publishTick(int amount) { - if (amount > 0) { - fRecvTotal.bumpBy(amount); - fRecvEpsInstant.tick(amount); - fRecvEpsShort.tick(amount); - fRecvEpsLong.tick(amount); - } - } - - @Override - public void consumeTick(int amount) { - if (amount > 0) { - fSendTotal.bumpBy(amount); - fSendEpsInstant.tick(amount); - fSendEpsShort.tick(amount); - fSendEpsLong.tick(amount); - } - } - - @Override - public void onKafkaConsumerCacheMiss() { - fKafkaConsumerCacheMiss.bump(); - } - - @Override - public void onKafkaConsumerCacheHit() { - fKafkaConsumerCacheHit.bump(); - } - - @Override - public void onKafkaConsumerClaimed() { - fKafkaConsumerClaimed.bump(); - } - - @Override - public void onKafkaConsumerTimeout() { - fKafkaConsumerTimeout.bump(); - } - -} diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java b/src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java deleted file mode 100644 index 963ff2d..0000000 --- a/src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * 
================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.security.Key; - - -import org.springframework.beans.factory.annotation.Autowired; - -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDb; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.configs.confimpl.EncryptingLayer; -import com.att.nsa.drumlin.till.nv.rrNvReadable; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.security.db.BaseNsaApiDbImpl; -import com.att.nsa.security.db.EncryptingApiDbImpl; -import com.att.nsa.security.db.NsaApiDb; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; -import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory; -import com.att.nsa.util.rrConvertor; - -/** - * - * @author anowarul.islam - * - */ -public class DMaaPNsaApiDb { - - - private DMaaPZkConfigDb cdb; - - //private static final Logger log = Logger - - private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPNsaApiDb.class); - -/** - * - * Constructor initialized - * @param settings - * @param cdb - */ - @Autowired - public DMaaPNsaApiDb(rrNvReadable settings, DMaaPZkConfigDb cdb) { - - this.setCdb(cdb); - } - /** - * - * @param settings - * @param cdb - * @return - * @throws ConfigDbException - * @throws missingReqdSetting - */ - public static NsaApiDb buildApiKeyDb( - rrNvReadable settings, ConfigDb cdb) throws ConfigDbException, - missingReqdSetting { - // Cambria uses an encrypted api key db - - - final String keyBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.key"); - - - - final String initVectorBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.iv"); - // if neither value was provided, don't encrypt api key db - if (keyBase64 == null && initVectorBase64 == null) { - log.info("This server is configured to use an unencrypted API key database. 
See the settings documentation."); - return new BaseNsaApiDbImpl<>(cdb, - new NsaSimpleApiKeyFactory()); - } else if (keyBase64 == null) { - // neither or both, otherwise something's goofed - throw new missingReqdSetting("cambria.secureConfig.key"); - } else if (initVectorBase64 == null) { - // neither or both, otherwise something's goofed - throw new missingReqdSetting("cambria.secureConfig.iv"); - } else { - log.info("This server is configured to use an encrypted API key database."); - final Key key = EncryptingLayer.readSecretKey(keyBase64); - final byte[] iv = rrConvertor.base64Decode(initVectorBase64); - return new EncryptingApiDbImpl<>(cdb, - new NsaSimpleApiKeyFactory(), key, iv); - } - } - - /** - * @return - * returns settings - */ - - - - - /** - * @param settings - * set settings - */ - - - - - /** - * @return - * returns cbd - */ - public DMaaPZkConfigDb getCdb() { - return cdb; - } - /** - * @param cdb - * set cdb - */ - public void setCdb(DMaaPZkConfigDb cdb) { - this.cdb = cdb; - } - - -} diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java b/src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java deleted file mode 100644 index 78a7426..0000000 --- a/src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java +++ /dev/null @@ -1,45 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import org.I0Itec.zkclient.ZkClient; -import org.springframework.beans.factory.annotation.Qualifier; - -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.nsa.drumlin.till.nv.rrNvReadable; - -/** - * Created for Zookeeper client which will read configuration and settings parameter - * @author nilanjana.maity - * - */ -public class DMaaPZkClient extends ZkClient { - - /** - * This constructor will get the settings value from rrNvReadable - * and ConfigurationReader's zookeeper connection - * @param settings - */ - public DMaaPZkClient(@Qualifier("propertyReader") rrNvReadable settings) { - super(ConfigurationReader.getMainZookeeperConnectionString()); - } -} diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java b/src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java deleted file mode 100644 index 5aa25fa..0000000 --- a/src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java +++ /dev/null @@ -1,51 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import org.springframework.beans.factory.annotation.Qualifier; - -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.nsa.configs.confimpl.ZkConfigDb; -import com.att.nsa.drumlin.till.nv.rrNvReadable; - -/** - * Provide the zookeeper config db connection - * @author nilanjana.maity - * - */ -public class DMaaPZkConfigDb extends ZkConfigDb { - /** - * This Constructor will provide the configuration details from the property reader - * and DMaaPZkClient - * @param zk - * @param settings - */ - public DMaaPZkConfigDb(@Qualifier("dMaaPZkClient") DMaaPZkClient zk, - @Qualifier("propertyReader") rrNvReadable settings) { - - - super(ConfigurationReader.getMainZookeeperConnectionString(),ConfigurationReader.getMainZookeeperConnectionSRoot()); - - } - - -} diff --git a/src/main/java/com/att/dmf/mr/beans/LogDetails.java b/src/main/java/com/att/dmf/mr/beans/LogDetails.java deleted file mode 100644 index b7fb325..0000000 --- a/src/main/java/com/att/dmf/mr/beans/LogDetails.java +++ /dev/null @@ -1,214 +0,0 @@ -/** - * - */ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.util.Date; - -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.utils.Utils; - -/** - * @author muzainulhaque.qazi - * - */ - -public class LogDetails { - - private String publisherId; - private String topicId; - private String subscriberGroupId; - private String subscriberId; - private String publisherIp; - private String messageBatchId; - private String messageSequence; - private String messageTimestamp; - private String consumeTimestamp; - private String transactionIdTs; - private String serverIp; - - private long messageLengthInBytes; - private long totalMessageCount; - - private boolean transactionEnabled; - /** - * This is for transaction enabled logging details - * - */ - public LogDetails() { - super(); - } - - public String getTransactionId() { - StringBuilder transactionId = new StringBuilder(); - transactionId.append(transactionIdTs); - transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR); - transactionId.append(publisherIp); - transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR); - transactionId.append(messageBatchId); - transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR); - transactionId.append(messageSequence); - - return transactionId.toString(); - } - - public String getPublisherId() { - return publisherId; - } - - public void setPublisherId(String publisherId) { - this.publisherId = publisherId; - } - - public String getTopicId() { - return topicId; - } - - public void setTopicId(String topicId) { - this.topicId = topicId; - } - - public String getSubscriberGroupId() { - return subscriberGroupId; - } - - public void setSubscriberGroupId(String subscriberGroupId) { - this.subscriberGroupId = subscriberGroupId; - } - - public String getSubscriberId() { - return subscriberId; - } - - public void setSubscriberId(String subscriberId) { - this.subscriberId = subscriberId; - } - - public String getPublisherIp() { - return publisherIp; - } - - public void setPublisherIp(String publisherIp) { - this.publisherIp = publisherIp; - } - - public String getMessageBatchId() { - return messageBatchId; - } - - public void setMessageBatchId(Long messageBatchId) { - this.messageBatchId = Utils.getFromattedBatchSequenceId(messageBatchId); - } - - public String getMessageSequence() { - return messageSequence; - } - - public void setMessageSequence(String messageSequence) { - this.messageSequence = messageSequence; - } - - public String getMessageTimestamp() { - return messageTimestamp; - } - - public void setMessageTimestamp(String messageTimestamp) { - this.messageTimestamp = messageTimestamp; - } - - public String getPublishTimestamp() { - return Utils.getFormattedDate(new Date()); - } - - public String getConsumeTimestamp() { - return consumeTimestamp; - } - - public void setConsumeTimestamp(String consumeTimestamp) { - this.consumeTimestamp = consumeTimestamp; - } - - public long getMessageLengthInBytes() { - return messageLengthInBytes; - } - - public void setMessageLengthInBytes(long messageLengthInBytes) { - this.messageLengthInBytes = messageLengthInBytes; - } - - public long getTotalMessageCount() { - return totalMessageCount; - } - - public void setTotalMessageCount(long totalMessageCount) { - this.totalMessageCount = totalMessageCount; - } - - public boolean isTransactionEnabled() { - return transactionEnabled; - } - - public void setTransactionEnabled(boolean transactionEnabled) { - 
this.transactionEnabled = transactionEnabled; - } - - public String getTransactionIdTs() { - return transactionIdTs; - } - - public void setTransactionIdTs(String transactionIdTs) { - this.transactionIdTs = transactionIdTs; - } - - public String getPublisherLogDetails() { - - StringBuilder buffer = new StringBuilder(); - buffer.append("[publisherId=" + publisherId); - buffer.append(", topicId=" + topicId); - buffer.append(", messageTimestamp=" + messageTimestamp); - buffer.append(", publisherIp=" + publisherIp); - buffer.append(", messageBatchId=" + messageBatchId); - buffer.append(", messageSequence=" + messageSequence ); - buffer.append(", messageLengthInBytes=" + messageLengthInBytes); - buffer.append(", transactionEnabled=" + transactionEnabled); - buffer.append(", transactionId=" + getTransactionId()); - buffer.append(", publishTimestamp=" + getPublishTimestamp()); - buffer.append(", serverIp=" + getServerIp()+"]"); - return buffer.toString(); - - } - - public String getServerIp() { - return serverIp; - } - - public void setServerIp(String serverIp) { - this.serverIp = serverIp; - } - - public void setMessageBatchId(String messageBatchId) { - this.messageBatchId = messageBatchId; - } - -} diff --git a/src/main/java/com/att/dmf/mr/beans/TopicBean.java b/src/main/java/com/att/dmf/mr/beans/TopicBean.java deleted file mode 100644 index a397921..0000000 --- a/src/main/java/com/att/dmf/mr/beans/TopicBean.java +++ /dev/null @@ -1,155 +0,0 @@ -/** - * - */ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.beans; - -import java.io.Serializable; - -import javax.xml.bind.annotation.XmlRootElement; - -/** - * @author muzainulhaque.qazi - * - */ -@XmlRootElement -public class TopicBean implements Serializable { - - private static final long serialVersionUID = -8620390377775457949L; - private String topicName; - private String topicDescription; - - private int partitionCount; - private int replicationCount; - - private boolean transactionEnabled; - - /** - * constructor - */ - public TopicBean() { - super(); - } - - /** - * constructor initialization with topic details name, description, - * partition, replication, transaction - * - * @param topicName - * @param description - * @param partitionCount - * @param replicationCount - * @param transactionEnabled - */ - public TopicBean(String topicName, String topicDescription, int partitionCount, int replicationCount, - boolean transactionEnabled) { - super(); - this.topicName = topicName; - this.topicDescription = topicDescription; - this.partitionCount = partitionCount; - this.replicationCount = replicationCount; - this.transactionEnabled = transactionEnabled; - } - - /** - * @return - * returns topic name which is of String type - */ - public String getTopicName() { - return topicName; - } - - /** - * @param topicName - * set topic name - */ - public void setTopicName(String topicName) { - this.topicName = topicName; - } - - - /** - * @return - * returns partition count which is of int type - */ - public int getPartitionCount() { - return partitionCount; - } - - /** - * @param partitionCount - * set partition Count - */ - public void setPartitionCount(int partitionCount) { - this.partitionCount = partitionCount; - } - - /** - * @return - * returns replication count which is of int type - */ - public int getReplicationCount() { - return replicationCount; - } - - /** - * @param - * set replication count which is of int type - */ - public void setReplicationCount(int replicationCount) { - this.replicationCount = replicationCount; - } - - /** - * @return - * returns boolean value which indicates whether transaction is Enabled - */ - public boolean isTransactionEnabled() { - return transactionEnabled; - } - - /** - * @param - * sets boolean value which indicates whether transaction is Enabled - */ - public void setTransactionEnabled(boolean transactionEnabled) { - this.transactionEnabled = transactionEnabled; - } - - /** - * - * @return returns description which is of String type - */ - public String getTopicDescription() { - return topicDescription; - } - /** - * - * @param topicDescription - * set description which is of String type - */ - public void setTopicDescription(String topicDescription) { - this.topicDescription = topicDescription; - } - -} diff --git a/src/main/java/com/att/dmf/mr/constants/CambriaConstants.java b/src/main/java/com/att/dmf/mr/constants/CambriaConstants.java deleted file mode 100644 index cb6653c..0000000 --- a/src/main/java/com/att/dmf/mr/constants/CambriaConstants.java +++ /dev/null @@ -1,126 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.constants; - -import com.att.dmf.mr.utils.Utils; - -/** - * This is the constant files for all the property or parameters. - * @author nilanjana.maity - * - */ -public interface CambriaConstants { - - String CAMBRIA = "Cambria"; - String DMAAP = "DMaaP"; - - String kDefault_ZkRoot = "/fe3c/cambria"; - - String kSetting_ZkConfigDbRoot = "config.zk.root"; - String kDefault_ZkConfigDbRoot = kDefault_ZkRoot + "/config"; -String msgRtr_prop="MsgRtrApi.properties"; - String kBrokerType = "broker.type"; - - /** - * value to use to signal kafka broker type. - */ - String kBrokerType_Kafka = "kafka"; - String kBrokerType_Memory = "memory"; - String kSetting_AdminSecret = "authentication.adminSecret"; - - String kSetting_ApiNodeIdentifier = "cambria.api.node.identifier"; - - /** - * value to use to signal max empty poll per minute - */ - String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute"; - String kSetting_MaxPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute"; - double kDefault_MaxEmptyPollsPerMinute = 10.0; - - String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms"; - String kSetting_SleepMsRealOnRateLimit = "cambria.rateLimitActual.delay.ms"; - long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute ); - - String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes"; - int kDefault_RateLimitWindowLength = 5; - - String kConfig = "c"; - - String kSetting_Port = "cambria.service.port"; - /** - * value to use to signal default port - */ - int kDefault_Port = 3904; - - String kSetting_MaxThreads = "tomcat.maxthreads"; - int kDefault_MaxThreads = -1; - - - - //String kDefault_TomcatProtocolClass = Http11NioProtocol.class.getName (); - - String kSetting_ZkConfigDbServers = "config.zk.servers"; - - /** - * value to indicate localhost port number - */ - String kDefault_ZkConfigDbServers = "localhost:2181"; - - /** - * value to use to signal Session time out - */ - String kSetting_ZkSessionTimeoutMs = "cambria.consumer.cache.zkSessionTimeout"; - int kDefault_ZkSessionTimeoutMs = 20 * 1000; - - /** - * value to use to signal connection time out - */ - String kSetting_ZkConnectionTimeoutMs = "cambria.consumer.cache.zkConnectionTimeout"; - int kDefault_ZkConnectionTimeoutMs = 5 * 1000; - - String TRANSACTION_ID_SEPARATOR = "::"; - - /** - * value to use to signal there's no timeout on the consumer request. - */ - public static final int kNoTimeout = 10000; - - /** - * value to use to signal no limit in the number of messages returned. 
- */ - public static final int kNoLimit = 0; - - /** - * value to use to signal that the caller wants the next set of events - */ - public static final int kNextOffset = -1; - - /** - * value to use to signal there's no filter on the response stream. - */ - public static final String kNoFilter = ""; - - //Added for Metric publish - public static final int kStdCambriaServicePort = 3904; - public static final String kBasePath = "/events/"; - -} diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java b/src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java deleted file mode 100644 index de66617..0000000 --- a/src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java +++ /dev/null @@ -1,42 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.exception; - -import com.att.dmf.mr.CambriaApiException; - -public class DMaaPAccessDeniedException extends CambriaApiException{ - - - - public DMaaPAccessDeniedException(ErrorResponse errRes) { - super(errRes); - - } - - /** - * - */ - private static final long serialVersionUID = 1L; - - - -} diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java b/src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java deleted file mode 100644 index 304c15b..0000000 --- a/src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java +++ /dev/null @@ -1,94 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.exception; - -import javax.inject.Singleton; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import org.apache.http.HttpStatus; -import org.springframework.beans.factory.annotation.Autowired; - -import com.att.dmf.mr.CambriaApiException; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * Exception Mapper class to handle - * CambriaApiException - * @author rajashree.khare - * - */ -@Provider -@Singleton -public class DMaaPCambriaExceptionMapper implements ExceptionMapper{ - -private ErrorResponse errRes; - - -private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPCambriaExceptionMapper.class); - - @Autowired - private DMaaPErrorMessages msgs; - - public DMaaPCambriaExceptionMapper() { - super(); - LOGGER.info("Cambria Exception Mapper Created.."); - } - - @Override - public Response toResponse(CambriaApiException ex) { - - LOGGER.info("Reached Cambria Exception Mapper.."); - - /** - * Cambria Generic Exception - */ - if(ex instanceof CambriaApiException) - { - - errRes = ex.getErrRes(); - if(errRes!=null) { - - return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) - .build(); - } - else - { - return Response.status(ex.getStatus()).entity(ex.getMessage()).type(MediaType.APPLICATION_JSON) - .build(); - } - - - } - else - { - errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), msgs.getServerUnav()); - return Response.status(HttpStatus.SC_EXPECTATION_FAILED).entity(errRes).type(MediaType.APPLICATION_JSON).build(); - } - - } - - -} diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java b/src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java deleted file mode 100644 index 409aa60..0000000 --- a/src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java +++ /dev/null @@ -1,248 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.exception; - -import javax.annotation.PostConstruct; - -import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Component; -import org.springframework.web.context.support.SpringBeanAutowiringSupport; - -/** - * This Class reads the error message properties - * from the properties file - * @author rajashree.khare - * - */ -@Component -public class DMaaPErrorMessages { - - - - - //@Value("${resource.not.found}") - private String notFound="The requested resource was not found.Please verify the URL and try again"; - -// @Value("${server.unavailable}") - private String serverUnav="Server is temporarily unavailable or busy.Try again later, or try another server in the cluster."; - -// @Value("${http.method.not.allowed}") - private String methodNotAllowed="The specified HTTP method is not allowed for the requested resource.Enter a valid HTTP method and try again."; - - //@Value("${incorrect.request.json}") - private String badRequest="Incorrect JSON object. Please correct the JSON format and try again."; - -// @Value("${network.time.out}") - private String nwTimeout="Connection to the DMaaP MR was timed out.Please try again."; - - //@Value("${get.topic.failure}") - private String topicsfailure="Failed to retrieve list of all topics."; - - //@Value("${not.permitted.access.1}") - private String notPermitted1="Access Denied.User does not have permission to perform"; - - //@Value("${not.permitted.access.2}") - private String notPermitted2="operation on Topic:"; - - //@Value("${get.topic.details.failure}") - private String topicDetailsFail="Failed to retrieve details of topic:"; - - //@Value("${create.topic.failure}") - private String createTopicFail="Failed to create topic:"; - - //@Value("${delete.topic.failure}") - private String deleteTopicFail="Failed to delete topic:"; - - //@Value("${incorrect.json}") - private String incorrectJson="Incorrect JSON object.Could not parse JSON. Please correct the JSON format and try again."; - - //@Value("${consume.msg.error}") - private String consumeMsgError="Error while reading data from topic."; - - //@Value("${publish.msg.error}") - private String publishMsgError="Error while publishing data to topic."; - - - //@Value("${publish.msg.count}") - private String publishMsgCount="Successfully published number of messages :"; - - - //@Value("${authentication.failure}") - private String authFailure="Access Denied: Invalid Credentials. 
Enter a valid MechId and Password and try again."; - //@Value("${msg_size_exceeds}") - private String msgSizeExceeds="Message size exceeds the default size."; - - - //@Value("${topic.not.exist}") - private String topicNotExist="No such topic exists."; - - public String getMsgSizeExceeds() { - return msgSizeExceeds; - } - - public void setMsgSizeExceeds(String msgSizeExceeds) { - this.msgSizeExceeds = msgSizeExceeds; - } - - public String getNotFound() { - return notFound; - } - - public void setNotFound(String notFound) { - this.notFound = notFound; - } - - public String getServerUnav() { - return serverUnav; - } - - public void setServerUnav(String serverUnav) { - this.serverUnav = serverUnav; - } - - public String getMethodNotAllowed() { - return methodNotAllowed; - } - - public void setMethodNotAllowed(String methodNotAllowed) { - this.methodNotAllowed = methodNotAllowed; - } - - public String getBadRequest() { - return badRequest; - } - - public void setBadRequest(String badRequest) { - this.badRequest = badRequest; - } - - public String getNwTimeout() { - return nwTimeout; - } - - public void setNwTimeout(String nwTimeout) { - this.nwTimeout = nwTimeout; - } - - public String getNotPermitted1() { - return notPermitted1; - } - - public void setNotPermitted1(String notPermitted1) { - this.notPermitted1 = notPermitted1; - } - - public String getNotPermitted2() { - return notPermitted2; - } - - public void setNotPermitted2(String notPermitted2) { - this.notPermitted2 = notPermitted2; - } - - public String getTopicsfailure() { - return topicsfailure; - } - - public void setTopicsfailure(String topicsfailure) { - this.topicsfailure = topicsfailure; - } - - public String getTopicDetailsFail() { - return topicDetailsFail; - } - - public void setTopicDetailsFail(String topicDetailsFail) { - this.topicDetailsFail = topicDetailsFail; - } - - public String getCreateTopicFail() { - return createTopicFail; - } - - public void setCreateTopicFail(String createTopicFail) { - this.createTopicFail = createTopicFail; - } - - public String getIncorrectJson() { - return incorrectJson; - } - - public void setIncorrectJson(String incorrectJson) { - this.incorrectJson = incorrectJson; - } - - public String getDeleteTopicFail() { - return deleteTopicFail; - } - - public void setDeleteTopicFail(String deleteTopicFail) { - this.deleteTopicFail = deleteTopicFail; - } - - public String getConsumeMsgError() { - return consumeMsgError; - } - - public void setConsumeMsgError(String consumeMsgError) { - this.consumeMsgError = consumeMsgError; - } - - public String getPublishMsgError() { - return publishMsgError; - } - - public void setPublishMsgError(String publishMsgError) { - this.publishMsgError = publishMsgError; - } - - public String getPublishMsgCount() { - return publishMsgCount; - } - - public String getAuthFailure() { - return authFailure; - } - - public void setAuthFailure(String authFailure) { - this.authFailure = authFailure; - } - - public void setPublishMsgCount(String publishMsgCount) { - this.publishMsgCount = publishMsgCount; - } - - public String getTopicNotExist() { - return topicNotExist; - } - - public void setTopicNotExist(String topicNotExist) { - this.topicNotExist = topicNotExist; - } - - - @PostConstruct - public void init() { - SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(this); - } - -} diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java b/src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java deleted file mode 100644 index 593863a..0000000 
--- a/src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java +++ /dev/null @@ -1,93 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.exception; - -/** - * Define the Error Response Codes for MR - * using this enumeration - * @author rajashree.khare - * - */ -public enum DMaaPResponseCode { - - - /** - * GENERIC - */ - RESOURCE_NOT_FOUND(3001), - SERVER_UNAVAILABLE(3002), - METHOD_NOT_ALLOWED(3003), - GENERIC_INTERNAL_ERROR(1004), - /** - * AAF - */ - INVALID_CREDENTIALS(4001), - ACCESS_NOT_PERMITTED(4002), - UNABLE_TO_AUTHORIZE(4003), - /** - * PUBLISH AND SUBSCRIBE - */ - MSG_SIZE_EXCEEDS_BATCH_LIMIT(5001), - UNABLE_TO_PUBLISH(5002), - INCORRECT_BATCHING_FORMAT(5003), - MSG_SIZE_EXCEEDS_MSG_LIMIT(5004), - INCORRECT_JSON(5005), - CONN_TIMEOUT(5006), - PARTIAL_PUBLISH_MSGS(5007), - CONSUME_MSG_ERROR(5008), - PUBLISH_MSG_ERROR(5009), - RETRIEVE_TRANSACTIONS(5010), - RETRIEVE_TRANSACTIONS_DETAILS(5011), - TOO_MANY_REQUESTS(5012), - - RATE_LIMIT_EXCEED(301), - - /** - * TOPICS - */ - GET_TOPICS_FAIL(6001), - GET_TOPICS_DETAILS_FAIL(6002), - CREATE_TOPIC_FAIL(6003), - DELETE_TOPIC_FAIL(6004), - GET_PUBLISHERS_BY_TOPIC(6005), - GET_CONSUMERS_BY_TOPIC(6006), - PERMIT_PUBLISHER_FOR_TOPIC(6007), - REVOKE_PUBLISHER_FOR_TOPIC(6008), - PERMIT_CONSUMER_FOR_TOPIC(6009), - REVOKE_CONSUMER_FOR_TOPIC(6010), - GET_CONSUMER_CACHE(6011), - DROP_CONSUMER_CACHE(6012), - GET_METRICS_ERROR(6013), - GET_BLACKLIST(6014), - ADD_BLACKLIST(6015), - REMOVE_BLACKLIST(6016), - TOPIC_NOT_IN_AAF(6017); - private int responseCode; - - public int getResponseCode() { - return responseCode; - } - private DMaaPResponseCode (final int code) { - responseCode = code; - } - -} diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java b/src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java deleted file mode 100644 index db691bd..0000000 --- a/src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java +++ /dev/null @@ -1,137 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.exception; - -import javax.inject.Singleton; -import javax.ws.rs.BadRequestException; -import javax.ws.rs.InternalServerErrorException; -import javax.ws.rs.NotAllowedException; -import javax.ws.rs.NotAuthorizedException; -import javax.ws.rs.NotFoundException; -import javax.ws.rs.ServiceUnavailableException; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.ext.ExceptionMapper; -import javax.ws.rs.ext.Provider; - -import org.apache.http.HttpStatus; - -import org.springframework.beans.factory.annotation.Autowired; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * Exception Mapper class to handle - * Jersey Exceptions - * @author rajashree.khare - * - */ -@Provider -@Singleton -public class DMaaPWebExceptionMapper implements ExceptionMapper{ - - - private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPWebExceptionMapper.class); - private ErrorResponse errRes; - - @Autowired - private DMaaPErrorMessages msgs; - - public DMaaPWebExceptionMapper() { - super(); - LOGGER.info("WebException Mapper Created.."); - } - - @Override - public Response toResponse(WebApplicationException ex) { - - LOGGER.info("Reached WebException Mapper"); - - /** - * Resource Not Found - */ - if(ex instanceof NotFoundException) - { - errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),msgs.getNotFound()); - - LOGGER.info(errRes.toString()); - - return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) - .build(); - - } - - if(ex instanceof InternalServerErrorException) - { - errRes = new ErrorResponse(HttpStatus.SC_INTERNAL_SERVER_ERROR,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav()); - - LOGGER.info(errRes.toString()); - return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) - .build(); - - } - - if(ex instanceof NotAuthorizedException) - { - errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),msgs.getAuthFailure()); - - LOGGER.info(errRes.toString()); - return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) - .build(); - } - - if(ex instanceof BadRequestException) - { - errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),msgs.getBadRequest()); - - LOGGER.info(errRes.toString()); - return 
Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) - .build(); - } - if(ex instanceof NotAllowedException) - { - errRes = new ErrorResponse(HttpStatus.SC_METHOD_NOT_ALLOWED,DMaaPResponseCode.METHOD_NOT_ALLOWED.getResponseCode(),msgs.getMethodNotAllowed()); - - LOGGER.info(errRes.toString()); - return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) - .build(); - } - - if(ex instanceof ServiceUnavailableException) - { - errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav()); - - LOGGER.info(errRes.toString()); - return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) - .build(); - } - - - return Response.serverError().build(); - } - - - - -} diff --git a/src/main/java/com/att/dmf/mr/exception/ErrorResponse.java b/src/main/java/com/att/dmf/mr/exception/ErrorResponse.java deleted file mode 100644 index c92cadd..0000000 --- a/src/main/java/com/att/dmf/mr/exception/ErrorResponse.java +++ /dev/null @@ -1,135 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.exception; -import org.json.JSONObject; -/** - * Represents the Error Response Object - * that is rendered as a JSON object when - * an exception or error occurs on MR Rest Service. 
- * @author rajashree.khare - * - */ -//@XmlRootElement -public class ErrorResponse { - - private int httpStatusCode; - private int mrErrorCode; - private String errorMessage; - private String helpURL; - private String statusTs; - private String topic; - private String publisherId; - private String publisherIp; - private String subscriberId; - private String subscriberIp; - - - public ErrorResponse(int httpStatusCode, int mrErrorCode, - String errorMessage, String helpURL, String statusTs, String topic, - String publisherId, String publisherIp, String subscriberId, - String subscriberIp) { - super(); - this.httpStatusCode = httpStatusCode; - this.mrErrorCode = mrErrorCode; - this.errorMessage = errorMessage; - this.helpURL = "http://onap.readthedocs.io"; - this.statusTs = statusTs; - this.topic = topic; - this.publisherId = publisherId; - this.publisherIp = publisherIp; - this.subscriberId = subscriberId; - this.subscriberIp = subscriberIp; - } - - public ErrorResponse(int httpStatusCode, int mrErrorCode, - String errorMessage) { - super(); - this.httpStatusCode = httpStatusCode; - this.mrErrorCode = mrErrorCode; - this.errorMessage = errorMessage; - this.helpURL = "http://onap.readthedocs.io"; - - } - - public int getHttpStatusCode() { - return httpStatusCode; - } - - public void setHttpStatusCode(int httpStatusCode) { - this.httpStatusCode = httpStatusCode; - } - - public int getMrErrorCode() { - return mrErrorCode; - } - - - public void setMrErrorCode(int mrErrorCode) { - this.mrErrorCode = mrErrorCode; - } - - - public String getErrorMessage() { - return errorMessage; - } - - public void setErrorMessage(String errorMessage) { - this.errorMessage = errorMessage; - } - - public String getHelpURL() { - return helpURL; - } - - public void setHelpURL(String helpURL) { - this.helpURL = helpURL; - } - - @Override - public String toString() { - return "ErrorResponse {\"httpStatusCode\":\"" + httpStatusCode - + "\", \"mrErrorCode\":\"" + mrErrorCode + "\", \"errorMessage\":\"" - + errorMessage + "\", \"helpURL\":\"" + helpURL + "\", \"statusTs\":\""+statusTs+"\"" - + ", \"topicId\":\""+topic+"\", \"publisherId\":\""+publisherId+"\"" - + ", \"publisherIp\":\""+publisherIp+"\", \"subscriberId\":\""+subscriberId+"\"" - + ", \"subscriberIp\":\""+subscriberIp+"\"}"; - } - - public String getErrMapperStr1() { - return "ErrorResponse [httpStatusCode=" + httpStatusCode + ", mrErrorCode=" + mrErrorCode + ", errorMessage=" - + errorMessage + ", helpURL=" + helpURL + "]"; - } - - - - public JSONObject getErrMapperStr() { - JSONObject o = new JSONObject(); - o.put("status", getHttpStatusCode()); - o.put("mrstatus", getMrErrorCode()); - o.put("message", getErrorMessage()); - o.put("helpURL", getHelpURL()); - return o; - } - - - -} diff --git a/src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java b/src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java deleted file mode 100644 index 64b20e8..0000000 --- a/src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java +++ /dev/null @@ -1,64 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.listener; - -import javax.servlet.ServletContextEvent; -import javax.servlet.ServletContextListener; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * This is the Cambria Servlet Context Listner which helpes while loading the app which provide the endpoints - * @author nilanjana.maity - * - */ -public class CambriaServletContextListener implements ServletContextListener { - - DME2EndPointLoader loader = DME2EndPointLoader.getInstance(); - - private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaServletContextListener.class); - - - @Override - - /** - * contextDestroyed() loads unpublished end points - * @param arg0 - */ - public void contextDestroyed(ServletContextEvent arg0) { - log.info("CambriaServletContextListener contextDestroyed"); - - loader.unPublishEndPoints(); - } - - @Override - /** - * contextInitialized() loads published end points - * @param arg0 - */ - public void contextInitialized(ServletContextEvent arg0) { - log.info("CambriaServletContextListener contextInitialized"); - loader.publishEndPoints(); - } - -} diff --git a/src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java b/src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java deleted file mode 100644 index f61b6ea..0000000 --- a/src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java +++ /dev/null @@ -1,123 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.listener; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -import com.att.aft.dme2.manager.registry.DME2EndpointRegistry; -import com.att.aft.dme2.api.DME2Exception; -import com.att.aft.dme2.api.DME2Manager; -import com.att.dmf.mr.service.impl.EventsServiceImpl; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * - * @author anowarul.islam - * - */ -public class DME2EndPointLoader { - - private String latitude; - private String longitude; - private String version; - private String serviceName; - private String env; - private String routeOffer; - private String hostName; - private String port; - private String contextPath; - private String protocol; - private String serviceURL; - private static DME2EndPointLoader loader = new DME2EndPointLoader(); - - private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class); - private DME2EndPointLoader() { - } - - public static DME2EndPointLoader getInstance() { - return loader; - } - - /** - * publishing endpoints - */ - public void publishEndPoints() { - - try { - InputStream input = this.getClass().getResourceAsStream("/endpoint.properties"); - Properties props = new Properties(); - props.load(input); - - latitude = props.getProperty("Latitude"); - longitude = props.getProperty("Longitude"); - version = props.getProperty("Version"); - serviceName = props.getProperty("ServiceName"); - env = props.getProperty("Environment"); - routeOffer = props.getProperty("RouteOffer"); - hostName = props.getProperty("HostName"); - port = props.getProperty("Port"); - contextPath = props.getProperty("ContextPath"); - protocol = props.getProperty("Protocol"); - - System.setProperty("AFT_LATITUDE", latitude); - System.setProperty("AFT_LONGITUDE", longitude); - System.setProperty("AFT_ENVIRONMENT", "AFTUAT"); - - serviceURL = "service=" + serviceName + "/" + "version=" + version + "/" + "envContext=" + env + "/" - + "routeOffer=" + routeOffer; - - DME2Manager manager = new DME2Manager("testEndpointPublish", props); - manager.setClientCredentials("sh301n", ""); - DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry(); - // Publish API takes service name, context path, hostname, port and - // protocol as args - svcRegistry.publish(serviceURL, contextPath, hostName, Integer.parseInt(port), protocol); - - } catch (IOException | DME2Exception e) { - LOG.error("Failed due to :" + e); - } - - } -/** - * unpublishing endpoints - */ - public void unPublishEndPoints() { - - DME2Manager manager; - try { - System.setProperty("AFT_LATITUDE", latitude); - System.setProperty("AFT_LONGITUDE", longitude); - System.setProperty("AFT_ENVIRONMENT", "AFTUAT"); - - manager = DME2Manager.getDefaultInstance(); - DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry(); - svcRegistry.unpublish(serviceURL, hostName, Integer.parseInt(port)); - } catch (DME2Exception e) { - LOG.error("Failed due to DME2Exception" + e); - } - - } - -} diff --git a/src/main/java/com/att/dmf/mr/metabroker/Broker.java b/src/main/java/com/att/dmf/mr/metabroker/Broker.java deleted file mode 100644 index e5fe8da..0000000 --- a/src/main/java/com/att/dmf/mr/metabroker/Broker.java +++ /dev/null @@ -1,92 +0,0 @@ -/******************************************************************************* - * 
============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metabroker; - -import java.util.List; - -import com.att.dmf.mr.CambriaApiException; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -/** - * A broker interface to manage metadata around topics, etc. - * - * @author peter - * - */ -public interface Broker { - /** - * - * @author anowarul.islam - * - */ - public class TopicExistsException extends Exception { - /** - * - * @param topicName - */ - public TopicExistsException(String topicName) { - super("Topic " + topicName + " exists."); - } - - private static final long serialVersionUID = 1L; - } - - /** - * Get all topics in the underlying broker. - * - * @return - * @throws ConfigDbException - */ - List getAllTopics() throws ConfigDbException; - - /** - * Get a specific topic from the underlying broker. - * - * @param topic - * @return a topic, or null - */ - Topic getTopic(String topic) throws ConfigDbException; - - /** - * create a topic - * - * @param topic - * @param description - * @param ownerApiKey - * @param partitions - * @param replicas - * @param transactionEnabled - * @return - * @throws TopicExistsException - * @throws CambriaApiException - */ - Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas, - boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException; - - /** - * Delete a topic by name - * - * @param topic - */ - void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException; -} diff --git a/src/main/java/com/att/dmf/mr/metabroker/Broker1.java b/src/main/java/com/att/dmf/mr/metabroker/Broker1.java deleted file mode 100644 index e7d7f6c..0000000 --- a/src/main/java/com/att/dmf/mr/metabroker/Broker1.java +++ /dev/null @@ -1,95 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metabroker; - -import java.util.List; - -import com.att.dmf.mr.CambriaApiException; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -/** - * A broker interface to manage metadata around topics, etc. - * alternate for Broker1 to avoid this error in spring boot - *org.springframework.beans.factory.NoUniqueBeanDefinitionException: - * No qualifying bean of type [com.att.dmf.mr.metabroker.Broker] is defined: - * expected single matching bean but found 2: mmb,dMaaPKafkaMetaBroker - - * - */ -public interface Broker1 { - /** - * - * @author Ramkumar - * - */ - public class TopicExistsException extends Exception { - /** - * - * @param topicName - */ - public TopicExistsException(String topicName) { - super("Topic " + topicName + " exists."); - } - - private static final long serialVersionUID = 1L; - } - - /** - * Get all topics in the underlying broker. - * - * @return - * @throws ConfigDbException - */ - List getAllTopics() throws ConfigDbException; - - /** - * Get a specific topic from the underlying broker. - * - * @param topic - * @return a topic, or null - */ - Topic getTopic(String topic) throws ConfigDbException; - - /** - * create a topic - * - * @param topic - * @param description - * @param ownerApiKey - * @param partitions - * @param replicas - * @param transactionEnabled - * @return - * @throws TopicExistsException - * @throws CambriaApiException - */ - Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas, - boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException; - - /** - * Delete a topic by name - * - * @param topic - */ - void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException; -} diff --git a/src/main/java/com/att/dmf/mr/metabroker/Topic.java b/src/main/java/com/att/dmf/mr/metabroker/Topic.java deleted file mode 100644 index d191070..0000000 --- a/src/main/java/com/att/dmf/mr/metabroker/Topic.java +++ /dev/null @@ -1,133 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metabroker; - -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.NsaAcl; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.ReadWriteSecuredResource; -/** - * This is the interface for topic and all the topic related operations - * get topic name, owner, description, transactionEnabled etc. - * @author nilanjana.maity - * - */ -public interface Topic extends ReadWriteSecuredResource -{ - /** - * User defined exception for access denied while access the topic for Publisher and consumer - * @author nilanjana.maity - * - *//* - public class AccessDeniedException extends Exception - - *//** - * AccessDenied Description - *//* - - *//** - * AccessDenied Exception for the user while authenticating the user request - * @param user - *//* - - private static final long serialVersionUID = 1L; - }*/ - - /** - * Get this topic's name - * @return - */ - String getName (); - - /** - * Get the API key of the owner of this topic. - * @return - */ - String getOwner (); - - /** - * Get a description of the topic, as set by the owner at creation time. - * @return - */ - String getDescription (); - - /** - * If the topic is transaction enabled - * @return boolean true/false - */ - boolean isTransactionEnabled(); - - /** - * Get the ACL for reading on this topic. Can be null. - * @return - */ - NsaAcl getReaderAcl (); - - /** - * Get the ACL for writing on this topic. Can be null. - * @return - */ - NsaAcl getWriterAcl (); - - /** - * Check if this user can read the topic. Throw otherwise. Note that - * user may be null. - * @param user - */ - void checkUserRead ( NsaApiKey user ) throws AccessDeniedException; - - /** - * Check if this user can write to the topic. Throw otherwise. Note - * that user may be null. 
- * @param user - */ - void checkUserWrite ( NsaApiKey user ) throws AccessDeniedException; - - /** - * allow the given user to publish - * @param publisherId - * @param asUser - */ - void permitWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException; - - /** - * deny the given user from publishing - * @param publisherId - * @param asUser - */ - void denyWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException; - - /** - * allow the given user to read the topic - * @param consumerId - * @param asUser - */ - void permitReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException; - - /** - * deny the given user from reading the topic - * @param consumerId - * @param asUser - * @throws ConfigDbException - */ - void denyReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException; -} diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java deleted file mode 100644 index 45644b7..0000000 --- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java +++ /dev/null @@ -1,52 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metrics.publisher; - -import java.io.IOException; -import java.util.List; -import java.util.concurrent.TimeUnit; - -/** - * A Cambria batching publisher is a publisher with additional functionality - * for managing delayed sends. - * - * @author peter - * - */ -public interface CambriaBatchingPublisher extends CambriaPublisher -{ - /** - * Get the number of messages that have not yet been sent. - * @return the number of pending messages - */ - int getPendingMessageCount (); - - /** - * Close this publisher, sending any remaining messages. 
- * @param timeout an amount of time to wait for unsent messages to be sent - * @param timeoutUnits the time unit for the timeout arg - * @return a list of any unsent messages after the timeout - * @throws IOException - * @throws InterruptedException - */ - List close ( long timeout, TimeUnit timeoutUnits ) throws IOException, InterruptedException; -} diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java deleted file mode 100644 index 4b219b1..0000000 --- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java +++ /dev/null @@ -1,89 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metrics.publisher; - - - -// -import com.att.eelf.configuration.EELFLogger; - - -/** - * - * @author anowarul.islam - * - */ -public interface CambriaClient { - /** - * An exception at the Cambria layer. This is used when the HTTP transport - * layer returns a success code but the transaction is not completed as - * expected. - */ - public class CambriaApiException extends Exception { - /** - * - * @param msg - */ - public CambriaApiException(String msg) { - super(msg); - } - - /** - * - * @param msg - * @param t - */ - public CambriaApiException(String msg, Throwable t) { - super(msg, t); - } - - private static final long serialVersionUID = 1L; - } - - /** - * Optionally set the Logger to use - * - * @param log - */ - void logTo(EELFLogger log); - - /** - * Set the API credentials for this client connection. Subsequent calls will - * include authentication headers.who i - * - * @param apiKey - * @param apiSecret - */ - void setApiCredentials(String apiKey, String apiSecret); - - /** - * Remove API credentials, if any, on this connection. Subsequent calls will - * not include authentication headers. - */ - void clearApiCredentials(); - - /** - * Close this connection. Some client interfaces have additional close - * capability. 
- */ - void close(); -} diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java deleted file mode 100644 index 4a6ca81..0000000 --- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java +++ /dev/null @@ -1,52 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metrics.publisher; - -import java.io.IOException; - -/** - * This interface will provide fetch mechanism for consumer - * @author nilanjana.maity - * - */ -public interface CambriaConsumer extends CambriaClient -{ - /** - * Fetch a set of messages. The consumer's timeout and message limit are used if set in the constructor call. - - * @return a set of messages - * @throws IOException - */ - Iterable fetch () throws IOException; - - /** - * Fetch a set of messages with an explicit timeout and limit for this call. These values - * override any set in the constructor call. - * - * @param timeoutMs The amount of time in milliseconds that the server should keep the connection - * open while waiting for message traffic. Use -1 for default timeout (controlled on the server-side). - * @param limit A limit on the number of messages returned in a single call. Use -1 for no limit. - * @return a set messages - * @throws IOException if there's a problem connecting to the server - */ - Iterable fetch ( int timeoutMs, int limit ) throws IOException; -} diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java deleted file mode 100644 index 4020a6d..0000000 --- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java +++ /dev/null @@ -1,101 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
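To make the CambriaConsumer fetch contract above concrete, a minimal polling sketch follows. It assumes a consumer obtained from the client factory; the credentials and the timeout/limit values are illustrative, and message bodies are iterated as plain objects because the generic parameters are not visible in this rendering of the patch.

    import java.io.IOException;
    import com.att.dmf.mr.metrics.publisher.CambriaConsumer;

    public class ConsumerPollSketch {
        // Polls one batch with an explicit server-side timeout and message limit.
        void pollOnce(CambriaConsumer consumer) throws IOException {
            consumer.setApiCredentials("exampleApiKey", "exampleApiSecret"); // optional
            for (Object msg : consumer.fetch(10000, 200)) { // 10 s long-poll, at most 200 messages
                System.out.println(msg);
            }
        }
    }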
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metrics.publisher; - -import java.io.IOException; -import java.util.Collection; - -/** - * A Cambria publishing interface. - * - * @author peter - * - */ -public interface CambriaPublisher extends CambriaClient { - /** - * A simple message container - */ - public static class message { - /** - * - * @param partition - * @param msg - */ - public message(String partition, String msg) { - fPartition = partition == null ? "" : partition; - fMsg = msg; - if (fMsg == null) { - throw new IllegalArgumentException("Can't send a null message."); - } - } - - /** - * - * @param msg - */ - public message(message msg) { - this(msg.fPartition, msg.fMsg); - } - - /** - * declaring partition string - */ - public final String fPartition; - /** - * declaring fMsg String - */ - public final String fMsg; - } - - /** - * Send the given message using the given partition. - * - * @param partition - * @param msg - * @return the number of pending messages - * @throws IOException - */ - int send(String partition, String msg) throws IOException; - - /** - * Send the given message using its partition. - * - * @param msg - * @return the number of pending messages - * @throws IOException - */ - int send(message msg) throws IOException; - - /** - * Send the given messages using their partitions. - * - * @param msgs - * @return the number of pending messages - * @throws IOException - */ - int send(Collection msgs) throws IOException; - - /** - * Close this publisher. It's an error to call send() after close() - */ - void close(); -} diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java deleted file mode 100644 index 46dfa99..0000000 --- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java +++ /dev/null @@ -1,146 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
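The CambriaPublisher interface above returns the number of still-pending messages from each send() call. A minimal sketch, assuming a publisher instance obtained elsewhere (the partition and payload are illustrative):

    import java.io.IOException;
    import com.att.dmf.mr.metrics.publisher.CambriaPublisher;

    public class PublisherSendSketch {
        // Sends one message on a chosen partition and reports the pending count.
        void sendOne(CambriaPublisher pub) throws IOException {
            int pending = pub.send("partition-0", "{\"event\":\"example\"}");
            System.out.println(pending + " message(s) still pending");
            pub.close(); // it is an error to call send() after close()
        }
    }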
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metrics.publisher; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import org.apache.http.HttpHost; -/** - * - * @author anowarul.islam - * - */ -public class CambriaPublisherUtility -{ - public static final String kBasePath = "/events/"; - public static final int kStdCambriaServicePort = 3904; -/** - * - * Translates a string into application/x-www-form-urlencoded - * format using a specific encoding scheme. - * @param s - * @return - * - */ - public static String escape ( String s ) - { - try - { - return URLEncoder.encode ( s, "UTF-8"); - } - catch ( UnsupportedEncodingException e ) - { - throw new RuntimeException ( e ); - } - } -/** - * - * building url - * @param rawTopic - * @return - */ - public static String makeUrl ( String rawTopic ) - { - final String cleanTopic = escape ( rawTopic ); - - final StringBuffer url = new StringBuffer(). - append ( CambriaPublisherUtility.kBasePath ). - append ( cleanTopic ); - return url.toString (); - } -/** - * - * building consumerUrl - * @param topic - * @param rawConsumerGroup - * @param rawConsumerId - * @return - */ - public static String makeConsumerUrl ( String topic, String rawConsumerGroup, String rawConsumerId ) - { - final String cleanConsumerGroup = escape ( rawConsumerGroup ); - final String cleanConsumerId = escape ( rawConsumerId ); - return CambriaPublisherUtility.kBasePath + topic + "/" + cleanConsumerGroup + "/" + cleanConsumerId; - } - - /** - * Create a list of HttpHosts from an input list of strings. Input strings have - * host[:port] as format. If the port section is not provided, the default port is used. - * - * @param hosts - * @return a list of hosts - */ - public static List createHostsList(Collection hosts) - { - final ArrayList convertedHosts = new ArrayList<>(); - for ( String host : hosts ) - { - if ( host.length () == 0 ) continue; - convertedHosts.add ( hostForString ( host ) ); - } - return convertedHosts; - } - - /** - * Return an HttpHost from an input string. Input string has - * host[:port] as format. If the port section is not provided, the default port is used. - * - * @param hosts - * @return a list of hosts - * if host.length<1 throws IllegalArgumentException - * - */ - public static HttpHost hostForString ( String host ) - { - if ( host.length() < 1 ) throw new IllegalArgumentException ( "An empty host entry is invalid." ); - - String hostPart = host; - int port = kStdCambriaServicePort; - - final int colon = host.indexOf ( ':' ); - if ( colon == 0 ) throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid." 
); - if ( colon > 0 ) - { - hostPart = host.substring ( 0, colon ).trim(); - - final String portPart = host.substring ( colon + 1 ).trim(); - if ( portPart.length () > 0 ) - { - try - { - port = Integer.parseInt ( portPart ); - } - catch ( NumberFormatException x ) - { - throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid.", x ); - } - } - // else: use default port on "foo:" - } - - return new HttpHost ( hostPart, port ); - } -} diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java b/src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java deleted file mode 100644 index d7818de..0000000 --- a/src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java +++ /dev/null @@ -1,420 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metrics.publisher; - -import java.net.MalformedURLException; -import java.nio.channels.NotYetConnectedException; -import java.util.Collection; -import java.util.TreeSet; -import java.util.UUID; - -import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl; -import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher; - -/** - * A factory for Cambria clients.
- *
- * Use caution when selecting a consumer-creation factory method. If the call does not accept - * a consumer group name, it creates a consumer that is not restartable: - * if you stop your process and start it again, the client will NOT - * receive any messages it missed on the topic. If you need to ensure receipt of - * missed messages, you must use a consumer created with a group - * name and ID. (If you create multiple consumer processes in the same group, - * load is split across them; be sure to use a different ID for each instance.)
- *
- * Publishers - * - * @author peter - */ -public class DMaaPCambriaClientFactory { - /** - * Create a consumer instance with the default timeout and no limit on - * messages returned. This consumer operates as an independent consumer - * (i.e., not in a group) and is NOT re-startable across sessions. - * - * @param hostList - * A comma separated list of hosts to use to connect to Cambria. - * You can include port numbers (3904 is the default). For - * example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com" - * - * @param topic - * The topic to consume - * - * @return a consumer - */ - public static CambriaConsumer createConsumer(String hostList, String topic) { - return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList), - topic); - } - - /** - * Create a consumer instance with the default timeout and no limit on - * messages returned. This consumer operates as an independent consumer - * (i.e., not in a group) and is NOT re-startable across sessions. - * - * @param hostSet - * The host used in the URL to Cambria. Entries can be - * "host:port". - * @param topic - * The topic to consume - * - * @return a consumer - */ - public static CambriaConsumer createConsumer(Collection hostSet, - String topic) { - return createConsumer(hostSet, topic, null); - } - - /** - * Create a consumer instance with server-side filtering, the default - * timeout, and no limit on messages returned. This consumer operates as an - * independent consumer (i.e., not in a group) and is NOT re-startable - * across sessions. - * - * @param hostSet - * The host used in the URL to Cambria. Entries can be - * "host:port". - * @param topic - * The topic to consume - * @param filter - * a filter to use on the server side - * - * @return a consumer - */ - public static CambriaConsumer createConsumer(Collection hostSet, - String topic, String filter) { - return createConsumer(hostSet, topic, UUID.randomUUID().toString(), - "0", -1, -1, filter, null, null); - } - - /** - * Create a consumer instance with the default timeout, and no limit on - * messages returned. This consumer can operate in a logical group and is - * re-startable across sessions when you use the same group and ID on - * restart. - * - * @param hostSet - * The host used in the URL to Cambria. Entries can be - * "host:port". - * @param topic - * The topic to consume - * @param consumerGroup - * The name of the consumer group this consumer is part of - * @param consumerId - * The unique id of this consume in its group - * - * @return a consumer - */ - public static CambriaConsumer createConsumer(Collection hostSet, - final String topic, final String consumerGroup, - final String consumerId) { - return createConsumer(hostSet, topic, consumerGroup, consumerId, -1, -1); - } - - /** - * Create a consumer instance with the default timeout, and no limit on - * messages returned. This consumer can operate in a logical group and is - * re-startable across sessions when you use the same group and ID on - * restart. - * - * @param hostSet - * The host used in the URL to Cambria. Entries can be - * "host:port". - * @param topic - * The topic to consume - * @param consumerGroup - * The name of the consumer group this consumer is part of - * @param consumerId - * The unique id of this consume in its group - * @param timeoutMs - * The amount of time in milliseconds that the server should keep - * the connection open while waiting for message traffic. Use -1 - * for default timeout. 
- * @param limit - * A limit on the number of messages returned in a single call. - * Use -1 for no limit. - * - * @return a consumer - */ - public static CambriaConsumer createConsumer(Collection hostSet, - final String topic, final String consumerGroup, - final String consumerId, int timeoutMs, int limit) { - return createConsumer(hostSet, topic, consumerGroup, consumerId, - timeoutMs, limit, null, null, null); - } - - /** - * Create a consumer instance with the default timeout, and no limit on - * messages returned. This consumer can operate in a logical group and is - * re-startable across sessions when you use the same group and ID on - * restart. This consumer also uses server-side filtering. - * - * @param hostList - * A comma separated list of hosts to use to connect to Cambria. - * You can include port numbers (3904 is the default). For - * example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com" - * @param topic - * The topic to consume - * @param consumerGroup - * The name of the consumer group this consumer is part of - * @param consumerId - * The unique id of this consume in its group - * @param timeoutMs - * The amount of time in milliseconds that the server should keep - * the connection open while waiting for message traffic. Use -1 - * for default timeout. - * @param limit - * A limit on the number of messages returned in a single call. - * Use -1 for no limit. - * @param filter - * A Highland Park filter expression using only built-in filter - * components. Use null for "no filter". - * @param apiKey - * key associated with a user - * @param apiSecret - * of a user - * - * @return a consumer - */ - public static CambriaConsumer createConsumer(String hostList, - final String topic, final String consumerGroup, - final String consumerId, int timeoutMs, int limit, String filter, - String apiKey, String apiSecret) { - return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList), - topic, consumerGroup, consumerId, timeoutMs, limit, filter, - apiKey, apiSecret); - } - - /** - * Create a consumer instance with the default timeout, and no limit on - * messages returned. This consumer can operate in a logical group and is - * re-startable across sessions when you use the same group and ID on - * restart. This consumer also uses server-side filtering. - * - * @param hostSet - * The host used in the URL to Cambria. Entries can be - * "host:port". - * @param topic - * The topic to consume - * @param consumerGroup - * The name of the consumer group this consumer is part of - * @param consumerId - * The unique id of this consume in its group - * @param timeoutMs - * The amount of time in milliseconds that the server should keep - * the connection open while waiting for message traffic. Use -1 - * for default timeout. - * @param limit - * A limit on the number of messages returned in a single call. - * Use -1 for no limit. - * @param filter - * A Highland Park filter expression using only built-in filter - * components. Use null for "no filter". 
- * @param apiKey - * key associated with a user - * @param apiSecret - * of a user - * @return a consumer - */ - public static CambriaConsumer createConsumer(Collection hostSet, - final String topic, final String consumerGroup, - final String consumerId, int timeoutMs, int limit, String filter, - String apiKey, String apiSecret) { - if (sfMock != null) - return sfMock; - try { - return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup, - consumerId, timeoutMs, limit, filter, apiKey, apiSecret); - } catch (MalformedURLException e) { - - NotYetConnectedException exception=new NotYetConnectedException(); - exception.setStackTrace(e.getStackTrace()); - - throw exception ; - } - } - - /*************************************************************************/ - /*************************************************************************/ - /*************************************************************************/ - - /** - * Create a publisher that sends each message (or group of messages) - * immediately. Most applications should favor higher latency for much - * higher message throughput and the "simple publisher" is not a good - * choice. - * - * @param hostlist - * The host used in the URL to Cambria. Can be "host:port", can - * be multiple comma-separated entries. - * @param topic - * The topic on which to publish messages. - * @return a publisher - */ - public static CambriaBatchingPublisher createSimplePublisher( - String hostlist, String topic) { - return createBatchingPublisher(hostlist, topic, 1, 1); - } - - /** - * Create a publisher that batches messages. Be sure to close the publisher - * to send the last batch and ensure a clean shutdown. Message payloads are - * not compressed. - * - * @param hostlist - * The host used in the URL to Cambria. Can be "host:port", can - * be multiple comma-separated entries. - * @param topic - * The topic on which to publish messages. - * @param maxBatchSize - * The largest set of messages to batch - * @param maxAgeMs - * The maximum age of a message waiting in a batch - * - * @return a publisher - */ - public static CambriaBatchingPublisher createBatchingPublisher( - String hostlist, String topic, int maxBatchSize, long maxAgeMs) { - return createBatchingPublisher(hostlist, topic, maxBatchSize, maxAgeMs, - false); - } - - /** - * Create a publisher that batches messages. Be sure to close the publisher - * to send the last batch and ensure a clean shutdown. - * - * @param hostlist - * The host used in the URL to Cambria. Can be "host:port", can - * be multiple comma-separated entries. - * @param topic - * The topic on which to publish messages. - * @param maxBatchSize - * The largest set of messages to batch - * @param maxAgeMs - * The maximum age of a message waiting in a batch - * @param compress - * use gzip compression - * - * @return a publisher - */ - public static CambriaBatchingPublisher createBatchingPublisher( - String hostlist, String topic, int maxBatchSize, long maxAgeMs, - boolean compress) { - return createBatchingPublisher( - DMaaPCambriaConsumerImpl.stringToList(hostlist), topic, - maxBatchSize, maxAgeMs, compress); - } - - /** - * Create a publisher that batches messages. Be sure to close the publisher - * to send the last batch and ensure a clean shutdown. - * - * @param hostSet - * A set of hosts to be used in the URL to Cambria. Can be - * "host:port". Use multiple entries to enable failover. - * @param topic - * The topic on which to publish messages. 
- * @param maxBatchSize - * The largest set of messages to batch - * @param maxAgeMs - * The maximum age of a message waiting in a batch - * @param compress - * use gzip compression - * - * @return a publisher - */ - public static CambriaBatchingPublisher createBatchingPublisher( - String[] hostSet, String topic, int maxBatchSize, long maxAgeMs, - boolean compress) { - final TreeSet hosts = new TreeSet(); - for (String hp : hostSet) { - hosts.add(hp); - } - return createBatchingPublisher(hosts, topic, maxBatchSize, maxAgeMs, - compress); - } - - /** - * Create a publisher that batches messages. Be sure to close the publisher - * to send the last batch and ensure a clean shutdown. - * - * @param hostSet - * A set of hosts to be used in the URL to Cambria. Can be - * "host:port". Use multiple entries to enable failover. - * @param topic - * The topic on which to publish messages. - * @param maxBatchSize - * The largest set of messages to batch - * @param maxAgeMs - * The maximum age of a message waiting in a batch - * @param compress - * use gzip compression - * - * @return a publisher - */ - public static CambriaBatchingPublisher createBatchingPublisher( - Collection hostSet, String topic, int maxBatchSize, - long maxAgeMs, boolean compress) { - return new DMaaPCambriaSimplerBatchPublisher.Builder() - .againstUrls(hostSet).onTopic(topic) - .batchTo(maxBatchSize, maxAgeMs).compress(compress).build(); - } - - /** - * Create an identity manager client to work with API keys. - * - * @param hostSet - * A set of hosts to be used in the URL to Cambria. Can be - * "host:port". Use multiple entries to enable failover. - * @param apiKey - * Your API key - * @param apiSecret - * Your API secret - * @return an identity manager - */ - - - /** - * Create a topic manager for working with topics. - * - * @param hostSet - * A set of hosts to be used in the URL to Cambria. Can be - * "host:port". Use multiple entries to enable failover. - * @param apiKey - * Your API key - * @param apiSecret - * Your API secret - * @return a topic manager - */ - - - /** - * Inject a consumer. Used to support unit tests. - * - * @param cc - */ - public static void $testInject(CambriaConsumer cc) { - sfMock = cc; - } - - private static CambriaConsumer sfMock = null; -} diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java deleted file mode 100644 index 84576fc..0000000 --- a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java +++ /dev/null @@ -1,100 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
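Pulling the factory methods above together, a minimal end-to-end sketch follows: a restartable consumer (group plus ID), a batching publisher, and a bounded close that returns any undelivered messages. Host names, topic, group, and ID are illustrative.

    import java.util.List;
    import java.util.concurrent.TimeUnit;
    import com.att.dmf.mr.metrics.publisher.CambriaBatchingPublisher;
    import com.att.dmf.mr.metrics.publisher.CambriaConsumer;
    import com.att.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory;

    public class FactoryUsageSketch {
        void run() throws Exception {
            // Restartable consumer: the same group/ID on restart resumes missed messages.
            CambriaConsumer consumer = DMaaPCambriaClientFactory.createConsumer(
                    "host1.example.com:3904,host2.example.com", "org.onap.example.events",
                    "exampleGroup", "instance-1", 10000, 200, null, null, null);

            // Batching publisher: flush at 100 messages or after 1 second, no compression.
            CambriaBatchingPublisher pub = DMaaPCambriaClientFactory.createBatchingPublisher(
                    "host1.example.com:3904,host2.example.com", "org.onap.example.events",
                    100, 1000L, false);
            pub.send("partition-0", "{\"event\":\"example\"}");

            // Close with a bounded wait; anything returned was never delivered.
            List unsent = pub.close(10, TimeUnit.SECONDS);
        }
    }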
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.metrics.publisher.impl; - -import java.net.MalformedURLException; -import java.util.Collection; -import java.util.Set; -import java.util.TreeSet; -import java.util.concurrent.TimeUnit; - -import org.json.JSONArray; -import org.json.JSONException; - -import com.att.dmf.mr.constants.CambriaConstants; - -//import org.slf4j.LoggerFactory; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.apiClient.http.CacheUse; -import com.att.nsa.apiClient.http.HttpClient; - -/** - * - * @author anowarul.islam - * - */ -public class CambriaBaseClient extends HttpClient implements com.att.dmf.mr.metrics.publisher.CambriaClient -{ - protected CambriaBaseClient ( Collection hosts ) throws MalformedURLException - { - this ( hosts, null ); - } - - public CambriaBaseClient ( Collection hosts, String clientSignature ) throws MalformedURLException - { - - - - super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000); - - - fLog = EELFManager.getInstance().getLogger(this.getClass().getName()); - - } - - @Override - public void close () - { - } - - public Set jsonArrayToSet ( JSONArray a ) throws JSONException - { - if ( a == null ) return null; - - final TreeSet set = new TreeSet<>(); - for ( int i=0; i hostPart, final String topic, final String consumerGroup, - final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException { - super(hostPart, topic + "::" + consumerGroup + "::" + consumerId); - - fTopic = topic; - fGroup = consumerGroup; - fId = consumerId; - fTimeoutMs = timeoutMs; - fLimit = limit; - fFilter = filter; - - setApiCredentials(apiKey, apiSecret); - } - - /** - * method converts String to list - * - * @param str - * @return - */ - public static List stringToList(String str) { - final LinkedList set = new LinkedList(); - if (str != null) { - final String[] parts = str.trim().split(","); - for (String part : parts) { - final String trimmed = part.trim(); - if (trimmed.length() > 0) { - set.add(trimmed); - } - } - } - return set; - } - - @Override - public Iterable fetch() throws IOException { - // fetch with the timeout and limit set in constructor - return fetch(fTimeoutMs, fLimit); - } - - @Override - public Iterable fetch(int timeoutMs, int limit) throws IOException { - final LinkedList msgs = new LinkedList(); - - final String urlPath = createUrlPath(timeoutMs, limit); - - getLog().info("UEB GET " + urlPath); - try { - final JSONObject o = get(urlPath); - - if (o != null) { - final JSONArray a = o.getJSONArray("result"); - if (a != null) { - for (int i = 0; i < a.length(); i++) { - msgs.add(a.getString(i)); - } - } - } - } catch (HttpObjectNotFoundException e) { - // this can happen if the topic is not yet created. ignore. 
- Log.error("Failed due to topic is not yet created" + e); - } catch (JSONException e) { - // unexpected response - reportProblemWithResponse(); - Log.error("Failed due to jsonException", e); - } catch (HttpException e) { - throw new IOException(e); - } - - return msgs; - } - - public String createUrlPath(int timeoutMs, int limit) { - final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId)); - final StringBuilder adds = new StringBuilder(); - if (timeoutMs > -1) { - adds.append("timeout=").append(timeoutMs); - } - - if (limit > -1) { - if (adds.length() > 0) { - adds.append("&"); - } - adds.append("limit=").append(limit); - } - if (fFilter != null && fFilter.length() > 0) { - try { - if (adds.length() > 0) { - adds.append("&"); - } - adds.append("filter=").append(URLEncoder.encode(fFilter, "UTF-8")); - } catch (UnsupportedEncodingException e) { - Log.error("Failed due to UnsupportedEncodingException" + e); - } - } - if (adds.length() > 0) { - url.append("?").append(adds.toString()); - } - return url.toString(); - } - -} diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java deleted file mode 100644 index e9b1cdb..0000000 --- a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java +++ /dev/null @@ -1,422 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.metrics.publisher.impl; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.net.MalformedURLException; -import java.nio.channels.NotYetConnectedException; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ScheduledThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.zip.GZIPOutputStream; - -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.Entity; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.Response; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility; - -/** - * - * class DMaaPCambriaSimplerBatchPublisher used to send the publish the messages - * in batch - * - * @author anowarul.islam - * - */ -public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient - implements com.att.dmf.mr.metrics.publisher.CambriaBatchingPublisher { - /** - * - * static inner class initializes with urls, topic,batchSize - * - * @author anowarul.islam - * - */ - public static class Builder { - public Builder() { - } - - /** - * constructor initialize with url - * - * @param baseUrls - * @return - * - */ - public Builder againstUrls(Collection baseUrls) { - fUrls = baseUrls; - return this; - } - - /** - * constructor initializes with topics - * - * @param topic - * @return - * - */ - public Builder onTopic(String topic) { - fTopic = topic; - return this; - } - - /** - * constructor initilazes with batch size and batch time - * - * @param maxBatchSize - * @param maxBatchAgeMs - * @return - * - */ - public Builder batchTo(int maxBatchSize, long maxBatchAgeMs) { - fMaxBatchSize = maxBatchSize; - fMaxBatchAgeMs = maxBatchAgeMs; - return this; - } - - /** - * constructor initializes with compress - * - * @param compress - * @return - */ - public Builder compress(boolean compress) { - fCompress = compress; - return this; - } - - /** - * method returns DMaaPCambriaSimplerBatchPublisher object - * - * @return - */ - public DMaaPCambriaSimplerBatchPublisher build() { - - try { - return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress); - } catch (MalformedURLException e) { - - NotYetConnectedException exception=new NotYetConnectedException(); - exception.setStackTrace(e.getStackTrace()); - - throw exception ; - - } - } - - private Collection fUrls; - private String fTopic; - private int fMaxBatchSize = 100; - private long fMaxBatchAgeMs = 1000; - private boolean fCompress = false; - }; - - /** - * - * @param partition - * @param msg - */ - @Override - public int send(String partition, String msg) { - return send(new message(partition, msg)); - } - - /** - * @param msg - */ - @Override - public int send(message msg) { - final LinkedList list = new LinkedList(); - list.add(msg); - return send(list); - } - - /** - * @param msgs - */ - @Override - public synchronized int send(Collection msgs) { - if (fClosed) { - throw new IllegalStateException("The publisher was closed."); - } - - for (message userMsg : msgs) { - fPending.add(new TimestampedMessage(userMsg)); - } - return getPendingMessageCount(); - } - - /** - * getPending message count - */ - @Override - public synchronized int 
getPendingMessageCount() { - return fPending.size(); - } - - /** - * - * @exception InterruptedException - * @exception IOException - */ - @Override - public void close() { - try { - final List remains = close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); - if (remains.isEmpty()) { - getLog().warn("Closing publisher with " + remains.size() + " messages unsent. " - + "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close."); - } - } catch (InterruptedException e) { - getLog().warn("Possible message loss. " + e.getMessage(), e); - } catch (IOException e) { - getLog().warn("Possible message loss. " + e.getMessage(), e); - } - } - - /** - * @param time - * @param unit - */ - @Override - public List close(long time, TimeUnit unit) throws IOException, InterruptedException { - synchronized (this) { - fClosed = true; - - // stop the background sender - fExec.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); - fExec.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); - fExec.shutdown(); - } - - final long now = Clock.now(); - final long waitInMs = TimeUnit.MILLISECONDS.convert(time, unit); - final long timeoutAtMs = now + waitInMs; - - while (Clock.now() < timeoutAtMs && getPendingMessageCount() > 0) { - send(true); - Thread.sleep(250); - } - // synchronizing the current object - synchronized (this) { - final LinkedList result = new LinkedList(); - fPending.drainTo(result); - return result; - } - } - - /** - * Possibly send a batch to the cambria server. This is called by the - * background thread and the close() method - * - * @param force - */ - private synchronized void send(boolean force) { - if (force || shouldSendNow()) { - if (!sendBatch()) { - getLog().warn("Send failed, " + fPending.size() + " message to send."); - - // note the time for back-off - fDontSendUntilMs = sfWaitAfterError + Clock.now(); - } - } - } - - /** - * - * @return - */ - private synchronized boolean shouldSendNow() { - boolean shouldSend = false; - if (fPending.isEmpty()) { - final long nowMs = Clock.now(); - - shouldSend = (fPending.size() >= fMaxBatchSize); - if (!shouldSend) { - final long sendAtMs = fPending.peek().timestamp + fMaxBatchAgeMs; - shouldSend = sendAtMs <= nowMs; - } - - // however, wait after an error - shouldSend = shouldSend && nowMs >= fDontSendUntilMs; - } - return shouldSend; - } - - /** - * - * @return - */ - private synchronized boolean sendBatch() { - // it's possible for this call to be made with an empty list. in this - // case, just return. - if (fPending.isEmpty()) { - return true; - } - - final long nowMs = Clock.now(); - final String url = CambriaPublisherUtility.makeUrl(fTopic); - - getLog().info("sending " + fPending.size() + " msgs to " + url + ". 
Oldest: " - + (nowMs - fPending.peek().timestamp) + " ms"); - - try { - - final ByteArrayOutputStream baseStream = new ByteArrayOutputStream(); - OutputStream os = baseStream; - if (fCompress) { - os = new GZIPOutputStream(baseStream); - } - for (TimestampedMessage m : fPending) { - os.write(("" + m.fPartition.length()).getBytes()); - os.write('.'); - os.write(("" + m.fMsg.length()).getBytes()); - os.write('.'); - os.write(m.fPartition.getBytes()); - os.write(m.fMsg.getBytes()); - os.write('\n'); - } - os.close(); - - final long startMs = Clock.now(); - - // code from REST Client Starts - - - - - Client client = ClientBuilder.newClient(); - String metricTopicname = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic"); - if (null==metricTopicname) { - - metricTopicname="msgrtr.apinode.metrics.dmaap"; - } - WebTarget target = client - .target("http://localhost:" + CambriaConstants.kStdCambriaServicePort); - target = target.path("/events/" + fTopic); - getLog().info("url : " + target.getUri().toString()); - // API Key - - Entity data = Entity.entity(baseStream.toByteArray(), "application/cambria"); - - Response response = target.request().post(data); - - getLog().info("Response received :: " + response.getStatus()); - getLog().info("Response received :: " + response.toString()); - - // code from REST Client Ends - - - fPending.clear(); - return true; - } catch (IllegalArgumentException x) { - getLog().warn(x.getMessage(), x); - } - - catch (IOException x) { - getLog().warn(x.getMessage(), x); - } - return false; - } - - private final String fTopic; - private final int fMaxBatchSize; - private final long fMaxBatchAgeMs; - private final boolean fCompress; - private boolean fClosed; - - private final LinkedBlockingQueue fPending; - private long fDontSendUntilMs; - private final ScheduledThreadPoolExecutor fExec; - - private static final long sfWaitAfterError = 1000; - - /** - * - * @param hosts - * @param topic - * @param maxBatchSize - * @param maxBatchAgeMs - * @param compress - */ - private DMaaPCambriaSimplerBatchPublisher(Collection hosts, String topic, int maxBatchSize, - long maxBatchAgeMs, boolean compress) throws MalformedURLException { - - super(hosts); - - if (topic == null || topic.length() < 1) { - throw new IllegalArgumentException("A topic must be provided."); - } - - fClosed = false; - fTopic = topic; - fMaxBatchSize = maxBatchSize; - fMaxBatchAgeMs = maxBatchAgeMs; - fCompress = compress; - - fPending = new LinkedBlockingQueue(); - fDontSendUntilMs = 0; - - fExec = new ScheduledThreadPoolExecutor(1); - fExec.scheduleAtFixedRate(new Runnable() { - @Override - public void run() { - send(false); - } - }, 100, 50, TimeUnit.MILLISECONDS); - } - - /** - * - * - * @author anowarul.islam - * - */ - private static class TimestampedMessage extends message { - /** - * to store timestamp value - */ - public final long timestamp; - - /** - * constructor initialize with message - * - * @param m - * - */ - public TimestampedMessage(message m) { - super(m); - timestamp = Clock.now(); - } - } - -} diff --git a/src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java b/src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java deleted file mode 100644 index 4565d3a..0000000 --- a/src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java +++ /dev/null @@ -1,114 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * 
================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.resources; - -import java.io.IOException; -import java.io.InputStream; -import java.util.zip.GZIPInputStream; - -import javax.servlet.http.HttpServletResponse; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.resources.streamReaders.CambriaJsonStreamReader; -import com.att.dmf.mr.resources.streamReaders.CambriaRawStreamReader; -import com.att.dmf.mr.resources.streamReaders.CambriaStreamReader; -import com.att.dmf.mr.resources.streamReaders.CambriaTextStreamReader; -import com.att.nsa.apiServer.streams.ChunkedInputStream; -import com.att.nsa.drumlin.service.standards.HttpStatusCodes; - -/** - * An inbound event set. - * - * @author peter - */ -public class CambriaEventSet { - private final reader fReader; - - /** - * constructor initialization - * - * @param mediaType - * @param originalStream - * @param chunked - * @param defPartition - * @throws CambriaApiException - */ - public CambriaEventSet(String mediaType, InputStream originalStream, - boolean chunked, String defPartition) throws CambriaApiException { - InputStream is = originalStream; - if (chunked) { - is = new ChunkedInputStream(originalStream); - } - - if (("application/json").equals(mediaType)) { - if (chunked) { - throw new CambriaApiException( - HttpServletResponse.SC_BAD_REQUEST, - "The JSON stream reader doesn't support chunking."); - } - fReader = new CambriaJsonStreamReader(is, defPartition); - } else if (("application/cambria").equals(mediaType)) { - fReader = new CambriaStreamReader(is); - } else if (("application/cambria-zip").equals(mediaType)) { - try { - is = new GZIPInputStream(is); - } catch (IOException e) { - throw new CambriaApiException(HttpStatusCodes.k400_badRequest, - "Couldn't read compressed format: " + e); - } - fReader = new CambriaStreamReader(is); - } else if (("text/plain").equals(mediaType)) { - fReader = new CambriaTextStreamReader(is, defPartition); - } else { - fReader = new CambriaRawStreamReader(is, defPartition); - } - } - - /** - * Get the next message from this event set. Returns null when the end of - * stream is reached. Will block until a message arrives (or the stream is - * closed/broken). 
- * - * @return a message, or null - * @throws IOException - * @throws CambriaApiException - */ - public message next() throws IOException, CambriaApiException { - return fReader.next(); - } - - /** - * - * @author anowarul.islam - * - */ - public interface reader { - /** - * - * @return - * @throws IOException - * @throws CambriaApiException - */ - message next() throws IOException, CambriaApiException; - } -} diff --git a/src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java b/src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java deleted file mode 100644 index aae15fb..0000000 --- a/src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java +++ /dev/null @@ -1,554 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
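CambriaEventSet, completed above, wraps an inbound request body and picks a stream reader from the media type. A minimal sketch of draining it, assuming the request body stream is supplied by the caller and the partition name is illustrative:

    import java.io.IOException;
    import java.io.InputStream;
    import com.att.dmf.mr.CambriaApiException;
    import com.att.dmf.mr.backends.Publisher.message;
    import com.att.dmf.mr.resources.CambriaEventSet;

    public class EventSetSketch {
        // Drains an inbound request body; next() returns null at end of stream.
        void readAll(InputStream body) throws IOException, CambriaApiException {
            CambriaEventSet events =
                    new CambriaEventSet("application/json", body, false, "defaultPartition");
            message m;
            while ((m = events.next()) != null) {
                // hand each parsed message to the backend publisher here
            }
        }
    }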
- * - *******************************************************************************/ -package com.att.dmf.mr.resources; - -import java.io.IOException; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Date; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.json.JSONException; -import org.json.JSONObject; -import org.json.JSONTokener; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.utils.DMaaPResponseBuilder.StreamWriter; -import com.att.dmf.mr.utils.Utils; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/*import com.att.sa.highlandPark.config.HpConfigContext; -import com.att.sa.highlandPark.config.HpReaderException; -import com.att.sa.highlandPark.events.HpJsonEvent; -import com.att.sa.highlandPark.events.HpJsonEventFactory; -import com.att.sa.highlandPark.processor.HpAlarmFilter; -import com.att.sa.highlandPark.processor.HpEvent; -import com.att.sa.highlandPark.processor.HpProcessingEngine; -import com.att.sa.highlandPark.processor.HpProcessingEngine.EventFactory; -*/ -/** - * class used to write the consumed messages - * - * @author anowarul.islam - * - */ -public class CambriaOutboundEventStream implements StreamWriter { - private static final int kTopLimit = 1024 * 4; - - /** - * - * static innerclass it takes all the input parameter for kafka consumer - * like limit, timeout, meta, pretty - * - * @author anowarul.islam - * - */ - public static class Builder { - - // Required - private final Consumer fConsumer; - // private final rrNvReadable fSettings; // used during write to tweak - // format, decide to explicitly - // close stream or not - - // Optional - private int fLimit; - private int fTimeoutMs; - private String fTopicFilter; - private boolean fPretty; - private boolean fWithMeta; - ArrayList fKafkaConsumerList; - - - /** - * constructor it initializes all the consumer parameters - * - * @param c - * @param settings - */ - public Builder(Consumer c) { - this.fConsumer = c; - - - fLimit = CambriaConstants.kNoTimeout; - fTimeoutMs = CambriaConstants.kNoLimit; - fTopicFilter = CambriaConstants.kNoFilter; - fPretty = false; - fWithMeta = false; - - - } - - /** - * - * constructor initializes with limit - * - * @param l - * only l no of messages will be consumed - * @return - */ - public Builder limit(int l) { - this.fLimit = l; - return this; - } - - /** - * constructor initializes with timeout - * - * @param t - * if there is no message to consume, them DMaaP will wait - * for t time - * @return - */ - public Builder timeout(int t) { - this.fTimeoutMs = t; - return this; - } - - /** - * constructor initializes with filter - * - * @param f - * filter - * @return - */ - public Builder filter(String f) { - this.fTopicFilter = f; - return this; - } - - /** - * constructor initializes with boolean value pretty - * - * @param p - * messages print in new line - * @return - */ - public Builder pretty(boolean p) { - fPretty = p; - return this; - } - - /** - * constructor initializes with boolean value meta - * - * @param withMeta, - * along with messages offset will print - * @return - */ - 
public Builder withMeta(boolean withMeta) { - fWithMeta = withMeta; - return this; - } - - // public Builder atOffset ( int pos ) - - - // return this; - // } - /** - * method returs object of CambriaOutboundEventStream - * - * @return - * @throws CambriaApiException - */ - public CambriaOutboundEventStream build() throws CambriaApiException { - return new CambriaOutboundEventStream(this); - } - } - - @SuppressWarnings("unchecked") - /** - * - * @param builder - * @throws CambriaApiException - * - */ - private CambriaOutboundEventStream(Builder builder) throws CambriaApiException { - fConsumer = builder.fConsumer; - fLimit = builder.fLimit; - fTimeoutMs = builder.fTimeoutMs; - - fSent = 0; - fPretty = builder.fPretty; - fWithMeta = builder.fWithMeta; - fKafkaConsumerList = builder.fKafkaConsumerList; - /* if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) { - fHpAlarmFilter = null; - fHppe = null; - } else { - try { - final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter)); - HpConfigContext cc = new HpConfigContext(); - fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter); - final EventFactory ef = new HpJsonEventFactory(); - fHppe = new HpProcessingEngine(ef); - } catch (HpReaderException e) { - // JSON was okay, but the filter engine says it's bogus - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, - "Couldn't create filter: " + e.getMessage()); - } catch (JSONException e) { - // user sent a bogus JSON object - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, - "Couldn't parse JSON: " + e.getMessage()); - } - }*/ - } - - /** - * - * interface provides onWait and onMessage methods - * - */ - public interface operation { - /** - * Call thread.sleep - * - * @throws IOException - */ - void onWait() throws IOException; - - /** - * provides the output based in the consumer paramter - * - * @param count - * @param msg - * @throws IOException - */ - - void onMessage(int count, String msg, String transId, long offSet) throws IOException, JSONException; - } - - /** - * - * @return - */ - public int getSentCount() { - return fSent; - } - - @Override - /** - * - * @param os - * throws IOException - */ - public void write(final OutputStream os) throws IOException { - - - // final boolean transactionEnabled = istransEnable; - // synchronized(this){ - os.write('['); - fSent = forEachMessage(new operation() { - @Override - public void onMessage(int count, String msg, String transId, long offSet) - throws IOException, JSONException { - - if (count > 0) { - os.write(','); - } - if (fWithMeta) { - final JSONObject entry = new JSONObject(); - entry.put("offset", offSet); - entry.put("message", msg); - os.write(entry.toString().getBytes()); - } else { - - String jsonString = JSONObject.valueToString(msg); - os.write(jsonString.getBytes()); - } - - if (fPretty) { - os.write('\n'); - } - - String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap - .getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic"); - if (null == metricTopicname) - metricTopicname = "msgrtr.apinode.metrics.dmaap"; - if (!metricTopicname.equalsIgnoreCase(topic.getName())) { - try { - if (istransEnable && istransType) { - // final String transactionId = - - - StringBuilder consumerInfo = new StringBuilder(); - if (null != dmaapContext && null != dmaapContext.getRequest()) { - final HttpServletRequest request = dmaapContext.getRequest(); - consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\","); - 
consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\","); - consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\","); - consumerInfo.append("consumerGroup= \"" - + getConsumerGroupFromRequest(request.getRequestURI()) + "\","); - consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\","); - } - log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transId - + "\",messageLength= \"" + msg.length() + "\",topic= \"" + topic.getName() + "\"]"); - } - } catch (Exception e) { - } - } - - } - - @Override - /** - * - * It makes thread to wait - * - * @throws IOException - */ - public void onWait() throws IOException { - os.flush(); // likely totally unnecessary for a network socket - try { - // FIXME: would be good to wait/signal - Thread.sleep(100); - } catch (InterruptedException e) { - // ignore - } - } - }); - - - if (null != dmaapContext && istransEnable && istransType) { - - dmaapContext.getResponse().setHeader("transactionId", - Utils.getResponseTransactionId(responseTransactionId)); - } - - os.write(']'); - os.flush(); - - boolean close_out_stream = true; - String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "close.output.stream"); - if (null != strclose_out_stream) - close_out_stream = Boolean.parseBoolean(strclose_out_stream); - - - if (close_out_stream) { - os.close(); - - } - } - - /** - * - * @param requestURI - * @return - */ - private String getConsumerGroupFromRequest(String requestURI) { - if (null != requestURI && !requestURI.isEmpty()) { - - String consumerDetails = requestURI.substring(requestURI.indexOf("events/") + 7); - - int startIndex = consumerDetails.indexOf("/") + 1; - int endIndex = consumerDetails.lastIndexOf("/"); - return consumerDetails.substring(startIndex, endIndex); - } - return null; - } - - /** - * - * @param op - * @return - * @throws IOException - * @throws JSONException - */ - public int forEachMessage(operation op) throws IOException, JSONException { - final int effectiveLimit = (fLimit == 0 ? 
kTopLimit : fLimit); - - int count = 0; - boolean firstPing = true; - // boolean isTransType=false; - final long startMs = System.currentTimeMillis(); - final long timeoutMs = fTimeoutMs + startMs -500; //500 ms used in poll - - while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) { - if (!firstPing) { - op.onWait(); - } - firstPing = false; - - - Consumer.Message msgRecord = null; - while (count < effectiveLimit && (msgRecord = - fConsumer.nextMessage()) != null) { - - String message = ""; - String transactionid = ""; - try { - // String msgRecord = msg; - JSONObject jsonMessage = new JSONObject(msgRecord); - String[] keys = JSONObject.getNames(jsonMessage); - boolean wrapheader1 = false; - boolean wrapheader2 = false; - boolean found_attr3 = false; - String wrapElement1 = "message"; - String wrapElement2 = "msgWrapMR"; - String transIdElement = "transactionId"; - if (null != keys) { - for (String key : keys) { - if (key.equals(wrapElement1)) { - wrapheader1 = true; - } else if (key.equals(wrapElement2)) { - wrapheader2 = true; - } else if (key.equals(transIdElement)) { - found_attr3 = true; - transactionid = jsonMessage.getString(key); - } - } - } - - // returns contents of attribute 1 if both attributes - // present, otherwise - // the whole msg - if (wrapheader2 && found_attr3) { - message = jsonMessage.getString(wrapElement2); - } else if (wrapheader1 && found_attr3) { - message = jsonMessage.getString(wrapElement1); - } else { - message = msgRecord.getMessage(); - } - // jsonMessage = extractMessage(jsonMessage , - // "message","msgWrapMR","transactionId"); - istransType = true; - } catch (JSONException e) { // This check is required for the - // message sent by MR AAF flow but - // consumed by UEB ACL flow which - // wont expect transaction id in - // cambria client api - // Ignore - log.info("JSON Exception logged when the message is non JSON Format"); - } catch (Exception exp) { - log.info("****Some Exception occured for writing messages in topic" + topic.getName() - + " Exception" + exp); - } - if (message == null || message.equals("")) { - istransType = false; - message = msgRecord.getMessage(); - } - - // If filters are enabled/set, message should be in JSON format - // for filters to work for - // otherwise filter will automatically ignore message in - // non-json format. 
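To make the unwrapping above concrete, a short illustration (field values are invented):

    // {"msgWrapMR":"hello","transactionId":"28-1234"}  -> onMessage(count, "hello", "28-1234", offset)
    // {"message":"hi","transactionId":"28-5678"}       -> onMessage(count, "hi", "28-5678", offset)
    // plain JSON without those fields, or non-JSON text -> delivered unchanged via msgRecord.getMessage(),
    //                                                      with an empty transaction id

Only a record that fails to yield a non-empty payload (for example non-JSON input) resets istransType to false before delivery.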
- if (filterMatches(message)) { - op.onMessage(count, message, transactionid, msgRecord.getOffset()); - count++; - - } - - } - } - return count; - } - - - - /** - * - * Checks whether filter is initialized - */ - - - - - /** - * - * @param msg - * @return - */ - private boolean filterMatches(String msg) { - boolean result = true; - - - - - - - - - - - - - - return result; - } - - public DMaaPContext getDmaapContext() { - return dmaapContext; - } - - public void setDmaapContext(DMaaPContext dmaapContext) { - this.dmaapContext = dmaapContext; - } - - public Topic getTopic() { - return topic; - } - - public void setTopic(Topic topic) { - this.topic = topic; - } - - public void setTopicStyle(boolean aaftopic) { - this.isAAFTopic = aaftopic; - } - - public void setTransEnabled(boolean transEnable) { - this.istransEnable = transEnable; - } - - - private final Consumer fConsumer; - private final int fLimit; - private final int fTimeoutMs; - - private final boolean fPretty; - private final boolean fWithMeta; - private int fSent; - - //private final HpProcessingEngine fHppe; - private DMaaPContext dmaapContext; - private String responseTransactionId; - private Topic topic; - private boolean isAAFTopic = false; - private boolean istransEnable = false; - private ArrayList fKafkaConsumerList; - private boolean istransType = true; - // private static final Logger log = - - - private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class); -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java deleted file mode 100644 index 7a67c92..0000000 --- a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java +++ /dev/null @@ -1,169 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.resources.streamReaders; - -import java.io.InputStream; - -import javax.servlet.http.HttpServletResponse; - -import org.json.JSONException; -import org.json.JSONObject; -import org.json.JSONTokener; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.beans.LogDetails; -import com.att.dmf.mr.resources.CambriaEventSet.reader; - -/** - * - * @author anowarul.islam - * - */ -public class CambriaJsonStreamReader implements reader { - private final JSONTokener fTokens; - private final boolean fIsList; - private long fCount; - private final String fDefPart; - public static final String kKeyField = "cambria.partition"; - - /** - * - * @param is - * @param defPart - * @throws CambriaApiException - */ - public CambriaJsonStreamReader(InputStream is, String defPart) throws CambriaApiException { - try { - fTokens = new JSONTokener(is); - fCount = 0; - fDefPart = defPart; - - final int c = fTokens.next(); - if (c == '[') { - fIsList = true; - } else if (c == '{') { - fTokens.back(); - fIsList = false; - } else { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object."); - } - } catch (JSONException e) { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); - } - } - - @Override - public message next() throws CambriaApiException { - try { - if (!fTokens.more()) { - return null; - } - - final int c = fTokens.next(); - - - if (fIsList) { - if (c == ']' || (fCount > 0 && c == 10)) - return null; - - - if (fCount > 0 && c != ',' && c!= 10) { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, - "Expected ',' or closing ']' after last object."); - } - - if (fCount == 0 && c != '{' && c!= 10 && c!=32) { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected { to start an object."); - } - } else if (fCount != 0 || c != '{') { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected '{' to start an object."); - } - - if (c == '{') { - fTokens.back(); - } - final JSONObject o = new JSONObject(fTokens); - fCount++; - return new msg(o); - } catch (JSONException e) { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); - - } - } - - private class msg implements message { - private final String fKey; - private String fMsg; - private LogDetails logDetails; - private boolean transactionEnabled; - - /** - * constructor - * - * @param o - */ - - - - public msg(JSONObject o) { - String key = o.optString(kKeyField, fDefPart); - if (key == null) { - key = "" + System.currentTimeMillis(); - } - fKey = key; - - fMsg = o.toString().trim(); - - } - - @Override - public String getKey() { - return fKey; - } - - @Override - public String getMessage() { - return fMsg; - } - - @Override - public boolean isTransactionEnabled() { - return transactionEnabled; - } - - @Override - public void setTransactionEnabled(boolean transactionEnabled) { - this.transactionEnabled = transactionEnabled; - } - - @Override - public void setLogDetails(LogDetails logDetails) { - this.logDetails = logDetails; - } - - @Override - public LogDetails getLogDetails() { - return logDetails; - } - } -} diff --git a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java deleted file mode 100644 index 
f64c0de..0000000 --- a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java +++ /dev/null @@ -1,141 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.resources.streamReaders; - -import java.io.IOException; -import java.io.InputStream; - -import javax.servlet.http.HttpServletResponse; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.beans.LogDetails; -import com.att.dmf.mr.resources.CambriaEventSet.reader; -import com.att.nsa.util.StreamTools; - -/** - * - * This stream reader reads raw bytes creating a single message. - * @author peter - * - */ -public class CambriaRawStreamReader implements reader -{ - /** - * This is the constructor of CambriaRawStreamReader, it will basically the read from Input stream - * @param is - * @param defPart - * @throws CambriaApiException - */ - public CambriaRawStreamReader ( InputStream is, String defPart ) throws CambriaApiException - { - fStream = is; - fDefPart = defPart; - fClosed = false; - } - - @Override - /** - * - * next() method reads the bytes and - * iterates through the messages - * @throws CambriaApiException - * - */ - public message next () throws CambriaApiException - { - if ( fClosed ) return null; - - try - { - final byte[] rawBytes = StreamTools.readBytes ( fStream ); - fClosed = true; - return new message () - { - private LogDetails logDetails; - private boolean transactionEnabled; - - /** - * returns boolean value which - * indicates whether transaction is enabled - */ - public boolean isTransactionEnabled() { - return transactionEnabled; - } - - /** - * sets boolean value which - * indicates whether transaction is enabled - */ - public void setTransactionEnabled(boolean transactionEnabled) { - this.transactionEnabled = transactionEnabled; - } - - @Override - /** - * @returns key - * It ch4ecks whether fDefPart value is Null. - * If yes, it will return ystem.currentTimeMillis () else - * it will return fDefPart variable value - */ - public String getKey () - { - return fDefPart == null ? 
"" + System.currentTimeMillis () : fDefPart; - } - - @Override - /** - * returns the message in String type object - */ - public String getMessage () - { - return new String ( rawBytes ); - } - - /** - * set log details in logDetails variable - */ - @Override - public void setLogDetails(LogDetails logDetails) { - this.logDetails = logDetails; - } - - @Override - /** - * get the log details - */ - public LogDetails getLogDetails() { - return this.logDetails; - } - }; - } - catch ( IOException e ) - { - throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () ); - } - } - - private final InputStream fStream; - private final String fDefPart; - private boolean fClosed; - -} diff --git a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java deleted file mode 100644 index 3dbf339..0000000 --- a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java +++ /dev/null @@ -1,229 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.resources.streamReaders; - -import java.io.IOException; -import java.io.InputStream; - -import javax.servlet.http.HttpServletResponse; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.beans.LogDetails; -import com.att.dmf.mr.resources.CambriaEventSet.reader; - -/** - * Read an optionally chunked stream in the Cambria app format. This format - * allows for speedier server-side message parsing than pure JSON. It's looks - * like:
- *
- * <keyLength>.<msgLength>.<key><message>
- *
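For example (payload values invented for illustration), the records ("key1", "hello") and an empty-keyed ("", "world") would arrive back-to-back as:

    4.5.key1hello0.5.world

readLength() below parses the ASCII digits before each dot and readString() then consumes exactly that many bytes, so no separator is needed between records; an empty key is later replaced in msg() with the current time in milliseconds.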
- * Whitespace before/after each entry is ignored, so messages can be delivered - * with newlines between them, or not. - * - * @author peter - * - */ -public class CambriaStreamReader implements reader { - /** - * constructor initializing InputStream with fStream - * - * @param senderStream - * @throws CambriaApiException - */ - public CambriaStreamReader(InputStream senderStream) throws CambriaApiException { - fStream = senderStream; - } - - @Override - /** - * next method iterates through msg length - * throws IOException - * throws CambriaApiException - * - */ - public message next() throws IOException, CambriaApiException { - final int keyLen = readLength(); - if (keyLen == -1) - return null; - - final int msgLen = readLength(); - final String keyPart = readString(keyLen); - final String msgPart = readString(msgLen); - - return new msg(keyPart, msgPart); - } - - private static class msg implements message { - /** - * constructor initialization - * - * @param key - * @param msg - */ - public msg(String key, String msg) { - // if no key, use the current time. This allows the message to be - // delivered - // in any order without forcing it into a single partition as empty - // string would. - if (key.length() < 1) { - key = "" + System.currentTimeMillis(); - } - - fKey = key; - fMsg = msg; - } - - @Override - /** - * @returns fkey - */ - public String getKey() { - return fKey; - } - - @Override - /** - * returns the message in String type object - */ - public String getMessage() { - return fMsg; - } - - private final String fKey; - private final String fMsg; - private LogDetails logDetails; - private boolean transactionEnabled; - - /** - * returns boolean value which - * indicates whether transaction is enabled - */ - public boolean isTransactionEnabled() { - return transactionEnabled; - } - - /** - * sets boolean value which - * indicates whether transaction is enabled - */ - public void setTransactionEnabled(boolean transactionEnabled) { - this.transactionEnabled = transactionEnabled; - } - - @Override - /** - * set log details in logDetails variable - */ - public void setLogDetails(LogDetails logDetails) { - this.logDetails = logDetails; - } - - @Override - /** - * get the log details - */ - public LogDetails getLogDetails() { - return this.logDetails; - } - - } - - private final InputStream fStream; - - /** - * max cambria length indicates message length - - // This limit is here to prevent the server from spinning on a long string of numbers - // that is delivered with 'application/cambria' as the format. The limit needs to be - // large enough to support the max message length (currently 1MB, the default Kafka - // limit) - * */ - - private static final int kMaxCambriaLength = 4*1000*1024; - - - /** - * - * @return - * @throws IOException - * @throws CambriaApiException - */ - private int readLength() throws IOException, CambriaApiException { - // always ignore leading whitespace - int c = fStream.read(); - while (Character.isWhitespace(c)) { - c = fStream.read(); - } - - if (c == -1) { - return -1; - } - - int result = 0; - while (Character.isDigit(c)) { - result = (result * 10) + (c - '0'); - if (result > kMaxCambriaLength) { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length."); - } - c = fStream.read(); - } - - if (c != '.') { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . 
after length."); - } - - return result; - } - - /** - * - * @param len - * @return - * @throws IOException - * @throws CambriaApiException - */ - private String readString(int len) throws IOException, CambriaApiException { - final byte[] buffer = new byte[len]; - - final long startMs = System.currentTimeMillis(); - final long timeoutMs = startMs + 30000; // FIXME configurable - - int readTotal = 0; - while (readTotal < len) { - final int read = fStream.read(buffer, readTotal, len - readTotal); - if (read == -1 || System.currentTimeMillis() > timeoutMs) { - // EOF - break; - } - readTotal += read; - } - - if (readTotal < len) { - throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, - "End of stream while reading " + len + " bytes"); - } - - return new String(buffer); - } -} diff --git a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java deleted file mode 100644 index b06e17a..0000000 --- a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java +++ /dev/null @@ -1,140 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.resources.streamReaders; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; - -import javax.servlet.http.HttpServletResponse; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.beans.LogDetails; -import com.att.dmf.mr.resources.CambriaEventSet.reader; - -/** - * This stream reader just pulls single lines. It uses the default partition if provided. If - * not, the key is the current time, which does not guarantee ordering. 
- * - * @author peter - * - */ -public class CambriaTextStreamReader implements reader -{ - /** - * This is the constructor for Cambria Text Reader format - * @param is - * @param defPart - * @throws CambriaApiException - */ - public CambriaTextStreamReader ( InputStream is, String defPart ) throws CambriaApiException - { - fReader = new BufferedReader ( new InputStreamReader ( is ) ); - fDefPart = defPart; - } - - @Override - /** - * next() method iterates through msg length - * throws IOException - * throws CambriaApiException - * - */ - public message next () throws CambriaApiException - { - try - { - final String line = fReader.readLine (); - if ( line == null ) return null; - - return new message () - { - private LogDetails logDetails; - private boolean transactionEnabled; - - /** - * returns boolean value which - * indicates whether transaction is enabled - * @return - */ - public boolean isTransactionEnabled() { - return transactionEnabled; - } - - /** - * sets boolean value which - * indicates whether transaction is enabled - */ - public void setTransactionEnabled(boolean transactionEnabled) { - this.transactionEnabled = transactionEnabled; - } - - @Override - /** - * @returns key - * It ch4ecks whether fDefPart value is Null. - * If yes, it will return ystem.currentTimeMillis () else - * it will return fDefPart variable value - */ - public String getKey () - { - return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart; - } - - @Override - /** - * returns the message in String type object - * @return - */ - public String getMessage () - { - return line; - } - - @Override - /** - * set log details in logDetails variable - */ - public void setLogDetails(LogDetails logDetails) { - this.logDetails = logDetails; - } - - @Override - /** - * get the log details - */ - public LogDetails getLogDetails() { - return this.logDetails; - } - }; - } - catch ( IOException e ) - { - throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () ); - } - } - - private final BufferedReader fReader; - private final String fDefPart; -} diff --git a/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java b/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java deleted file mode 100644 index 59196d2..0000000 --- a/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java +++ /dev/null @@ -1,39 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.security; - -import javax.servlet.http.HttpServletRequest; - -import com.att.dmf.mr.CambriaApiException; - - - - -/** - * - * @author sneha.d.desai - * - */ -public interface DMaaPAAFAuthenticator { - boolean aafAuthentication( HttpServletRequest req , String role); - String aafPermissionString(String permission, String action) throws CambriaApiException; -} diff --git a/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java b/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java deleted file mode 100644 index e4e24cd..0000000 --- a/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java +++ /dev/null @@ -1,80 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.security; - -import javax.servlet.http.HttpServletRequest; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.constants.CambriaConstants; - - -/** - * - * @author sneha.d.desai - * - */ -public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator { - - /** - * @param req - * @param role - */ - @Override - public boolean aafAuthentication(HttpServletRequest req, String role) { - boolean auth = false; - if(req.isUserInRole(role)) - { - - auth = true; - } - - return auth; - } - - @Override - public String aafPermissionString(String topicName, String action) throws CambriaApiException { - - - String permission = ""; - String nameSpace =""; - if(topicName.contains(".") && topicName.contains("org.onap")) { - - nameSpace = topicName.substring(0,topicName.lastIndexOf(".")); - } - else { - nameSpace = null; - nameSpace= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"defaultNSforUEB"); - - if(null==nameSpace)nameSpace="org.onap.dmaap.mr"; - - - - } - - permission = nameSpace+".topic|:topic."+topicName+"|"+action; - return permission; - - } - - - -} diff --git a/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java b/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java deleted file mode 100644 index 848d4cc..0000000 --- a/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java +++ /dev/null @@ -1,61 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.security; - -import javax.servlet.http.HttpServletRequest; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.nsa.security.NsaApiKey; - - -/** - * An interface for authenticating an inbound request. - * @author nilanjana.maity - * - * @param NsaApiKey - */ -public interface DMaaPAuthenticator { - - /** - * Qualify a request as possibly using the authentication method that this class implements. - * @param req - * @return true if the request might be authenticated by this class - */ - boolean qualify ( HttpServletRequest req ); - - /** - * Check for a request being authentic. If it is, return the API key. If not, return null. 
- * @param req An inbound web request - * @return the API key for an authentic request, or null - */ - K isAuthentic ( HttpServletRequest req ); - /** - * Check for a ctx being authenticate. If it is, return the API key. If not, return null. - * @param ctx - * @return the API key for an authentication request, or null - */ - K authenticate ( DMaaPContext ctx ); - - - void addAuthenticator(DMaaPAuthenticator a); - -} diff --git a/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java b/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java deleted file mode 100644 index 0ae0839..0000000 --- a/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java +++ /dev/null @@ -1,133 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.security; - -import java.util.LinkedList; - -import javax.servlet.http.HttpServletRequest; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.db.NsaApiDb; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -/** - * - * @author anowarul.islam - * - * @param - */ -public class DMaaPAuthenticatorImpl implements DMaaPAuthenticator { - - private final LinkedList> fAuthenticators; - - - - // Setting timeout to a large value for testing purpose. - - // 10 minutes - private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10 * 10 * 10 * 10 * 10; - - /** - * Construct the security manager against an API key database - * - * @param db - * the API key db - */ - public DMaaPAuthenticatorImpl(NsaApiDb db) { - this(db, kDefaultRequestTimeWindow); - } - - - - - /** - * Construct the security manager against an API key database with a - * specific request time window size - * - * @param db - * the API key db - * @param authTimeWindowMs - * the size of the time window for request authentication - */ - public DMaaPAuthenticatorImpl(NsaApiDb db, long authTimeWindowMs) { - fAuthenticators = new LinkedList<>(); - - fAuthenticators.add(new DMaaPOriginalUebAuthenticator(db, authTimeWindowMs)); - } - - /** - * Authenticate a user's request. This method returns the API key if the - * user is authentic, null otherwise. 
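A minimal wiring sketch for the chain described here; apiKeyDb stands for an assumed NsaApiDb<NsaSimpleApiKey> instance and the extra authenticator is optional:

    DMaaPAuthenticatorImpl<NsaSimpleApiKey> auth = new DMaaPAuthenticatorImpl<>(apiKeyDb);
    auth.addAuthenticator(new DMaaPMechIdAuthenticator<>());   // append another scheme
    NsaSimpleApiKey user = auth.authenticate(dmaapContext);    // null => no authenticator accepted the request

Handlers can also use the static helper DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx), which resolves the same chain via the ConfigReader held in the DMaaPContext.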
- * - * @param ctx - * @return an api key record, or null - */ - public K authenticate(DMaaPContext ctx) { - final HttpServletRequest req = ctx.getRequest(); - for (DMaaPAuthenticator a : fAuthenticators) { - if (a.qualify(req)) { - final K k = a.isAuthentic(req); - if (k != null) - return k; - } - // else: this request doesn't look right to the authenticator - } - return null; - } - - /** - * Get the user associated with the incoming request, or null if the user is - * not authenticated. - * - * @param ctx - * @return - */ - public static NsaSimpleApiKey getAuthenticatedUser(DMaaPContext ctx) { - final DMaaPAuthenticator m = ctx.getConfigReader().getfSecurityManager(); - return m.authenticate(ctx); - } - - /** - * method by default returning false - * @param req - * @return false - */ - public boolean qualify(HttpServletRequest req) { - return false; - } -/** - * method by default returning null - * @param req - * @return null - */ - public K isAuthentic(HttpServletRequest req) { - return null; - } - - public void addAuthenticator ( DMaaPAuthenticator a ) - { - this.fAuthenticators.add(a); - } - -} diff --git a/src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java b/src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java deleted file mode 100644 index 64dbc14..0000000 --- a/src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java +++ /dev/null @@ -1,87 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.security.impl; - -import javax.servlet.http.HttpServletRequest; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.security.DMaaPAuthenticator; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.authenticators.MechIdAuthenticator; - -/** - * An authenticator for AT&T MechIds. - * - * @author peter - * - * @param - */ -public class DMaaPMechIdAuthenticator implements DMaaPAuthenticator { - -/** - * This is not yet implemented. 
by refault its returing false - * @param req HttpServletRequest - * @return false - */ - public boolean qualify (HttpServletRequest req) { - // we haven't implemented anything here yet, so there's no qualifying request - return false; - } -/** - * This metod authenticate the mech id - * @param req - * @return APIkey or null - */ - public K isAuthentic (HttpServletRequest req) { - final String remoteAddr = req.getRemoteAddr(); - authLog ( "MechId auth is not yet implemented.", remoteAddr ); - return null; - } - - private static void authLog ( String msg, String remoteAddr ) - { - log.info ( "AUTH-LOG(" + remoteAddr + "): " + msg ); - } - - - //private static final Logger log = Logger.getLogger( MechIdAuthenticator.class.toString()); - private static final EELFLogger log = EELFManager.getInstance().getLogger(MechIdAuthenticator.class); -/** - * Curently its not yet implemented returning null - * @param ctx DMaaP context - * @return APIkey or null - */ - @Override - public K authenticate(DMaaPContext ctx) { - // TODO Auto-generated method stub - return null; - } -@Override -public void addAuthenticator(DMaaPAuthenticator a) { - // TODO Auto-generated method stub - -} - -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java b/src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java deleted file mode 100644 index b1e28e7..0000000 --- a/src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java +++ /dev/null @@ -1,293 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.security.impl; - -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Date; - -import javax.servlet.http.HttpServletRequest; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.security.DMaaPAuthenticator; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.till.data.sha1HmacSigner; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.db.NsaApiDb; - -/** - * This authenticator handles an AWS-like authentication, originally used by the - * Cambria server (the API server for UEB). 
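A hedged client-side sketch of the scheme this class verifies; the header names, signed-content layout, and the sha1HmacSigner helper are taken from the code below, while the credentials and the choice of date format are illustrative:

    final String apiKey    = "myApiKey";       // assumed credentials
    final String apiSecret = "myApiSecret";
    final String date  = new java.text.SimpleDateFormat(
            "EEE, dd MMM yyyy HH:mm:ss zzz", java.util.Locale.US).format(new java.util.Date());
    final String nonce = java.util.UUID.randomUUID().toString();   // optional

    // signed content is the date string, plus ":" + nonce when a nonce is sent
    final String signature = com.att.nsa.drumlin.till.data.sha1HmacSigner.sign(date + ":" + nonce, apiSecret);

    // sent as X-CambriaAuth (or X-Auth), X-CambriaDate (or X-Date / Date) and X-Nonce
    final String xAuth = apiKey + ":" + signature;
    final String xDate = date;

The server rejects the request when its own clock differs from the supplied date by more than fRequestTimeWindowMs, when the API key is unknown, or when the recomputed signature does not match.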
- * - * @author peter - * - * @param - */ -public class DMaaPOriginalUebAuthenticator implements DMaaPAuthenticator { - /** - * constructor initialization - * - * @param db - * @param requestTimeWindowMs - */ - public DMaaPOriginalUebAuthenticator(NsaApiDb db, long requestTimeWindowMs) { - fDb = db; - fRequestTimeWindowMs = requestTimeWindowMs; - - - - - } - - @Override - public boolean qualify(HttpServletRequest req) { - // accept anything that comes in with X-(Cambria)Auth in the header - final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" }); - return xAuth != null; - } - - /** - * method for authentication - * - * @param req - * @return - */ - public K isAuthentic(HttpServletRequest req) { - final String remoteAddr = req.getRemoteAddr(); - // Cambria originally used "Cambria..." headers, but as the API key - // system is now more - // general, we take either form. - final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" }); - final String xDate = getFirstHeader(req, new String[] { "X-CambriaDate", "X-Date" }); - - final String httpDate = req.getHeader("Date"); - - final String xNonce = getFirstHeader(req, new String[] { "X-Nonce" }); - return authenticate(remoteAddr, xAuth, xDate, httpDate, xNonce); - } - - /** - * Authenticate a user's request. This method returns the API key if the - * user is authentic, null otherwise. - * - * @param remoteAddr - * @param xAuth - * @param xDate - * @param httpDate - * @param nonce - * @return an api key record, or null - */ - public K authenticate(String remoteAddr, String xAuth, String xDate, String httpDate, String nonce) { - if (xAuth == null) { - authLog("No X-Auth header on request", remoteAddr); - return null; - } - - final String[] xAuthParts = xAuth.split(":"); - if (xAuthParts.length != 2) { - authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr); - return null; - } - - - // get the api key and signature - final String clientApiKey = xAuthParts[0]; - final String clientApiHash = xAuthParts[1]; - if (clientApiKey.length() == 0 || clientApiHash.length() == 0) { - authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr); - return null; - } - // if the user provided X-Date, use that. Otherwise, go for Date - final String dateString = xDate != null ? xDate : httpDate; - final Date clientDate = getClientDate(dateString); - if (clientDate == null) { - authLog("Couldn't parse client date '" + dateString + "'. Preferring X-Date over Date.", remoteAddr); - return null; - } - // check the time range - final long nowMs = System.currentTimeMillis(); - final long diffMs = Math.abs(nowMs - clientDate.getTime()); - if (diffMs > fRequestTimeWindowMs) { - authLog("Client date is not in acceptable range of server date. 
Client:" + clientDate.getTime() - + ", Server: " + nowMs + ", Threshold: " + fRequestTimeWindowMs + ".", remoteAddr); - return null; - } - K apiRecord; - try { - apiRecord = fDb.loadApiKey(clientApiKey); - if (apiRecord == null) { - authLog("No such API key " + clientApiKey, remoteAddr); - return null; - } - } catch (ConfigDbException e) { - authLog("Couldn't load API key " + clientApiKey + ": " + e.getMessage(), remoteAddr); - return null; - } - // make the signed content - final StringBuilder sb = new StringBuilder(); - sb.append(dateString); - if (nonce != null) { - sb.append(":"); - sb.append(nonce); - } - final String signedContent = sb.toString(); - // now check the signed date string - final String serverCalculatedSignature = sha1HmacSigner.sign(signedContent, apiRecord.getSecret()); - if (serverCalculatedSignature == null || !serverCalculatedSignature.equals(clientApiHash)) { - authLog("Signatures don't match. Rec'd " + clientApiHash + ", expect " + serverCalculatedSignature + ".", - remoteAddr); - return null; - } - authLog("authenticated " + apiRecord.getKey(), remoteAddr); - return apiRecord; - } - - /** - * Get the first value of the first existing header from the headers list - * - * @param req - * @param headers - * @return a header value, or null if none exist - */ - private static String getFirstHeader(HttpServletRequest req, String[] headers) { - for (String header : headers) { - final String result = req.getHeader(header); - if (result != null) - return result; - } - return null; - } - - /** - * Parse the date string into a Date using one of the supported date - * formats. - * - * @param dateHeader - * @return a date, or null - */ - private static Date getClientDate(String dateString) { - if (dateString == null) { - return null; - } - - // parse the date - Date result = null; - for (String dateFormat : kDateFormats) { - final SimpleDateFormat parser = new SimpleDateFormat(dateFormat, java.util.Locale.US); - if (!dateFormat.contains("z") && !dateFormat.contains("Z")) { - parser.setTimeZone(TIMEZONE_GMT); - } - - try { - result = parser.parse(dateString); - break; - } catch (ParseException e) { - // presumably wrong format - } - } - return result; - } - - private static void authLog(String msg, String remoteAddr) { - log.info("AUTH-LOG(" + remoteAddr + "): " + msg); - } - - private final NsaApiDb fDb; - private final long fRequestTimeWindowMs; - - private static final java.util.TimeZone TIMEZONE_GMT = java.util.TimeZone.getTimeZone("GMT"); - - private static final String kDateFormats[] = - { - // W3C date format (RFC 3339). - "yyyy-MM-dd'T'HH:mm:ssz", - "yyyy-MM-dd'T'HH:mm:ssXXX", // as of Java 7, reqd to handle colon in TZ offset - - // Preferred HTTP date format (RFC 1123). - "EEE, dd MMM yyyy HH:mm:ss zzz", - - // simple unix command line 'date' format - "EEE MMM dd HH:mm:ss z yyyy", - - // Common date format (RFC 822). - "EEE, dd MMM yy HH:mm:ss z", - "EEE, dd MMM yy HH:mm z", - "dd MMM yy HH:mm:ss z", - "dd MMM yy HH:mm z", - - // Obsoleted HTTP date format (ANSI C asctime() format). - "EEE MMM dd HH:mm:ss yyyy", - - // Obsoleted HTTP date format (RFC 1036). 
- "EEEE, dd-MMM-yy HH:mm:ss zzz", - }; - - - - - - - - - - - - - - - - - - - - // logger declaration - - private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPOriginalUebAuthenticator.class); - @Override - - // TODO Auto-generated method stub - - //} - - public K authenticate(DMaaPContext ctx) { - - - - - - - - - - - return null; - } - - - public void addAuthenticator ( DMaaPAuthenticator a ) - { - - } - -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/service/AdminService.java b/src/main/java/com/att/dmf/mr/service/AdminService.java deleted file mode 100644 index aaf7c0b..0000000 --- a/src/main/java/com/att/dmf/mr/service/AdminService.java +++ /dev/null @@ -1,83 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.service; - -import java.io.IOException; - -import org.json.JSONException; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -/** - * @author muzainulhaque.qazi - * - */ -public interface AdminService { - /** - * method provide consumerCache - * - * @param dMaaPContext - * @throws IOException - */ - void showConsumerCache(DMaaPContext dMaaPContext) throws IOException,AccessDeniedException; - - /** - * method drops consumer cache - * - * @param dMaaPContext - * @throws JSONException - * @throws IOException - */ - void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException,AccessDeniedException; - - - /** - * Get list of blacklisted ips - * @param dMaaPContext context - * @throws IOException ex - * @throws AccessDeniedException ex - */ - void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException; - - /** - * Add ip to blacklist - * @param dMaaPContext context - * @param ip ip - * @throws IOException ex - * @throws ConfigDbException ex - * @throws AccessDeniedException ex - */ - void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException; - - /** - * Remove ip from blacklist - * @param dMaaPContext context - * @param ip ip - * @throws IOException ex - * @throws ConfigDbException ex - * @throws AccessDeniedException ex - */ - void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException; - -} diff --git a/src/main/java/com/att/dmf/mr/service/ApiKeysService.java b/src/main/java/com/att/dmf/mr/service/ApiKeysService.java deleted file mode 100644 index 57fc8be..0000000 --- a/src/main/java/com/att/dmf/mr/service/ApiKeysService.java +++ /dev/null @@ -1,105 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.service; - -import java.io.IOException; - -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.security.db.NsaApiDb.KeyExistsException; - -/** - * Declaring all the method in interface that is mainly used for authentication - * purpose. - * - * - */ - -public interface ApiKeysService { - /** - * This method declaration for getting all ApiKey that has generated on - * server. - * - * @param dmaapContext - * @throws ConfigDbException - * @throws IOException - */ - - public void getAllApiKeys(DMaaPContext dmaapContext) - throws ConfigDbException, IOException; - - /** - * Getting information about specific ApiKey - * - * @param dmaapContext - * @param apikey - * @throws ConfigDbException - * @throws IOException - */ - - public void getApiKey(DMaaPContext dmaapContext, String apikey) - throws ConfigDbException, IOException; - - /** - * Thid method is used for create a particular ApiKey - * - * @param dmaapContext - * @param nsaApiKey - * @throws KeyExistsException - * @throws ConfigDbException - * @throws IOException - */ - - public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey) - throws KeyExistsException, ConfigDbException, IOException; - - /** - * This method is used for update ApiKey that is already generated on - * server. - * - * @param dmaapContext - * @param apikey - * @param nsaApiKey - * @throws ConfigDbException - * @throws IOException - * @throws AccessDeniedException - * @throws com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException - */ - public void updateApiKey(DMaaPContext dmaapContext, String apikey, - ApiKeyBean nsaApiKey) throws ConfigDbException, IOException,AccessDeniedException - ; - - /** - * This method is used for delete specific ApiKey - * - * @param dmaapContext - * @param apikey - * @throws ConfigDbException - * @throws IOException - * @throws AccessDeniedException - */ - - public void deleteApiKey(DMaaPContext dmaapContext, String apikey) - throws ConfigDbException, IOException,AccessDeniedException; -} diff --git a/src/main/java/com/att/dmf/mr/service/EventsService.java b/src/main/java/com/att/dmf/mr/service/EventsService.java deleted file mode 100644 index 2f89bd2..0000000 --- a/src/main/java/com/att/dmf/mr/service/EventsService.java +++ /dev/null @@ -1,75 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
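A compact sketch of the create/fetch/delete flow declared by the ApiKeysService interface above. The ApiKeyBean is taken as a parameter because its constructor is not shown in these hunks (the implementation relies only on its getKey/getSharedSecret/getEmail/getDescription accessors); the service instance is likewise passed in, and ApiKeyFlowSketch and the key name are hypothetical.

import java.io.IOException;
import com.att.dmf.mr.beans.ApiKeyBean;
import com.att.dmf.mr.beans.DMaaPContext;
import com.att.dmf.mr.service.ApiKeysService;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
import com.att.nsa.security.db.NsaApiDb.KeyExistsException;

public class ApiKeyFlowSketch {
    private final ApiKeysService apiKeysService;

    public ApiKeyFlowSketch(ApiKeysService apiKeysService) {
        this.apiKeysService = apiKeysService;
    }

    public void lifecycle(DMaaPContext ctx, ApiKeyBean bean, String keyName)
            throws KeyExistsException, ConfigDbException, IOException, AccessDeniedException {
        apiKeysService.createApiKey(ctx, bean);    // responds with the new key; the secret is emailed when an address is supplied
        apiKeysService.getApiKey(ctx, keyName);    // look a key up by name
        apiKeysService.deleteApiKey(ctx, keyName); // only the owning key may delete itself
    }
}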
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.service; - -import java.io.IOException; -import java.io.InputStream; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -/** - * - * @author anowarul.islam - * - */ -public interface EventsService { - /** - * - * @param ctx - * @param topic - * @param consumerGroup - * @param clientId - * @throws ConfigDbException - * @throws TopicExistsException - * @throws AccessDeniedException - * @throws UnavailableException - * @throws CambriaApiException - * @throws IOException - */ - public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId) - throws ConfigDbException, TopicExistsException,UnavailableException, - CambriaApiException, IOException,AccessDeniedException; - - /** - * - * @param ctx - * @param topic - * @param msg - * @param defaultPartition - * @param requestTime - * @throws ConfigDbException - * @throws AccessDeniedException - * @throws TopicExistsException - * @throws CambriaApiException - * @throws IOException - */ - public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, - final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, - CambriaApiException, IOException,missingReqdSetting; - -} diff --git a/src/main/java/com/att/dmf/mr/service/MMService.java b/src/main/java/com/att/dmf/mr/service/MMService.java deleted file mode 100644 index ae01bbf..0000000 --- a/src/main/java/com/att/dmf/mr/service/MMService.java +++ /dev/null @@ -1,66 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
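A minimal publish-then-subscribe sketch against the EventsService contract above. Only the call shape is shown: the topic name, consumer group and client id are placeholders, the defaultPartition and requestTime arguments are passed as empty placeholders, and limits, timeouts and authorization are resolved inside the implementation (EventsServiceImpl, later in this patch) rather than here.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
import com.att.dmf.mr.beans.DMaaPContext;
import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
import com.att.dmf.mr.service.EventsService;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;

public class EventsFlowSketch {
    private final EventsService eventsService;

    public EventsFlowSketch(EventsService eventsService) {
        this.eventsService = eventsService;
    }

    public void roundTrip(DMaaPContext ctx) throws ConfigDbException, TopicExistsException,
            AccessDeniedException, UnavailableException, CambriaApiException, IOException, missingReqdSetting {
        InputStream msg = new ByteArrayInputStream("{\"event\":\"hello\"}".getBytes());
        // publish one message; partition and request time are placeholders handled by the implementation
        eventsService.pushEvents(ctx, "sample.topic", msg, "", "");
        // stream messages for this consumer group/client back on the current request
        eventsService.getEvents(ctx, "sample.topic", "cg1", "client-1");
    }
}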
- * - *******************************************************************************/ -package com.att.dmf.mr.service; - -import java.io.IOException; -import java.io.InputStream; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -/** - * Contains the logic for executing calls to the Mirror Maker agent tool. - * - * @author Kawsar Jahan - * - * @since May 25, 2016 - */ - -public interface MMService { - - /* - * this method calls the add white list method of a Mirror Maker agent API - */ - public void addWhiteList(); - - /* - * this method calls the remove white list method of a Mirror Maker agent API - */ - public void removeWhiteList(); - - /* - * This method calls the list white list method of a Mirror Maker agent API - */ - public void listWhiteList(); - - public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) throws ConfigDbException, TopicExistsException, - AccessDeniedException, UnavailableException, CambriaApiException, IOException; - - public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, - final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, - CambriaApiException, IOException, missingReqdSetting; -} diff --git a/src/main/java/com/att/dmf/mr/service/MetricsService.java b/src/main/java/com/att/dmf/mr/service/MetricsService.java deleted file mode 100644 index b6cc60d..0000000 --- a/src/main/java/com/att/dmf/mr/service/MetricsService.java +++ /dev/null @@ -1,54 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
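The MMService contract above mirrors EventsService but, per its signature, subscribe returns the result as a String (it backs the Mirror Maker agent calls). A small sketch of the subscribe side, with placeholder topic/group/client values and the service instance passed in:

import java.io.IOException;
import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
import com.att.dmf.mr.beans.DMaaPContext;
import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
import com.att.dmf.mr.service.MMService;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;

public class MirrorMakerSubscribeSketch {
    private final MMService mmService;

    public MirrorMakerSubscribeSketch(MMService mmService) {
        this.mmService = mmService;
    }

    public String readAgentResponse(DMaaPContext ctx) throws ConfigDbException, TopicExistsException,
            AccessDeniedException, UnavailableException, CambriaApiException, IOException {
        // subscribe returns its payload as a String instead of writing a stream (see the interface above)
        return mmService.subscribe(ctx, "mirrormaker.agent.topic", "mm-agent-group", "mm-client-1");
    }
}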
- * - *******************************************************************************/ -package com.att.dmf.mr.service; - -/** - * @author amol.ramesh.dalne - * - */ -import java.io.IOException; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; - -/** - * - * @author anowarul.islam - * - */ -public interface MetricsService { - /** - * - * @param ctx - * @throws IOException - */ - public void get(DMaaPContext ctx) throws IOException; - - /** - * - * @param ctx - * @param name - * @throws IOException - * @throws CambriaApiException - */ - public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException; -} diff --git a/src/main/java/com/att/dmf/mr/service/TopicService.java b/src/main/java/com/att/dmf/mr/service/TopicService.java deleted file mode 100644 index b42d9c9..0000000 --- a/src/main/java/com/att/dmf/mr/service/TopicService.java +++ /dev/null @@ -1,176 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
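MetricsService above exposes only two read operations. A short sketch follows; "transaction" is a placeholder metric name and the service instance is passed in rather than wired.

import java.io.IOException;
import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.beans.DMaaPContext;
import com.att.dmf.mr.service.MetricsService;

public class MetricsSketch {
    public void dump(MetricsService metricsService, DMaaPContext ctx)
            throws IOException, CambriaApiException {
        metricsService.get(ctx);                             // respond with all metrics
        metricsService.getMetricByName(ctx, "transaction");  // respond with one named metric (placeholder name)
    }
}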
- * - *******************************************************************************/ -package com.att.dmf.mr.service; - -import java.io.IOException; - -import org.json.JSONException; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.TopicBean; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -/** - * interface provide all the topic related operations - * - * @author anowarul.islam - * - */ -public interface TopicService { - /** - * method fetch details of all the topics - * - * @param dmaapContext - * @throws JSONException - * @throws ConfigDbException - * @throws IOException - */ - void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException; - void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException; - - /** - * method fetch details of specific topic - * - * @param dmaapContext - * @param topicName - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - */ - void getTopic(DMaaPContext dmaapContext, String topicName) - throws ConfigDbException, IOException, TopicExistsException; - - /** - * method used to create the topic - * - * @param dmaapContext - * @param topicBean - * @throws CambriaApiException - * @throws TopicExistsException - * @throws IOException - * @throws AccessDeniedException - * @throws JSONException - */ - - void createTopic(DMaaPContext dmaapContext, TopicBean topicBean) - throws CambriaApiException, TopicExistsException, IOException, AccessDeniedException; - - /** - * method used to delete to topic - * - * @param dmaapContext - * @param topicName - * @throws IOException - * @throws AccessDeniedException - * @throws ConfigDbException - * @throws CambriaApiException - * @throws TopicExistsException - */ - - void deleteTopic(DMaaPContext dmaapContext, String topicName) - throws IOException, AccessDeniedException, ConfigDbException, CambriaApiException, TopicExistsException; - - /** - * method provides list of all the publishers associated with a topic - * - * @param dmaapContext - * @param topicName - * @throws IOException - * @throws ConfigDbException - * @throws TopicExistsException - */ - void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName) - throws IOException, ConfigDbException, TopicExistsException; - - /** - * method provides details of all the consumer associated with a specific - * topic - * - * @param dmaapContext - * @param topicName - * @throws IOException - * @throws ConfigDbException - * @throws TopicExistsException - */ - void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName) - throws IOException, ConfigDbException, TopicExistsException; - - /** - * method provides publishing right to a specific topic - * - * @param dmaapContext - * @param topicName - * @param producerId - * @throws AccessDeniedException - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - */ - void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) - throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException; - - /** - * method denies any specific publisher from a topic - * - * @param dmaapContext - * @param topicName - * @param producerId - * @throws AccessDeniedException - * @throws ConfigDbException - * @throws 
IOException - * @throws TopicExistsException - */ - void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) - throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException; - - /** - * method provide consuming right to a specific user on a topic - * - * @param dmaapContext - * @param topicName - * @param consumerId - * @throws AccessDeniedException - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - */ - void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) - throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException; - - /** - * method denies a particular user's consuming right on a topic - * - * @param dmaapContext - * @param topicName - * @param consumerId - * @throws AccessDeniedException - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - */ - void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) - throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException; - -} diff --git a/src/main/java/com/att/dmf/mr/service/TransactionService.java b/src/main/java/com/att/dmf/mr/service/TransactionService.java deleted file mode 100644 index f2763a6..0000000 --- a/src/main/java/com/att/dmf/mr/service/TransactionService.java +++ /dev/null @@ -1,61 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
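A sketch of a topic's administrative lifecycle using only the TopicService methods declared above. The TopicBean is accepted as a parameter because its fields and setters are not shown in these hunks, the producer id and topic name are placeholders, and TopicAdminSketch is a hypothetical class.

import java.io.IOException;
import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.beans.DMaaPContext;
import com.att.dmf.mr.beans.TopicBean;
import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
import com.att.dmf.mr.service.TopicService;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;

public class TopicAdminSketch {
    private final TopicService topicService;

    public TopicAdminSketch(TopicService topicService) {
        this.topicService = topicService;
    }

    public void provision(DMaaPContext ctx, TopicBean bean, String topicName, String producerId)
            throws CambriaApiException, TopicExistsException, IOException, AccessDeniedException, ConfigDbException {
        topicService.createTopic(ctx, bean);                               // create the topic
        topicService.permitPublisherForTopic(ctx, topicName, producerId);  // grant publish rights
        topicService.getPublishersByTopicName(ctx, topicName);             // list publishers
        topicService.getTopic(ctx, topicName);                             // fetch topic details
    }
}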
- * - *******************************************************************************/ -package com.att.dmf.mr.service; - -import java.io.IOException; - -import com.att.aft.dme2.internal.jettison.json.JSONException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.nsa.configs.ConfigDbException; - -/** - * - * @author anowarul.islam - * - */ -public interface TransactionService { - /** - * - * @param trnObj - */ - void checkTransaction(TransactionObj trnObj); - - /** - * - * @param dmaapContext - * @throws ConfigDbException - * @throws IOException - */ - void getAllTransactionObjs(DMaaPContext dmaapContext) throws ConfigDbException, IOException; - - /** - * - * @param dmaapContext - * @param transactionId - * @throws ConfigDbException - * @throws JSONException - * @throws IOException - */ - void getTransactionObj(DMaaPContext dmaapContext, String transactionId) - throws ConfigDbException, JSONException, IOException; -} diff --git a/src/main/java/com/att/dmf/mr/service/UIService.java b/src/main/java/com/att/dmf/mr/service/UIService.java deleted file mode 100644 index 1155a2a..0000000 --- a/src/main/java/com/att/dmf/mr/service/UIService.java +++ /dev/null @@ -1,92 +0,0 @@ -/** - * - */ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.service; - -import java.io.IOException; - -import org.apache.kafka.common.errors.TopicExistsException; -import org.json.JSONException; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.nsa.configs.ConfigDbException; -/** - * @author muzainulhaque.qazi - * - */ -public interface UIService { - /** - * Returning template of hello page. 
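A sketch of reading transaction records through the TransactionService interface above; the transaction id is a placeholder and the service instance is passed in rather than wired.

import java.io.IOException;
import com.att.aft.dme2.internal.jettison.json.JSONException;
import com.att.dmf.mr.beans.DMaaPContext;
import com.att.dmf.mr.service.TransactionService;
import com.att.nsa.configs.ConfigDbException;

public class TransactionLookupSketch {
    public void lookup(TransactionService transactionService, DMaaPContext ctx)
            throws ConfigDbException, JSONException, IOException {
        transactionService.getAllTransactionObjs(ctx);               // every stored transaction
        transactionService.getTransactionObj(ctx, "sample-txn-id");  // placeholder transaction id
    }
}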
- * - * @param dmaapContext - * @throws IOException - */ - void hello(DMaaPContext dmaapContext) throws IOException; - - /** - * Fetching list of all api keys and returning in a templated form for - * display - * - * @param dmaapContext - * @throws ConfigDbException - * @throws IOException - */ - void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, - IOException; - - /** - * Fetching detials of apikey in a templated form for display - * - * @param dmaapContext - * @param apiKey - * @throws Exception - */ - void getApiKey(DMaaPContext dmaapContext, final String apiKey) - throws CambriaApiException, ConfigDbException, JSONException, IOException; - - /** - * Fetching list of all the topics and returning in a templated form for - * display - * - * @param dmaapContext - * @throws ConfigDbException - * @throws IOException - */ - void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, - IOException; - - /** - * Fetching detials of topic in a templated form for display - * - * @param dmaapContext - * @param topic - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - */ - void getTopic(DMaaPContext dmaapContext, final String topic) - throws ConfigDbException, IOException, TopicExistsException; - -} diff --git a/src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java deleted file mode 100644 index f7c48de..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java +++ /dev/null @@ -1,190 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.io.IOException; -import java.util.Collection; -import java.util.Set; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; -import org.springframework.stereotype.Component; - -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.service.AdminService; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.limits.Blacklist; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - - -/** - * @author muzainulhaque.qazi - * - */ -@Component -public class AdminServiceImpl implements AdminService { - - //private Logger log = Logger.getLogger(AdminServiceImpl.class.toString()); - private static final EELFLogger log = EELFManager.getInstance().getLogger(AdminServiceImpl.class); - /** - * getConsumerCache returns consumer cache - * @param dMaaPContext context - * @throws IOException ex - * @throws AccessDeniedException - */ - @Override - public void showConsumerCache(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException { - adminAuthenticate(dMaaPContext); - - JSONObject consumers = new JSONObject(); - JSONArray jsonConsumersList = new JSONArray(); - - for (Consumer consumer : getConsumerFactory(dMaaPContext).getConsumers()) { - JSONObject consumerObject = new JSONObject(); - consumerObject.put("name", consumer.getName()); - consumerObject.put("created", consumer.getCreateTimeMs()); - consumerObject.put("accessed", consumer.getLastAccessMs()); - jsonConsumersList.put(consumerObject); - } - - consumers.put("consumers", jsonConsumersList); - log.info("========== AdminServiceImpl: getConsumerCache: " + jsonConsumersList.toString() + "==========="); - DMaaPResponseBuilder.respondOk(dMaaPContext, consumers); - } - - /** - * - * dropConsumerCache() method clears consumer cache - * @param dMaaPContext context - * @throws JSONException ex - * @throws IOException ex - * @throws AccessDeniedException - * - */ - @Override - public void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException, AccessDeniedException { - adminAuthenticate(dMaaPContext); - getConsumerFactory(dMaaPContext).dropCache(); - DMaaPResponseBuilder.respondOkWithHtml(dMaaPContext, "Consumer cache cleared successfully"); - // log.info("========== AdminServiceImpl: dropConsumerCache: Consumer - // Cache successfully dropped.==========="); - } - - /** - * getfConsumerFactory returns CosnumerFactory details - * @param dMaaPContext contxt - * @return ConsumerFactory obj - * - */ - private ConsumerFactory getConsumerFactory(DMaaPContext dMaaPContext) { - return dMaaPContext.getConfigReader().getfConsumerFactory(); - } - - /** - * return ipblacklist - * @param dMaaPContext context - * @return blacklist obj - */ - private static Blacklist getIpBlacklist(DMaaPContext dMaaPContext) { - return dMaaPContext.getConfigReader().getfIpBlackList(); - } - - - /** - * Get list of blacklisted ips - */ - @Override - public void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException - { - adminAuthenticate ( dMaaPContext ); - - 
DMaaPResponseBuilder.respondOk ( dMaaPContext, - new JSONObject().put ( "blacklist", - setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) ); - } - - public static JSONArray setToJsonArray ( Set fields ) - { - return collectionToJsonArray ( fields ); - } - - public static JSONArray collectionToJsonArray ( Collection fields ) - { - final JSONArray a = new JSONArray (); - if ( fields != null ) - { - for ( Object o : fields ) - { - a.put ( o ); - } - } - return a; - } - - /** - * Add ip to blacklist - */ - @Override - public void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException - { - adminAuthenticate ( dMaaPContext ); - - getIpBlacklist (dMaaPContext).add ( ip ); - DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext ); - } - - /** - * Remove ip from blacklist - */ - @Override - public void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException - { - adminAuthenticate ( dMaaPContext ); - - getIpBlacklist (dMaaPContext).remove ( ip ); - DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext ); - } - - /** - * Authenticate if user is admin - * @param dMaaPContext context - * @throws AccessDeniedException ex - */ - private static void adminAuthenticate ( DMaaPContext dMaaPContext ) throws AccessDeniedException - { - - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext); - if ( user == null || !user.getKey ().equals ( "admin" ) ) - { - throw new AccessDeniedException (); - } - } - -} diff --git a/src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java deleted file mode 100644 index b0e8a86..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java +++ /dev/null @@ -1,320 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.io.IOException; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.springframework.stereotype.Service; - -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.service.ApiKeysService; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.dmf.mr.utils.Emailer; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.service.standards.HttpStatusCodes; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.security.db.NsaApiDb; -import com.att.nsa.security.db.NsaApiDb.KeyExistsException; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -/** - * Implementation of the ApiKeysService, this will provide the below operations, - * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey - * - * @author nilanjana.maity - */ -@Service -public class ApiKeysServiceImpl implements ApiKeysService { - - - private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysServiceImpl.class.toString()); - /** - * This method will provide all the ApiKeys present in kafka server. - * - * @param dmaapContext - * @throws ConfigDbException - * @throws IOException - */ - public void getAllApiKeys(DMaaPContext dmaapContext) - throws ConfigDbException, IOException { - - ConfigurationReader configReader = dmaapContext.getConfigReader(); - - log.info("configReader : " + configReader.toString()); - - final JSONObject result = new JSONObject(); - final JSONArray keys = new JSONArray(); - result.put("apiKeys", keys); - - NsaApiDb apiDb = configReader.getfApiKeyDb(); - - for (String key : apiDb.loadAllKeys()) { - keys.put(key); - } - log.info("========== ApiKeysServiceImpl: getAllApiKeys: Api Keys are : " - + keys.toString() + "==========="); - DMaaPResponseBuilder.respondOk(dmaapContext, result); - } - - /** - * @param dmaapContext - * @param apikey - * @throws ConfigDbException - * @throws IOException - */ - @Override - public void getApiKey(DMaaPContext dmaapContext, String apikey) - throws ConfigDbException, IOException { - - String errorMsg = "Api key name is not mentioned."; - int errorCode = HttpStatusCodes.k400_badRequest; - - if (null != apikey) { - NsaSimpleApiKey simpleApiKey = getApiKeyDb(dmaapContext) - .loadApiKey(apikey); - - - if (null != simpleApiKey) { - JSONObject result = simpleApiKey.asJsonObject(); - DMaaPResponseBuilder.respondOk(dmaapContext, result); - log.info("========== ApiKeysServiceImpl: getApiKey : " - + result.toString() + "==========="); - return; - } else { - errorMsg = "Api key [" + apikey + "] does not exist."; - errorCode = HttpStatusCodes.k404_notFound; - log.info("========== ApiKeysServiceImpl: getApiKey: Error : API Key does not exist. 
" - + "==========="); - DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode, - errorMsg); - throw new IOException(); - } - } - - } - - /** - * @param dmaapContext - * @param nsaApiKey - * @throws KeyExistsException - * @throws ConfigDbException - * @throws IOException - */ - @Override - public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey) - throws KeyExistsException, ConfigDbException, IOException { - - log.debug("TopicService: : createApiKey...."); - - String contactEmail = nsaApiKey.getEmail(); - final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ; - String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous"); - if(null==kSetting_AllowAnonymousKeys) kSetting_AllowAnonymousKeys ="false"; - - - if ( kSetting_AllowAnonymousKeys.equalsIgnoreCase("true") && !emailProvided ) - { - DMaaPResponseBuilder.respondWithErrorInJson(dmaapContext, 400, "You must provide an email address."); - return; - } - - - final NsaApiDb apiKeyDb = getApiKeyDb(dmaapContext); - String apiKey = nsaApiKey.getKey(); - String sharedSecret = nsaApiKey.getSharedSecret(); - final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey, - sharedSecret); - if (null != key) { - - if (null != nsaApiKey.getEmail()) { - key.setContactEmail(nsaApiKey.getEmail()); - } - - if (null != nsaApiKey.getDescription()) { - key.setDescription(nsaApiKey.getDescription()); - } - - log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : " - + key.toString() + "====="); - apiKeyDb.saveApiKey(key); - - // email out the secret to validate the email address - if ( emailProvided ) - { - String body = "\n" + "Your email address was provided as the creator of new API key \"" - + apiKey + "\".\n" + "\n" + "If you did not make this request, please let us know." - + " See http://sa2020.it.att.com:8888 for contact information, " + "but don't worry -" - + " the API key is useless without the information below, which has been provided " - + "only to you.\n" + "\n\n" + "For API key \"" + apiKey + "\", use API key secret:\n\n\t" - + sharedSecret + "\n\n" + "Note that it's normal to share the API key" - + " (" + apiKey + "). " - + "This is how you are granted access to resources " + "like a UEB topic or Flatiron scope. " - + "However, you should NOT share the API key's secret. " + "The API key is associated with your" - + " email alone. ALL access to data made with this " + "key will be your responsibility. If you " - + "share the secret, someone else can use the API key " + "to access proprietary data with your " - + "identity.\n" + "\n" + "Enjoy!\n" + "\n" + "The GFP/SA-2020 Team"; - - Emailer em = dmaapContext.getConfigReader().getSystemEmailer(); - em.send(contactEmail, "New API Key", body); - } - log.debug("TopicService: : sending response."); - - JSONObject o = key.asJsonObject(); - - o.put ( NsaSimpleApiKey.kApiSecretField, - emailProvided ? - "Emailed to " + contactEmail + "." 
: - key.getSecret () - ); - DMaaPResponseBuilder.respondOk(dmaapContext, - o); - - return; - } else { - log.debug("=======ApiKeysServiceImpl: createApiKey : Error in creating API Key.====="); - DMaaPResponseBuilder.respondWithError(dmaapContext, - HttpStatusCodes.k500_internalServerError, - "Failed to create api key."); - throw new KeyExistsException(apiKey); - } - } - - /** - * @param dmaapContext - * @param apikey - * @param nsaApiKey - * @throws ConfigDbException - * @throws IOException - * @throws AccessDeniedException - */ - @Override - public void updateApiKey(DMaaPContext dmaapContext, String apikey, - ApiKeyBean nsaApiKey) throws ConfigDbException, IOException, AccessDeniedException { - - String errorMsg = "Api key name is not mentioned."; - int errorCode = HttpStatusCodes.k400_badRequest; - - if (null != apikey) { - final NsaApiDb apiKeyDb = getApiKeyDb(dmaapContext); - final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey); - boolean shouldUpdate = false; - - if (null != key) { - final NsaApiKey user = DMaaPAuthenticatorImpl - .getAuthenticatedUser(dmaapContext); - - if (user == null || !user.getKey().equals(key.getKey())) { - throw new AccessDeniedException("You must authenticate with the key you'd like to update."); - } - - if (null != nsaApiKey.getEmail()) { - key.setContactEmail(nsaApiKey.getEmail()); - shouldUpdate = true; - } - - if (null != nsaApiKey.getDescription()) { - key.setDescription(nsaApiKey.getDescription()); - shouldUpdate = true; - } - - if (shouldUpdate) { - apiKeyDb.saveApiKey(key); - } - - log.info("======ApiKeysServiceImpl : updateApiKey : Key Updated Successfully :" - + key.toString() + "========="); - DMaaPResponseBuilder.respondOk(dmaapContext, - key.asJsonObject()); - return; - } - } else { - errorMsg = "Api key [" + apikey + "] does not exist."; - errorCode = HttpStatusCodes.k404_notFound; - DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode, - errorMsg); - log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============"); - throw new IOException(); - } - } - - /** - * @param dmaapContext - * @param apikey - * @throws ConfigDbException - * @throws IOException - * @throws AccessDeniedException - */ - @Override - public void deleteApiKey(DMaaPContext dmaapContext, String apikey) - throws ConfigDbException, IOException, AccessDeniedException { - - String errorMsg = "Api key name is not mentioned."; - int errorCode = HttpStatusCodes.k400_badRequest; - - if (null != apikey) { - final NsaApiDb apiKeyDb = getApiKeyDb(dmaapContext); - final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey); - - if (null != key) { - - final NsaApiKey user = DMaaPAuthenticatorImpl - .getAuthenticatedUser(dmaapContext); - if (user == null || !user.getKey().equals(key.getKey())) { - throw new AccessDeniedException("You don't own the API key."); - } - - apiKeyDb.deleteApiKey(key); - log.info("======ApiKeysServiceImpl : deleteApiKey : Deleted Key successfully.============"); - DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, - "Api key [" + apikey + "] deleted successfully."); - return; - } - } else { - errorMsg = "Api key [" + apikey + "] does not exist."; - errorCode = HttpStatusCodes.k404_notFound; - DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode, - errorMsg); - log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============"); - throw new IOException(); - } - } - - /** - * - * @param dmaapContext - * @return - */ - private NsaApiDb getApiKeyDb(DMaaPContext dmaapContext) { - ConfigurationReader 
configReader = dmaapContext.getConfigReader(); - return configReader.getfApiKeyDb(); - } - -} diff --git a/src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java b/src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java deleted file mode 100644 index 104d7de..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java +++ /dev/null @@ -1,153 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.util.Set; -import java.util.TreeSet; - -import com.att.dmf.mr.transaction.DMaaPTransactionFactory; -import com.att.dmf.mr.transaction.DMaaPTransactionObj; -import com.att.dmf.mr.transaction.DMaaPTransactionObjDB; -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.nsa.configs.ConfigDb; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.configs.ConfigPath; - -/** - * Persistent storage for Transaction objects built over an abstract config db. - * - * @author anowarul.islam - * - * @param - */ -public class BaseTransactionDbImpl implements DMaaPTransactionObjDB { - - private final ConfigDb fDb; - private final ConfigPath fBasePath; - private final DMaaPTransactionFactory fKeyFactory; - - private static final String kStdRootPath = "/transaction"; - - private ConfigPath makePath(String transactionId) { - return fBasePath.getChild(transactionId); - } - - /** - * Construct an Transaction db over the given config db at the standard - * location - * - * @param db - * @param keyFactory - * @throws ConfigDbException - */ - public BaseTransactionDbImpl(ConfigDb db, DMaaPTransactionFactory keyFactory) throws ConfigDbException { - this(db, kStdRootPath, keyFactory); - } - - /** - * Construct an Transaction db over the given config db using the given root - * location - * - * @param db - * @param rootPath - * @param keyFactory - * @throws ConfigDbException - */ - public BaseTransactionDbImpl(ConfigDb db, String rootPath, DMaaPTransactionFactory keyFactory) - throws ConfigDbException { - fDb = db; - fBasePath = db.parse(rootPath); - fKeyFactory = keyFactory; - - if (!db.exists(fBasePath)) { - db.store(fBasePath, ""); - } - } - - /** - * Create a new Transaction Obj. 
If one exists, - * - * @param id - * @return the new Transaction record - * @throws ConfigDbException - */ - public synchronized K createTransactionObj(String id) throws KeyExistsException, ConfigDbException { - final ConfigPath path = makePath(id); - if (fDb.exists(path)) { - throw new KeyExistsException(id); - } - - // make one, store it, return it - final K newKey = fKeyFactory.makeNewTransactionId(id); - fDb.store(path, newKey.serialize()); - return newKey; - } - - /** - * Save an Transaction record. This must be used after changing auxiliary - * data on the record. Note that the transaction object must exist (via - * createTransactionObj). - * - * @param transaction - * object - * @throws ConfigDbException - */ - @Override - public synchronized void saveTransactionObj(K trnObj) throws ConfigDbException { - final ConfigPath path = makePath(trnObj.getId()); - if (!fDb.exists(path) || !(trnObj instanceof TransactionObj)) { - throw new IllegalStateException(trnObj.getId() + " is not known to this database"); - } - fDb.store(path, ((TransactionObj) trnObj).serialize()); - } - - /** - * Load an Transaction record based on the Transaction Id value - * - * @param transactionId - * @return an Transaction Object record or null - * @throws ConfigDbException - */ - @Override - public synchronized K loadTransactionObj(String transactionId) throws ConfigDbException { - final String data = fDb.load(makePath(transactionId)); - if (data != null) { - return fKeyFactory.makeNewTransactionObj(data); - } - return null; - } - - /** - * Load all transactions known to this database. (This could be expensive.) - * - * @return a set of all Transaction objects - * @throws ConfigDbException - */ - public synchronized Set loadAllTransactionObjs() throws ConfigDbException { - final TreeSet result = new TreeSet<>(); - for (ConfigPath cp : fDb.loadChildrenNames(fBasePath)) { - result.add(cp.getName()); - } - return result; - } - -} diff --git a/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java deleted file mode 100644 index 73a373e..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java +++ /dev/null @@ -1,867 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
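BaseTransactionDbImpl above persists transaction objects under a ConfigDb path (default /transaction). A small create/load sketch follows; the ConfigDb handle and the transaction factory are taken as parameters because their construction is not shown in this hunk, the generic signatures are assumed (the angle brackets were dropped in the rendering above, so K is taken to bind to TransactionObj), and the broad throws Exception stands in for the KeyExistsException and ConfigDbException the class declares.

import com.att.dmf.mr.service.impl.BaseTransactionDbImpl;
import com.att.dmf.mr.transaction.DMaaPTransactionFactory;
import com.att.dmf.mr.transaction.TransactionObj;
import com.att.nsa.configs.ConfigDb;

public class TransactionDbSketch {
    public void createAndReload(ConfigDb db, DMaaPTransactionFactory<TransactionObj> factory) throws Exception {
        // stores records under the standard /transaction root
        BaseTransactionDbImpl<TransactionObj> txnDb = new BaseTransactionDbImpl<>(db, factory);
        TransactionObj created = txnDb.createTransactionObj("sample-txn-id"); // placeholder id; fails if it already exists
        txnDb.saveTransactionObj(created);                                    // persist any auxiliary fields set on the record
        TransactionObj reloaded = txnDb.loadTransactionObj("sample-txn-id");  // returns null when the id is unknown
        System.out.println(reloaded != null ? reloaded.getId() : "not found");
    }
}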
- * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.io.IOException; -import java.io.InputStream; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.Properties; - -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.core.MediaType; - -import org.apache.http.HttpStatus; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.errors.TopicExistsException; -import org.json.JSONObject; -import org.json.JSONTokener; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.stereotype.Service; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException; -import com.att.dmf.mr.backends.MetricsSet; -import com.att.dmf.mr.backends.Publisher; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2; -import com.att.dmf.mr.beans.DMaaPCambriaLimiter; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.LogDetails; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPAccessDeniedException; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.exception.DMaaPResponseCode; -import com.att.dmf.mr.exception.ErrorResponse; - -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.resources.CambriaEventSet; -import com.att.dmf.mr.resources.CambriaOutboundEventStream; -import com.att.dmf.mr.security.DMaaPAAFAuthenticator; -import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.service.EventsService; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.dmf.mr.utils.Utils; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.service.standards.MimeTypes; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.util.rrConvertor; - -/** - * This class provides the functinality to publish and subscribe message to - * kafka - * - * @author Ramkumar Sembaiyam - * - */ -@Service -public class EventsServiceImpl implements EventsService { - // private static final Logger LOG = - - private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class); - - private static final String BATCH_LENGTH = "event.batch.length"; - private static final String TRANSFER_ENCODING = "Transfer-Encoding"; - @Autowired - private DMaaPErrorMessages errorMessages; - - //@Autowired - - - // @Value("${metrics.send.cambria.topic}") - - - public DMaaPErrorMessages getErrorMessages() { - return errorMessages; - } - - public void 
setErrorMessages(DMaaPErrorMessages errorMessages) { - this.errorMessages = errorMessages; - } - - /** - * @param ctx - * @param topic - * @param consumerGroup - * @param clientId - * @throws ConfigDbException, - * TopicExistsException, AccessDeniedException, - * UnavailableException, CambriaApiException, IOException - * - * - */ - @Override - public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId) - throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, - CambriaApiException, IOException, DMaaPAccessDeniedException { - final long startTime = System.currentTimeMillis(); - final HttpServletRequest req = ctx.getRequest(); - - boolean isAAFTopic = false; - // was this host blacklisted? - final String remoteAddr = Utils.getRemoteAddress(ctx); - if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.", - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - - int limit = CambriaConstants.kNoLimit; - if (req.getParameter("limit") != null) { - limit = Integer.parseInt(req.getParameter("limit")); - } - - int timeoutMs = CambriaConstants.kNoTimeout; - String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout"); - if (strtimeoutMS != null) - timeoutMs = Integer.parseInt(strtimeoutMS); - // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", - - if (req.getParameter("timeout") != null) { - timeoutMs = Integer.parseInt(req.getParameter("timeout")); - } - - // By default no filter is applied if filter is not passed as a - // parameter in the request URI - String topicFilter = CambriaConstants.kNoFilter; - if (null != req.getParameter("filter")) { - topicFilter = req.getParameter("filter"); - } - // pretty to print the messaages in new line - String prettyval = "0"; - String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty"); - if (null != strPretty) - prettyval = strPretty; - - String metaval = "0"; - String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta"); - if (null != strmeta) - metaval = strmeta; - - final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval); - // withMeta to print offset along with message - final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval); - - final LogWrap logger = new LogWrap(topic, consumerGroup, clientId); - logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter + " from Remote host "+ctx.getRequest().getRemoteHost()); - - // is this user allowed to read this topic? - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); - final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); - - if (metatopic == null) { - // no such topic. 
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND, - DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), - errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()), - topic, null, null, consumerGroup + "/" + clientId, ctx.getRequest().getRemoteHost()); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "metrics.send.cambria.topic"); - if (null == metricTopicname) - metricTopicname = "msgrtr.apinode.metrics.dmaap"; - - if (null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) { - if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))) { - // check permissions - metatopic.checkUserRead(user); - } - } - // if headers are not provided then user will be null - if (user == null && null != ctx.getRequest().getHeader("Authorization")) { - // the topic name will be sent by the client - - DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); - String permission = aaf.aafPermissionString(topic, "sub"); - if (!aaf.aafAuthentication(ctx.getRequest(), permission)) { - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - errorMessages.getNotPermitted1() + " read " + errorMessages.getNotPermitted2() + topic + " on " - + permission, - null, Utils.getFormattedDate(new Date()), topic, null, null, consumerGroup + "/" + clientId, - ctx.getRequest().getRemoteHost()); - LOG.info(errRes.toString()); - throw new DMaaPAccessDeniedException(errRes); - - } - isAAFTopic = true; - } - final long elapsedMs1 = System.currentTimeMillis() - startTime; - logger.info("Time taken in getEvents Authorization " + elapsedMs1 + " ms for " + topic + " " + consumerGroup - + " " + clientId); - Consumer c = null; - - String lhostId = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "clusterhostid"); - if (null == lhostId) { - try { - lhostId = InetAddress.getLocalHost().getCanonicalHostName(); - } catch (UnknownHostException e) { - LOG.info("Unknown Host Exception error occured while getting getting hostid"); - } - - } - CambriaOutboundEventStream coes = null; - try { - final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); - final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter(); - rl.onCall(topic, consumerGroup, clientId, ctx.getRequest().getRemoteHost()); - c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs, - ctx.getRequest().getRemoteHost()); - coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs) - .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build(); - coes.setDmaapContext(ctx); - coes.setTopic(metatopic); - if (isTransEnabled() || isAAFTopic) { - coes.setTransEnabled(true); - } else { - coes.setTransEnabled(false); - } - coes.setTopicStyle(isAAFTopic); - final long elapsedMs2 = System.currentTimeMillis() - startTime; - logger.info("Time taken in getEvents getConsumerFor " + elapsedMs2 + " ms for " + topic + " " - + consumerGroup + " " + clientId); - - DMaaPResponseBuilder.setNoCacheHeadings(ctx); - - DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes); - // No IOException thrown during respondOkWithStream, so commit the - // new offsets to all the brokers - c.commitOffsets(); - final int sent = 
coes.getSentCount(); - - metricsSet.consumeTick(sent); - rl.onSend(topic, consumerGroup, clientId, sent); - final long elapsedMs = System.currentTimeMillis() - startTime; - logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset() + " for " - + topic + " " + consumerGroup + " " + clientId + " on to the server " - + ctx.getRequest().getRemoteHost()); - - } catch (UnavailableException excp) { - logger.warn(excp.getMessage(), excp); - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, - DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), - errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic, - null, null, consumerGroup + "-" + clientId, ctx.getRequest().getRemoteHost()); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - - } catch (java.util.ConcurrentModificationException excp1) { - LOG.info(excp1.getMessage() + " on " + topic + " " + consumerGroup + " ****** " + clientId + " from Remote " + ctx.getRequest().getRemoteHost()); - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT, - DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(), - "Couldn't respond to client, possibly because of consumer requests from more than one server. Please contact the MR team if this issue occurs continuously", null, - Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost()); - logger.info(errRes.toString()); - throw new CambriaApiException(errRes); - - } catch (CambriaApiException excp) { - LOG.info(excp.getMessage() + " on " + topic + " " + consumerGroup + " ****** " + clientId); - - throw excp; - } - catch (Exception excp) { - // System.out.println(excp + "------------------ " + topic+" - // "+consumerGroup+" "+clientId); - - logger.info("Couldn't respond to client, closing cambria consumer " + " " + topic + " " + consumerGroup - + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + " ****** " + excp); - - ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId); - - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, - DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), - "Couldn't respond to client, closing cambria consumer " + excp.getMessage(), null, - Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost()); - logger.info(errRes.toString()); - throw new CambriaApiException(errRes); - } finally { - coes = null; - // If no cache, close the consumer now that we're done with it.
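/*
 * A minimal sketch of the cache decision made in the finally block that follows, written
 * as a standalone helper: the consumer is only closed when the consumer cache is switched
 * off. The helper name is hypothetical; the property key and default are the
 * ConsumerFactory constants used below.
 */
private static boolean consumerCacheEnabled() {
    boolean enabled = ConsumerFactory.kDefault_IsCacheEnabled;          // compiled-in default
    final String raw = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
            ConsumerFactory.kSetting_EnableCache);                      // null when the property is not set
    if (raw != null) {
        enabled = Boolean.parseBoolean(raw);                            // configured value overrides the default
    }
    return enabled;                                                     // false => close the consumer after each fetch
}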
- boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled; - String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - ConsumerFactory.kSetting_EnableCache); - if (null != strkSetting_EnableCache) - kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache); - // if - // (!ctx.getConfigReader().getSettings().getBoolean(ConsumerFactory.kSetting_EnableCache, - // ConsumerFactory.kDefault_IsCacheEnabled) && (c != null)) { - if (!kSetting_EnableCache && (c != null)) { - try { - c.close(); - } catch (Exception e) { - logger.info("***Exception occurred in getEvents finally block while closing the consumer " + " " - + topic + " " + consumerGroup + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE - + " " + e); - } - } - } - } - - /** - * @throws missingReqdSetting - * - */ - @Override - public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, - final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, - CambriaApiException, IOException, missingReqdSetting, DMaaPAccessDeniedException { - - // is this user allowed to write to this topic? - final long startMs = System.currentTimeMillis(); - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); - final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); - boolean isAAFTopic = false; - - // was this host blacklisted? - final String remoteAddr = Utils.getRemoteAddress(ctx); - - if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - "Source address [" + remoteAddr + "] is blacklisted. 
Please contact the cluster management team.", - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - - String topicNameStd = null; - - // topicNameStd= - - topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, - "enforced.topic.name.AAF"); - String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "metrics.send.cambria.topic"); - if (null == metricTopicname) - metricTopicname = "msgrtr.apinode.metrics.dmaap"; - boolean topicNameEnforced = false; - if (null != topicNameStd && topic.startsWith(topicNameStd)) { - topicNameEnforced = true; - } - - // Here check if the user has rights to publish on the topic - // ( This will be called when no auth is added or when UEB API Key - // Authentication is used) - // checkUserWrite(user) method will throw an error when there is no Auth - // header added or when the - // user has no publish rights - - if (null != metatopic && null != metatopic.getOwner() && !("".equals(metatopic.getOwner())) - && null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) { - metatopic.checkUserWrite(user); - } - - // if headers are not provided then user will be null - if (topicNameEnforced || (user == null && null != ctx.getRequest().getHeader("Authorization") - && !topic.equalsIgnoreCase(metricTopicname))) { - // the topic name will be sent by the client - - DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); - String permission = aaf.aafPermissionString(topic, "pub"); - if (!aaf.aafAuthentication(ctx.getRequest(), permission)) { - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - errorMessages.getNotPermitted1() + " publish " + errorMessages.getNotPermitted2() + topic - + " on " + permission, - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new DMaaPAccessDeniedException(errRes); - } - isAAFTopic = true; - } - - final HttpServletRequest req = ctx.getRequest(); - - // check for chunked input - boolean chunked = false; - if (null != req.getHeader(TRANSFER_ENCODING)) { - chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked"); - } - // get the media type, or set it to a generic value if it wasn't - // provided - String mediaType = req.getContentType(); - if (mediaType == null || mediaType.length() == 0) { - mediaType = MimeTypes.kAppGenericBinary; - } - - if (mediaType.contains("charset=UTF-8")) { - mediaType = mediaType.replace("; charset=UTF-8", "").trim(); - } - - String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "transidUEBtopicreqd"); - boolean istransidreqd = false; - if (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) { - istransidreqd = true; - } - - if (isAAFTopic || istransidreqd) { - pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType); - } else { - pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType); - } - final long endMs = System.currentTimeMillis(); - final long totalMs = endMs - startMs; - - LOG.info("Overall Response time - Published " + " msgs in " + totalMs + " ms for topic " + topic); - - } - - /** - * - * 
@param ctx - * @param topic - * @param msg - * @param defaultPartition - * @param chunked - * @param mediaType - * @throws ConfigDbException - * @throws AccessDeniedException - * @throws TopicExistsException - * @throws CambriaApiException - * @throws IOException - */ - private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked, - String mediaType) - throws ConfigDbException, AccessDeniedException, TopicExistsException, CambriaApiException, IOException { - final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); - // setup the event set - final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition); - - // start processing, building a batch to push to the backend - final long startMs = System.currentTimeMillis(); - long count = 0; - long maxEventBatch = 1024L* 16; - String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH); - if (null != batchlen) - maxEventBatch = Long.parseLong(batchlen); - // long maxEventBatch = - - final LinkedList batch = new LinkedList<>(); - // final ArrayList> kms = new - - final ArrayList> pms = new ArrayList<>(); - try { - // for each message... - Publisher.message m = null; - while ((m = events.next()) != null) { - // add the message to the batch - batch.add(m); - // final KeyedMessage data = new - // KeyedMessage(topic, m.getKey(), - - // kms.add(data); - final ProducerRecord data = new ProducerRecord(topic, m.getKey(), - m.getMessage()); - - pms.add(data); - // check if the batch is full - final int sizeNow = batch.size(); - if (sizeNow > maxEventBatch) { - // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, - - // kms.clear(); - ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); - pms.clear(); - batch.clear(); - metricsSet.publishTick(sizeNow); - count += sizeNow; - } - } - - // send the pending batch - final int sizeNow = batch.size(); - if (sizeNow > 0) { - // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, - - // kms.clear(); - ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); - pms.clear(); - batch.clear(); - metricsSet.publishTick(sizeNow); - count += sizeNow; - } - - final long endMs = System.currentTimeMillis(); - final long totalMs = endMs - startMs; - - LOG.info("Published " + count + " msgs in " + totalMs + " ms for topic " + topic + " from server " - + ctx.getRequest().getRemoteHost()); - - // build a responseP - final JSONObject response = new JSONObject(); - response.put("count", count); - response.put("serverTimeMs", totalMs); - DMaaPResponseBuilder.respondOk(ctx, response); - - } catch (Exception excp) { - int status = HttpStatus.SC_NOT_FOUND; - String errorMsg = null; - if (excp instanceof CambriaApiException) { - status = ((CambriaApiException) excp).getStatus(); - JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); - JSONObject errObject = new JSONObject(jsonTokener); - errorMsg = (String) errObject.get("message"); - - } - ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), - errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count - + "." 
+ errorMsg, - null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null, - null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - - } - } - - /** - * - * @param ctx - * @param inputStream - * @param topic - * @param partitionKey - * @param requestTime - * @param chunked - * @param mediaType - * @throws ConfigDbException - * @throws AccessDeniedException - * @throws TopicExistsException - * @throws IOException - * @throws CambriaApiException - */ - private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic, - final String partitionKey, final String requestTime, final boolean chunked, final String mediaType) - throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException, CambriaApiException { - - final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); - - // setup the event set - final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey); - - // start processing, building a batch to push to the backend - final long startMs = System.currentTimeMillis(); - long count = 0; - long maxEventBatch = 1024L * 16; - String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH); - if (null != evenlen) - maxEventBatch = Long.parseLong(evenlen); - // final long maxEventBatch = - - final LinkedList batch = new LinkedList(); - // final ArrayList> kms = new - - final ArrayList> pms = new ArrayList>(); - Publisher.message m = null; - int messageSequence = 1; - Long batchId = 1L; - final boolean transactionEnabled = true; - int publishBatchCount = 0; - SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS"); - - // LOG.warn("Batch Start Id: " + - - try { - // for each message... 
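/*
 * Condensed sketch of the batching loop that follows: messages are wrapped in
 * ProducerRecords and flushed through sendBatchMessageNew whenever the configured batch
 * length is reached, with a final flush for the remainder. The <String, String> generics
 * are an assumption; topic, events, maxEventBatch and ctx are the surrounding method's
 * variables.
 */
final ArrayList<ProducerRecord<String, String>> pending = new ArrayList<>();
Publisher.message next;
while ((next = events.next()) != null) {
    pending.add(new ProducerRecord<>(topic, next.getKey(), next.getMessage()));
    if (pending.size() >= maxEventBatch) {                              // batch full: publish and start over
        ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pending);
        pending.clear();
    }
}
if (!pending.isEmpty()) {                                               // publish the final partial batch
    ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pending);
}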
- batchId = DMaaPContext.getBatchID(); - - String responseTransactionId = null; - - while ((m = events.next()) != null) { - - // LOG.warn("Batch Start Id: " + - - - addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId, - transactionEnabled); - messageSequence++; - - - batch.add(m); - - responseTransactionId = m.getLogDetails().getTransactionId(); - - JSONObject jsonObject = new JSONObject(); - jsonObject.put("msgWrapMR", m.getMessage()); - jsonObject.put("transactionId", responseTransactionId); - // final KeyedMessage data = new - // KeyedMessage(topic, m.getKey(), - - // kms.add(data); - final ProducerRecord data = new ProducerRecord(topic, m.getKey(), - m.getMessage()); - - pms.add(data); - // check if the batch is full - final int sizeNow = batch.size(); - if (sizeNow >= maxEventBatch) { - String startTime = sdf.format(new Date()); - LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" - + batchId + "]"); - try { - // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, - // kms); - ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); - // transactionLogs(batch); - for (message msg : batch) { - LogDetails logDetails = msg.getLogDetails(); - LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); - } - } catch (Exception excp) { - - int status = HttpStatus.SC_NOT_FOUND; - String errorMsg = null; - if (excp instanceof CambriaApiException) { - status = ((CambriaApiException) excp).getStatus(); - JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); - JSONObject errObject = new JSONObject(jsonTokener); - errorMsg = (String) errObject.get("message"); - } - ErrorResponse errRes = new ErrorResponse(status, - DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), - "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." - + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - pms.clear(); - batch.clear(); - metricsSet.publishTick(sizeNow); - publishBatchCount = sizeNow; - count += sizeNow; - // batchId++; - String endTime = sdf.format(new Date()); - LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" - + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime - + ",Batch End Time=" + endTime + "]"); - batchId = DMaaPContext.getBatchID(); - } - } - - // send the pending batch - final int sizeNow = batch.size(); - if (sizeNow > 0) { - String startTime = sdf.format(new Date()); - LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" - + batchId + "]"); - try { - // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, - // kms); - ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); - // transactionLogs(batch); - for (message msg : batch) { - LogDetails logDetails = msg.getLogDetails(); - LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); - } - } catch (Exception excp) { - int status = HttpStatus.SC_NOT_FOUND; - String errorMsg = null; - if (excp instanceof CambriaApiException) { - status = ((CambriaApiException) excp).getStatus(); - JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); - JSONObject errObject = new JSONObject(jsonTokener); - errorMsg = (String) errObject.get("message"); - } - - ErrorResponse errRes = new ErrorResponse(status, - DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), - "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." - + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - pms.clear(); - metricsSet.publishTick(sizeNow); - count += sizeNow; - // batchId++; - String endTime = sdf.format(new Date()); - publishBatchCount = sizeNow; - LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId - + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time=" - + endTime + "]"); - } - - final long endMs = System.currentTimeMillis(); - final long totalMs = endMs - startMs; - - LOG.info("Published " + count + " msgs(with transaction id) in " + totalMs + " ms for topic " + topic); - - if (null != responseTransactionId) { - ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId)); - } - - // build a response - final JSONObject response = new JSONObject(); - response.put("count", count); - response.put("serverTimeMs", totalMs); - DMaaPResponseBuilder.respondOk(ctx, response); - - } catch (Exception excp) { - int status = HttpStatus.SC_NOT_FOUND; - String errorMsg = null; - if (excp instanceof CambriaApiException) { - status = ((CambriaApiException) excp).getStatus(); - JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); - JSONObject errObject = new JSONObject(jsonTokener); - errorMsg = (String) errObject.get("message"); - } - - ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), - "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." - + errorMessages.getPublishMsgCount() + count + "." + errorMsg, - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - } - - /** - * - * @param msg - * @param topic - * @param request - * @param messageCreationTime - * @param messageSequence - * @param batchId - * @param transactionEnabled - */ - private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request, - final String messageCreationTime, final int messageSequence, final Long batchId, - final boolean transactionEnabled) { - LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId, - transactionEnabled); - logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage())); - msg.setTransactionEnabled(transactionEnabled); - msg.setLogDetails(logDetails); - } - - /** - * - * @author anowarul.islam - * - */ - private static class LogWrap { - private final String fId; - - /** - * constructor initialization - * - * @param topic - * @param cgroup - * @param cid - */ - public LogWrap(String topic, String cgroup, String cid) { - fId = "[" + topic + "/" + cgroup + "/" + cid + "] "; - } - - /** - * - * @param msg - */ - public void info(String msg) { - LOG.info(fId + msg); - } - - /** - * - * @param msg - * @param t - */ - public void warn(String msg, Exception t) { - LOG.warn(fId + msg, t); - } - - } - - public boolean isTransEnabled() { - String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "transidUEBtopicreqd"); - boolean istransidreqd = false; - if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true"))) { - 
istransidreqd = true; - } - - return istransidreqd; - - } - - private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request, - final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) { - LogDetails logDetails = new LogDetails(); - logDetails.setTopicId(topicName); - logDetails.setMessageTimestamp(messageTimestamp); - logDetails.setPublisherId(Utils.getUserApiKey(request)); - logDetails.setPublisherIp(request.getRemoteHost()); - logDetails.setMessageBatchId(batchId); - logDetails.setMessageSequence(String.valueOf(messageSequence)); - logDetails.setTransactionEnabled(transactionEnabled); - logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date())); - logDetails.setServerIp(request.getLocalAddr()); - return logDetails; - } - - /* - * public String getMetricsTopic() { return metricsTopic; } - * - * public void setMetricsTopic(String metricsTopic) { this.metricsTopic = - * metricsTopic; } - */ - - - -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java deleted file mode 100644 index 387d8b1..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java +++ /dev/null @@ -1,600 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Date; -import java.util.LinkedList; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.core.Context; - -import org.apache.http.HttpStatus; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.json.JSONObject; -import org.json.JSONTokener; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.stereotype.Service; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Consumer; -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException; -import com.att.dmf.mr.backends.MetricsSet; -import com.att.dmf.mr.backends.Publisher; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.LogDetails; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.exception.DMaaPResponseCode; -import com.att.dmf.mr.exception.ErrorResponse; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.resources.CambriaEventSet; -import com.att.dmf.mr.resources.CambriaOutboundEventStream; -import com.att.dmf.mr.service.MMService; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.dmf.mr.utils.Utils; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.service.standards.MimeTypes; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.util.rrConvertor; - - - -@Service -public class MMServiceImpl implements MMService { - private static final String BATCH_LENGTH = "event.batch.length"; - private static final String TRANSFER_ENCODING = "Transfer-Encoding"; - //private static final Logger LOG = Logger.getLogger(MMServiceImpl.class); - private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MMServiceImpl.class); - @Autowired - private DMaaPErrorMessages errorMessages; - - @Autowired - @Qualifier("configurationReader") - private ConfigurationReader configReader; - - // HttpServletRequest object - @Context - private HttpServletRequest request; - - // HttpServletResponse object - @Context - private HttpServletResponse response; - - @Override - public void addWhiteList() { - - } - - @Override - public void removeWhiteList() { - - } - - @Override - public void listWhiteList() { - - } - - @Override - public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) - throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, - CambriaApiException, IOException { - - - final HttpServletRequest req = ctx.getRequest(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - - // was this host blacklisted? 
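/*
 * The subscribe path starts with the same blacklist gate used by the other services in
 * this patch: reject the caller with 403 before any Kafka work is done. A condensed
 * sketch (the three-argument ErrorResponse constructor is the short form used by
 * TopicServiceImpl later in this patch):
 */
final String caller = Utils.getRemoteAddress(ctx);
if (ctx.getConfigReader().getfIpBlackList().contains(caller)) {
    throw new CambriaApiException(new ErrorResponse(HttpStatus.SC_FORBIDDEN,
            DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
            "Source address [" + caller + "] is blacklisted. Please contact the cluster management team."));
}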
- final String remoteAddr = Utils.getRemoteAddress(ctx); - - if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.", - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - - int limit = CambriaConstants.kNoLimit; - - if (req.getParameter("limit") != null) { - limit = Integer.parseInt(req.getParameter("limit")); - } - limit = 1; - - int timeoutMs = CambriaConstants.kNoTimeout; - String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout"); - if (strtimeoutMS != null) - timeoutMs = Integer.parseInt(strtimeoutMS); - // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", - - if (req.getParameter("timeout") != null) { - timeoutMs = Integer.parseInt(req.getParameter("timeout")); - } - - // By default no filter is applied if filter is not passed as a - // parameter in the request URI - String topicFilter = CambriaConstants.kNoFilter; - if (null != req.getParameter("filter")) { - topicFilter = req.getParameter("filter"); - } - // pretty to print the messaages in new line - String prettyval = "0"; - String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty"); - if (null != strPretty) - prettyval = strPretty; - - String metaval = "0"; - String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta"); - if (null != strmeta) - metaval = strmeta; - - final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval); - // withMeta to print offset along with message - final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval); - - // is this user allowed to read this topic? - //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); - final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); - - if (metatopic == null) { - // no such topic. 
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND, - DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), - errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()), - topic, null, null, clientId, ctx.getRequest().getRemoteHost()); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - //String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic"); - /* - * if (null==metricTopicname) - * metricTopicname="msgrtr.apinode.metrics.dmaap"; //else if(user!=null) - * if(null==ctx.getRequest().getHeader("Authorization")&& - * !topic.equalsIgnoreCase(metricTopicname)) { if (null != - * metatopic.getOwner() && !("".equals(metatopic.getOwner()))){ // check - * permissions metatopic.checkUserRead(user); } } - */ - - Consumer c = null; - try { - final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); - - c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,ctx.getRequest().getRemoteHost()); - - final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs) - .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build(); - coes.setDmaapContext(ctx); - coes.setTopic(metatopic); - - DMaaPResponseBuilder.setNoCacheHeadings(ctx); - - try { - coes.write(baos); - } catch (Exception ex) { - - } - - c.commitOffsets(); - final int sent = coes.getSentCount(); - - metricsSet.consumeTick(sent); - - } catch (UnavailableException excp) { - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, - DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), - errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic, - null, null, clientId, ctx.getRequest().getRemoteHost()); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - - } catch (CambriaApiException excp) { - - throw excp; - } catch (Exception excp) { - - ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId); - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, - DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), - "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null, - Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost()); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } finally { - - boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled; - String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - ConsumerFactory.kSetting_EnableCache); - if (null != strkSetting_EnableCache) - kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache); - - if (!kSetting_EnableCache && (c != null)) { - c.close(); - - } - } - return baos.toString(); - } - - @Override - public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, - final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, - CambriaApiException, IOException, missingReqdSetting { - - //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); - //final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); - - final String remoteAddr = Utils.getRemoteAddress(ctx); - - if 
(ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.", - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - - String topicNameStd = null; - - topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, - "enforced.topic.name.AAF"); - String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "metrics.send.cambria.topic"); - if (null == metricTopicname) - metricTopicname = "msgrtr.apinode.metrics.dmaap"; - boolean topicNameEnforced = false; - if (null != topicNameStd && topic.startsWith(topicNameStd)) { - topicNameEnforced = true; - } - - final HttpServletRequest req = ctx.getRequest(); - - boolean chunked = false; - if (null != req.getHeader(TRANSFER_ENCODING)) { - chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked"); - } - - String mediaType = req.getContentType(); - if (mediaType == null || mediaType.length() == 0) { - mediaType = MimeTypes.kAppGenericBinary; - } - - if (mediaType.contains("charset=UTF-8")) { - mediaType = mediaType.replace("; charset=UTF-8", "").trim(); - } - - if (!topic.equalsIgnoreCase(metricTopicname)) { - pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType); - } else { - pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType); - } - } - - private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request, - final String messageCreationTime, final int messageSequence, final Long batchId, - final boolean transactionEnabled) { - LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId, - transactionEnabled); - logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage())); - msg.setTransactionEnabled(transactionEnabled); - msg.setLogDetails(logDetails); - } - - private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request, - final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) { - LogDetails logDetails = new LogDetails(); - logDetails.setTopicId(topicName); - logDetails.setMessageTimestamp(messageTimestamp); - logDetails.setPublisherId(Utils.getUserApiKey(request)); - logDetails.setPublisherIp(request.getRemoteHost()); - logDetails.setMessageBatchId(batchId); - logDetails.setMessageSequence(String.valueOf(messageSequence)); - logDetails.setTransactionEnabled(transactionEnabled); - logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date())); - logDetails.setServerIp(request.getLocalAddr()); - return logDetails; - } - - private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked, - String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException, - CambriaApiException, IOException { - final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); - - // setup the event set - final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition); - - // start processing, building a batch to push to the backend - final long startMs = 
System.currentTimeMillis(); - long count = 0; - - long maxEventBatch = 1024 * 16; - String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH); - if (null != batchlen) - maxEventBatch = Long.parseLong(batchlen); - - // long maxEventBatch = - // ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16); - final LinkedList batch = new LinkedList(); - final ArrayList> pms = new ArrayList>(); - //final ArrayList> kms = new ArrayList>(); - - try { - // for each message... - Publisher.message m = null; - while ((m = events.next()) != null) { - // add the message to the batch - batch.add(m); - final ProducerRecord data = new ProducerRecord(topic, m.getKey(), - m.getMessage()); - // check if the batch is full - final int sizeNow = batch.size(); - if (sizeNow > maxEventBatch) { - ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); - pms.clear(); - batch.clear(); - metricsSet.publishTick(sizeNow); - count += sizeNow; - } - } - - // send the pending batch - final int sizeNow = batch.size(); - if (sizeNow > 0) { - ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); - pms.clear(); - batch.clear(); - metricsSet.publishTick(sizeNow); - count += sizeNow; - } - - final long endMs = System.currentTimeMillis(); - final long totalMs = endMs - startMs; - - LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic); - - // build a responseP - final JSONObject response = new JSONObject(); - response.put("count", count); - response.put("serverTimeMs", totalMs); - // DMaaPResponseBuilder.respondOk(ctx, response); - - } catch (Exception excp) { - - int status = HttpStatus.SC_NOT_FOUND; - String errorMsg = null; - if (excp.getClass().toString().contains("CambriaApiException")) { - status = ((CambriaApiException) excp).getStatus(); - JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); - JSONObject errObject = new JSONObject(jsonTokener); - errorMsg = (String) errObject.get("message"); - - } - ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), - errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count - + "." 
+ errorMsg, - null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null, - null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - - } - } - - private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic, - final String partitionKey, final String requestTime, final boolean chunked, final String mediaType) - throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException, - CambriaApiException { - - final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); - - // setup the event set - final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey); - - // start processing, building a batch to push to the backend - final long startMs = System.currentTimeMillis(); - long count = 0; - long maxEventBatch = 1024 * 16; - String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH); - if (null != evenlen) - maxEventBatch = Long.parseLong(evenlen); - - final LinkedList batch = new LinkedList(); - final ArrayList> pms = new ArrayList>(); - - Publisher.message m = null; - int messageSequence = 1; - Long batchId = 1L; - final boolean transactionEnabled = true; - int publishBatchCount = 0; - SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS"); - - // LOG.warn("Batch Start Id: " + - // Utils.getFromattedBatchSequenceId(batchId)); - try { - // for each message... - batchId = DMaaPContext.getBatchID(); - - String responseTransactionId = null; - - while ((m = events.next()) != null) { - - // LOG.warn("Batch Start Id: " + - // Utils.getFromattedBatchSequenceId(batchId)); - - addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId, - transactionEnabled); - messageSequence++; - - // add the message to the batch - batch.add(m); - - responseTransactionId = m.getLogDetails().getTransactionId(); - - JSONObject jsonObject = new JSONObject(); - jsonObject.put("message", m.getMessage()); - jsonObject.put("transactionId", responseTransactionId); - final ProducerRecord data = new ProducerRecord(topic, m.getKey(), - m.getMessage()); - pms.add(data); - - // check if the batch is full - final int sizeNow = batch.size(); - if (sizeNow >= maxEventBatch) { - String startTime = sdf.format(new Date()); - LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" - + batchId + "]"); - try { - ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); - // transactionLogs(batch); - for (message msg : batch) { - LogDetails logDetails = msg.getLogDetails(); - LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); - } - } catch (Exception excp) { - - int status = HttpStatus.SC_NOT_FOUND; - String errorMsg = null; - if (excp.getClass().toString().contains("CambriaApiException")) { - status = ((CambriaApiException) excp).getStatus(); - JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); - JSONObject errObject = new JSONObject(jsonTokener); - errorMsg = (String) errObject.get("message"); - } - ErrorResponse errRes = new ErrorResponse(status, - DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), - "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." - + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - pms.clear(); - batch.clear(); - metricsSet.publishTick(sizeNow); - publishBatchCount = sizeNow; - count += sizeNow; - // batchId++; - String endTime = sdf.format(new Date()); - LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" - + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime - + ",Batch End Time=" + endTime + "]"); - batchId = DMaaPContext.getBatchID(); - } - } - - // send the pending batch - final int sizeNow = batch.size(); - if (sizeNow > 0) { - String startTime = sdf.format(new Date()); - LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" - + batchId + "]"); - try { - ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); - // transactionLogs(batch); - for (message msg : batch) { - LogDetails logDetails = msg.getLogDetails(); - LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); - } - } catch (Exception excp) { - int status = HttpStatus.SC_NOT_FOUND; - String errorMsg = null; - if (excp.getClass().toString().contains("CambriaApiException")) { - status = ((CambriaApiException) excp).getStatus(); - JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); - JSONObject errObject = new JSONObject(jsonTokener); - errorMsg = (String) errObject.get("message"); - } - - ErrorResponse errRes = new ErrorResponse(status, - DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), - "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." - + errorMessages.getPublishMsgCount() + count + "." + errorMsg, - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - pms.clear(); - metricsSet.publishTick(sizeNow); - count += sizeNow; - // batchId++; - String endTime = sdf.format(new Date()); - publishBatchCount = sizeNow; - LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId - + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time=" - + endTime + "]"); - } - - final long endMs = System.currentTimeMillis(); - final long totalMs = endMs - startMs; - - LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic); - - // build a response - final JSONObject response = new JSONObject(); - response.put("count", count); - response.put("serverTimeMs", totalMs); - - } catch (Exception excp) { - int status = HttpStatus.SC_NOT_FOUND; - String errorMsg = null; - if (excp.getClass().toString().contains("CambriaApiException")) { - status = ((CambriaApiException) excp).getStatus(); - JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); - JSONObject errObject = new JSONObject(jsonTokener); - errorMsg = (String) errObject.get("message"); - } - - ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), - "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." - + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, - null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), - ctx.getRequest().getRemoteHost(), null, null); - LOG.info(errRes.toString()); - throw new CambriaApiException(errRes); - } - } -} diff --git a/src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java deleted file mode 100644 index d867ea8..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java +++ /dev/null @@ -1,115 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.io.IOException; - -import org.json.JSONObject; -import org.springframework.stereotype.Component; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.MetricsSet; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.service.MetricsService; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.metrics.CdmMeasuredItem; - -/** - * - * - * This will provide all the generated metrics details also it can provide the - * get metrics details - * - * - * @author nilanjana.maity - * - * - */ -@Component -public class MetricsServiceImpl implements MetricsService { - - - private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MetricsService.class); - /** - * - * - * @param ctx - * @throws IOException - * - * - * get Metric details - * - */ - @Override - - public void get(DMaaPContext ctx) throws IOException { - LOG.info("Inside : MetricsServiceImpl : get()"); - final MetricsSet metrics = ctx.getConfigReader().getfMetrics(); - DMaaPResponseBuilder.setNoCacheHeadings(ctx); - final JSONObject result = metrics.toJson(); - DMaaPResponseBuilder.respondOk(ctx, result); - LOG.info("============ Metrics generated : " + result.toString() + "================="); - - } - - - @Override - /** - * - * get Metric by name - * - * - * @param ctx - * @param name - * @throws IOException - * @throws CambriaApiException - * - * - */ - public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException { - LOG.info("Inside : MetricsServiceImpl : getMetricByName()"); - final MetricsSet metrics = ctx.getConfigReader().getfMetrics(); - - final 
CdmMeasuredItem item = metrics.getItem(name); - /** - * check if item is null - */ - if (item == null) { - throw new CambriaApiException(404, "No metric named [" + name + "]."); - } - - final JSONObject entry = new JSONObject(); - entry.put("summary", item.summarize()); - entry.put("raw", item.getRawValueString()); - - DMaaPResponseBuilder.setNoCacheHeadings(ctx); - - final JSONObject result = new JSONObject(); - result.put(name, entry); - - DMaaPResponseBuilder.respondOk(ctx, result); - LOG.info("============ Metrics generated : " + entry.toString() + "================="); - } - -} diff --git a/src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java deleted file mode 100644 index 983af7e..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java +++ /dev/null @@ -1,694 +0,0 @@ -/** - * - */ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.io.IOException; - -import org.apache.http.HttpStatus; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.beans.TopicBean; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPAccessDeniedException; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.exception.DMaaPResponseCode; -import com.att.dmf.mr.exception.ErrorResponse; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.dmf.mr.metabroker.Broker1; - -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.security.DMaaPAAFAuthenticator; -import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.service.TopicService; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.dmf.mr.utils.Utils; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.NsaAcl; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -/** - * @author muzainulhaque.qazi - * - */ -@Service -public class TopicServiceImpl implements TopicService { - - // private static final Logger LOGGER = - - private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class); - @Autowired - private DMaaPErrorMessages errorMessages; - - // @Value("${msgRtr.topicfactory.aaf}") - - - public DMaaPErrorMessages getErrorMessages() { - return errorMessages; - } - - public void setErrorMessages(DMaaPErrorMessages errorMessages) { - this.errorMessages = errorMessages; - } - - /** - * @param dmaapContext - * @throws JSONException - * @throws ConfigDbException - * @throws IOException - * - */ - @Override - public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException { - LOGGER.info("Fetching list of all the topics."); - JSONObject json = new JSONObject(); - - JSONArray topicsList = new JSONArray(); - - for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) { - topicsList.put(topic.getName()); - } - - json.put("topics", topicsList); - - LOGGER.info("Returning list of all the topics."); - DMaaPResponseBuilder.respondOk(dmaapContext, json); - - } - - /** - * @param dmaapContext - * @throws JSONException - * @throws ConfigDbException - * @throws IOException - * - */ - public void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException { - - LOGGER.info("Fetching list of all the topics."); - JSONObject json = new JSONObject(); - - JSONArray topicsList = new JSONArray(); - - for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) { - JSONObject obj = new JSONObject(); - obj.put("topicName", topic.getName()); - - obj.put("owner", topic.getOwner()); - obj.put("txenabled", topic.isTransactionEnabled()); - topicsList.put(obj); - } - - json.put("topics", topicsList); - - LOGGER.info("Returning list of all the topics."); - 
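/*
 * Illustrative response shape for the topic listing assembled above and returned by the
 * respondOk call below; the field names are the JSONObject keys used in this method, the
 * sample entry is made up.
 *
 *   {
 *     "topics": [
 *       { "topicName": "org.onap.example.topic", "owner": "exampleOwner", "txenabled": false }
 *     ]
 *   }
 */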
DMaaPResponseBuilder.respondOk(dmaapContext, json); - - } - - /** - * @param dmaapContext - * @param topicName - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - */ - @Override - public void getTopic(DMaaPContext dmaapContext, String topicName) - throws ConfigDbException, IOException, TopicExistsException { - - LOGGER.info("Fetching details of topic " + topicName); - Topic t = getMetaBroker(dmaapContext).getTopic(topicName); - - if (null == t) { - LOGGER.error("Topic [" + topicName + "] does not exist."); - throw new TopicExistsException("Topic [" + topicName + "] does not exist."); - } - - JSONObject o = new JSONObject(); - o.put("name", t.getName()); - o.put("description", t.getDescription()); - - if (null != t.getOwners()) - o.put("owner", t.getOwners().iterator().next()); - if (null != t.getReaderAcl()) - o.put("readerAcl", aclToJson(t.getReaderAcl())); - if (null != t.getWriterAcl()) - o.put("writerAcl", aclToJson(t.getWriterAcl())); - - LOGGER.info("Returning details of topic " + topicName); - DMaaPResponseBuilder.respondOk(dmaapContext, o); - - } - - /** - * @param dmaapContext - * @param topicBean - * @throws CambriaApiException - * @throws AccessDeniedException - * @throws IOException - * @throws TopicExistsException - * @throws JSONException - * - * - * - */ - @Override - public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean) - throws CambriaApiException, DMaaPAccessDeniedException, IOException, TopicExistsException { - LOGGER.info("Creating topic " + topicBean.getTopicName()); - - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); - String key = null; - String appName = dmaapContext.getRequest().getHeader("AppName"); - String enfTopicName = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, - "enforced.topic.name.AAF"); - - if (user != null) { - key = user.getKey(); - - if (enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >= 0) { - - LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed."); - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - "Failed to create topic: Access Denied.User does not have permission to perform create topic"); - - LOGGER.info(errRes.toString()); - // throw new DMaaPAccessDeniedException(errRes); - - } - } - // else if (user==null && - // (null==dmaapContext.getRequest().getHeader("Authorization") && null - // == dmaapContext.getRequest().getHeader("cookie")) ) { - else if (Utils.isCadiEnabled()&&user == null && null == dmaapContext.getRequest().getHeader("Authorization") - && (null == appName && null == dmaapContext.getRequest().getHeader("cookie"))) { - LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed."); - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - "Failed to create topic: Access Denied.User does not have permission to perform create topic"); - - LOGGER.info(errRes.toString()); - // throw new DMaaPAccessDeniedException(errRes); - } - - if (user == null && (null != dmaapContext.getRequest().getHeader("Authorization") - )) { - // if (user == null && - // (null!=dmaapContext.getRequest().getHeader("Authorization") || - // null != dmaapContext.getRequest().getHeader("cookie"))) { - // ACL authentication is not provided so we will use the aaf - // authentication - 
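/*
 * Condensed sketch of how the AAF "create" permission is assembled and enforced in the
 * block below: the namespace is everything before the last dot of the topic name and the
 * prefix comes from the msgRtr.topicfactory.aaf property. Variable names here are
 * illustrative; the rejection mirrors the 401/ACCESS_NOT_PERMITTED response built below.
 */
final String newTopicName = topicBean.getTopicName();
final String topicNameSpace = newTopicName.indexOf('.') > 1
        ? newTopicName.substring(0, newTopicName.lastIndexOf('.'))
        : "";
final String createPermission = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
        "msgRtr.topicfactory.aaf") + topicNameSpace + "|create";        // e.g. "<prefix><namespace>|create"
final DMaaPAAFAuthenticator aafCheck = new DMaaPAAFAuthenticatorImpl();
if (!aafCheck.aafAuthentication(dmaapContext.getRequest(), createPermission)) {
    throw new DMaaPAccessDeniedException(new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
            DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
            "Failed to create topic: Access Denied. User does not have permission to create topic with perm "
                    + createPermission));
}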
LOGGER.info("Authorization the topic"); - - String permission = ""; - String nameSpace = ""; - if (topicBean.getTopicName().indexOf(".") > 1) - nameSpace = topicBean.getTopicName().substring(0, topicBean.getTopicName().lastIndexOf(".")); - - String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "msgRtr.topicfactory.aaf"); - - // AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper); - - permission = mrFactoryVal + nameSpace + "|create"; - DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); - - if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) { - - LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed."); - - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - "Failed to create topic: Access Denied.User does not have permission to create topic with perm " - + permission); - - LOGGER.info(errRes.toString()); - throw new DMaaPAccessDeniedException(errRes); - - } else { - // if user is null and aaf authentication is ok then key should - // be "" - // key = ""; - /** - * Added as part of AAF user it should return username - */ - - key = dmaapContext.getRequest().getUserPrincipal().getName().toString(); - LOGGER.info("key ==================== " + key); - - } - } - - try { - final String topicName = topicBean.getTopicName(); - final String desc = topicBean.getTopicDescription(); - int partition = topicBean.getPartitionCount(); - // int replica = topicBean.getReplicationCount(); - if (partition == 0) { - partition = 1; - } - final int partitions = partition; - - int replica = topicBean.getReplicationCount(); - if (replica == 0) { - replica = 1; - } - final int replicas = replica; - boolean transactionEnabled = topicBean.isTransactionEnabled(); - - final Broker1 metabroker = getMetaBroker(dmaapContext); - final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas, transactionEnabled); - - LOGGER.info("Topic created successfully. Sending response"); - DMaaPResponseBuilder.respondOk(dmaapContext, topicToJson(t)); - } catch (JSONException excp) { - - LOGGER.error("Failed to create topic. Couldn't parse JSON data.", excp); - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST, - DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson()); - LOGGER.info(errRes.toString()); - throw new CambriaApiException(errRes); - - } catch (ConfigDbException excp1) { - - LOGGER.error("Failed to create topic. 
Config DB Exception", excp1); - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST, - DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson()); - LOGGER.info(errRes.toString()); - throw new CambriaApiException(errRes); - } catch (com.att.dmf.mr.metabroker.Broker1.TopicExistsException e) { - // TODO Auto-generated catch block - LOGGER.error( e.getMessage()); - } - } - - /** - * @param dmaapContext - * @param topicName - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - * @throws CambriaApiException - * @throws AccessDeniedException - */ - @Override - public void deleteTopic(DMaaPContext dmaapContext, String topicName) throws IOException, ConfigDbException, - CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException { - - - LOGGER.info(" Deleting topic " + topicName); - /*if (true) { // { - LOGGER.error("Failed to delete topi" + topicName + ". Authentication failed."); - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), errorMessages.getCreateTopicFail() + " " - + errorMessages.getNotPermitted1() + " delete " + errorMessages.getNotPermitted2()); - LOGGER.info(errRes.toString()); - throw new DMaaPAccessDeniedException(errRes); - }*/ - - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); - - if (user == null && null != dmaapContext.getRequest().getHeader("Authorization")) { - LOGGER.info("Authenticating the user, as ACL authentication is not provided"); - // String permission = - - String permission = ""; - String nameSpace = topicName.substring(0, topicName.lastIndexOf(".")); - String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "msgRtr.topicfactory.aaf"); - - permission = mrFactoryVal + nameSpace + "|destroy"; - DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); - if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) { - LOGGER.error("Failed to delete topi" + topicName + ". Authentication failed."); - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - errorMessages.getCreateTopicFail() + " " + errorMessages.getNotPermitted1() + " delete " - + errorMessages.getNotPermitted2()); - LOGGER.info(errRes.toString()); - throw new DMaaPAccessDeniedException(errRes); - } - - } - - final Broker1 metabroker = getMetaBroker(dmaapContext); - final Topic topic = metabroker.getTopic(topicName); - - if (topic == null) { - LOGGER.error("Failed to delete topic. Topic [" + topicName + "] does not exist."); - throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist."); - } - - // metabroker.deleteTopic(topicName); - - LOGGER.info("Topic [" + topicName + "] deleted successfully. 
Sending response."); - DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Topic [" + topicName + "] deleted successfully"); - } - - /** - * - * @param dmaapContext - * @return - */ - private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) { - return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker(); - } - - /** - * @param dmaapContext - * @param topicName - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - * - */ - @Override - public void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName) - throws ConfigDbException, IOException, TopicExistsException { - LOGGER.info("Retrieving list of all the publishers for topic " + topicName); - Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); - - if (topic == null) { - LOGGER.error("Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist."); - throw new TopicExistsException( - "Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist."); - } - - final NsaAcl acl = topic.getWriterAcl(); - - LOGGER.info("Returning list of all the publishers for topic " + topicName + ". Sending response."); - DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl)); - - } - - /** - * - * @param acl - * @return - */ - private static JSONObject aclToJson(NsaAcl acl) { - final JSONObject o = new JSONObject(); - if (acl == null) { - o.put("enabled", false); - o.put("users", new JSONArray()); - } else { - o.put("enabled", acl.isActive()); - - final JSONArray a = new JSONArray(); - for (String user : acl.getUsers()) { - a.put(user); - } - o.put("users", a); - } - return o; - } - - /** - * @param dmaapContext - * @param topicName - */ - @Override - public void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName) - throws IOException, ConfigDbException, TopicExistsException { - LOGGER.info("Retrieving list of all the consumers for topic " + topicName); - Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); - - if (topic == null) { - LOGGER.error("Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist."); - throw new TopicExistsException( - "Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist."); - } - - final NsaAcl acl = topic.getReaderAcl(); - - LOGGER.info("Returning list of all the consumers for topic " + topicName + ". 
Sending response."); - DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl)); - - } - - /** - * - * @param t - * @return - */ - private static JSONObject topicToJson(Topic t) { - final JSONObject o = new JSONObject(); - - o.put("name", t.getName()); - o.put("description", t.getDescription()); - o.put("owner", t.getOwner()); - o.put("readerAcl", aclToJson(t.getReaderAcl())); - o.put("writerAcl", aclToJson(t.getWriterAcl())); - - return o; - } - - /** - * @param dmaapContext - * @param topicName @param producerId @throws - * ConfigDbException @throws IOException @throws - * TopicExistsException @throws AccessDeniedException @throws - * - */ - @Override - public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) - throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, CambriaApiException { - - LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName); - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); - - - // - // LOGGER.info("Authenticating the user, as ACL authentication is not - - //// String permission = - - // - - - - // { - // LOGGER.error("Failed to permit write access to producer [" + - // producerId + "] for topic " + topicName - - // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - // errorMessages.getNotPermitted1()+" - - - - // } - // } - - Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); - - if (null == topic) { - LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic. Topic [" + topicName - + "] does not exist."); - throw new TopicExistsException("Failed to permit write access to producer [" + producerId - + "] for topic. Topic [" + topicName + "] does not exist."); - } - - topic.permitWritesFromUser(producerId, user); - - LOGGER.info("Write access has been granted to producer [" + producerId + "] for topic [" + topicName - + "]. 
Sending response."); - DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been granted to publisher."); - - } - - /** - * @param dmaapContext - * @param topicName - * @param producerId - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - * @throws AccessDeniedException - * @throws DMaaPAccessDeniedException - * - */ - @Override - public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) - throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, - DMaaPAccessDeniedException { - - LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName); - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); - - // - //// String permission = - - // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); - // String permission = aaf.aafPermissionString(topicName, "manage"); - // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) - // { - // LOGGER.error("Failed to revoke write access to producer [" + - // producerId + "] for topic " + topicName - - // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - // errorMessages.getNotPermitted1()+" - - - // throw new DMaaPAccessDeniedException(errRes); - // - - // } - - Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); - - if (null == topic) { - LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic. Topic [" + topicName - + "] does not exist."); - throw new TopicExistsException("Failed to revoke write access to producer [" + producerId - + "] for topic. Topic [" + topicName + "] does not exist."); - } - - topic.denyWritesFromUser(producerId, user); - - LOGGER.info("Write access has been revoked to producer [" + producerId + "] for topic [" + topicName - + "]. Sending response."); - DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been revoked for publisher."); - - } - - /** - * @param dmaapContext - * @param topicName - * @param consumerId - * @throws DMaaPAccessDeniedException - */ - @Override - public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) - throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, - DMaaPAccessDeniedException { - - LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName); - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); - - // - //// String permission = - - - // String permission = aaf.aafPermissionString(topicName, "manage"); - // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) - // { - // LOGGER.error("Failed to permit read access to consumer [" + - // consumerId + "] for topic " + topicName - - // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - // errorMessages.getNotPermitted1()+" - - - - // } - // } - - Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); - - if (null == topic) { - LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic. Topic [" + topicName - + "] does not exist."); - throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId - + "] for topic. 
Topic [" + topicName + "] does not exist."); - } - - topic.permitReadsByUser(consumerId, user); - - LOGGER.info("Read access has been granted to consumer [" + consumerId + "] for topic [" + topicName - + "]. Sending response."); - DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, - "Read access has been granted for consumer [" + consumerId + "] for topic [" + topicName + "]."); - } - - /** - * @param dmaapContext - * @param topicName - * @param consumerId - * @throws DMaaPAccessDeniedException - */ - @Override - public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) - throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, - DMaaPAccessDeniedException { - - LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName); - final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); - - //// String permission = - - - // String permission = aaf.aafPermissionString(topicName, "manage"); - // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) - // { - // LOGGER.error("Failed to revoke read access to consumer [" + - // consumerId + "] for topic " + topicName - - // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, - // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), - // errorMessages.getNotPermitted1()+" - - - // throw new DMaaPAccessDeniedException(errRes); - // } - // - // - - Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); - - if (null == topic) { - LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic. Topic [" + topicName - + "] does not exist."); - throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId - + "] for topic. Topic [" + topicName + "] does not exist."); - } - - topic.denyReadsByUser(consumerId, user); - - LOGGER.info("Read access has been revoked to consumer [" + consumerId + "] for topic [" + topicName - + "]. Sending response."); - DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, - "Read access has been revoked for consumer [" + consumerId + "] for topic [" + topicName + "]."); - - } - -} diff --git a/src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java deleted file mode 100644 index 3065928..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java +++ /dev/null @@ -1,100 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.io.IOException; - -import org.springframework.stereotype.Service; - -import com.att.aft.dme2.internal.jettison.json.JSONException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.service.TransactionService; -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.nsa.configs.ConfigDbException; - -/** - * Once the transaction rest gateway will be using that time it will provide all - * the transaction details like fetching all the transactional objects or get - * any particular transaction object details - * - * @author nilanjana.maity - * - */ -@Service -public class TransactionServiceImpl implements TransactionService { - - @Override - public void checkTransaction(TransactionObj trnObj) { - /* Need to implement the method */ - } - - @Override - public void getAllTransactionObjs(DMaaPContext dmaapContext) - throws ConfigDbException, IOException { - - /* - - * - * LOG.info("configReader : "+configReader.toString()); - * - * final JSONObject result = new JSONObject (); final JSONArray - * transactionIds = new JSONArray (); result.put ( "transactionIds", - * transactionIds ); - * - * DMaaPTransactionObjDB transDb = - * configReader.getfTranDb(); - * - * for (String transactionId : transDb.loadAllTransactionObjs()) { - * transactionIds.put (transactionId); } LOG.info( - * "========== TransactionServiceImpl: getAllTransactionObjs: Transaction objects are : " - * + transactionIds.toString()+"==========="); - * DMaaPResponseBuilder.respondOk(dmaapContext, result); - */ - } - - @Override - public void getTransactionObj(DMaaPContext dmaapContext, - String transactionId) throws ConfigDbException, JSONException, - IOException { - - /* - - * - * ConfigurationReader configReader = dmaapContext.getConfigReader(); - * - * DMaaPTransactionObj trnObj; - * - * trnObj = configReader.getfTranDb().loadTransactionObj(transactionId); - * - * - * if (null != trnObj) { trnObj.serialize(); JSONObject result = - * trnObj.asJsonObject(); DMaaPResponseBuilder.respondOk(dmaapContext, - * result); - * LOG.info("========== TransactionServiceImpl: getTransactionObj : "+ - * result.toString()+"==========="); return; } - * - * } LOG.info( - * "========== TransactionServiceImpl: getTransactionObj: Error : Transaction object does not exist. " - * +"==========="); - */ - } -} diff --git a/src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java deleted file mode 100644 index 73ad83b..0000000 --- a/src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java +++ /dev/null @@ -1,210 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.service.impl; - -import java.io.IOException; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - - -import org.apache.kafka.common.errors.TopicExistsException; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; -import org.springframework.stereotype.Service; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.service.UIService; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.db.NsaApiDb; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; -/** - * @author muzainulhaque.qazi - * - */ -@Service -public class UIServiceImpl implements UIService { - - - private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIServiceImpl.class); - /** - * Returning template of hello page - * @param dmaapContext - * @throws IOException - */ - @Override - public void hello(DMaaPContext dmaapContext) throws IOException { - LOGGER.info("Returning template of hello page."); - DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "templates/hello.html"); - } - - /** - * Fetching list of all api keys and returning in a templated form for display. 
- * @param dmaapContext - * @throws ConfigDbException - * @throws IOException - */ - @Override - public void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException { - // TODO - We need to work on the templates and how data will be set in - // the template - LOGGER.info("Fetching list of all api keys and returning in a templated form for display."); - Map keyMap = getApiKeyDb(dmaapContext).loadAllKeyRecords(); - - LinkedList keyList = new LinkedList<>(); - - JSONObject jsonList = new JSONObject(); - - for (Entry e : keyMap.entrySet()) { - final NsaSimpleApiKey key = e.getValue(); - final JSONObject jsonObject = new JSONObject(); - jsonObject.put("key", key.getKey()); - jsonObject.put("email", key.getContactEmail()); - jsonObject.put("description", key.getDescription()); - keyList.add(jsonObject); - } - - jsonList.put("apiKeys", keyList); - - LOGGER.info("Returning list of all the api keys in JSON format for the template."); - // "templates/apiKeyList.html" - DMaaPResponseBuilder.respondOk(dmaapContext, jsonList); - - } - - /** - * @param dmaapContext - * @param apiKey - * @throws ConfigDbException - * @throws IOException - * @throws JSONException - * @throws Exception - */ - @Override - public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws CambriaApiException, ConfigDbException, JSONException, IOException { - // TODO - We need to work on the templates and how data will be set in - // the template - LOGGER.info("Fetching detials of apikey: " + apiKey); - final NsaSimpleApiKey key = getApiKeyDb(dmaapContext).loadApiKey(apiKey); - - if (null != key) { - LOGGER.info("Details of apikey [" + apiKey + "] found. Returning response"); - DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject()); - } else { - LOGGER.info("Details of apikey [" + apiKey + "] not found. 
Returning response"); - throw new CambriaApiException(400,"Key [" + apiKey + "] not found."); - } - - } - - /** - * Fetching list of all the topics - * @param dmaapContext - * @throws ConfigDbException - * @throws IOException - */ - @Override - public void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException { - // TODO - We need to work on the templates and how data will be set in - // the template - LOGGER.info("Fetching list of all the topics and returning in a templated form for display"); - List topicsList = getMetaBroker(dmaapContext).getAllTopics(); - - JSONObject jsonObject = new JSONObject(); - - JSONArray topicsArray = new JSONArray(); - - List topicList = getMetaBroker(dmaapContext).getAllTopics(); - - for (Topic topic : topicList) { - JSONObject obj = new JSONObject(); - obj.put("topicName", topic.getName()); - obj.put("description", topic.getDescription()); - obj.put("owner", topic.getOwner()); - topicsArray.put(obj); - } - - jsonObject.put("topics", topicsList); - - LOGGER.info("Returning the list of topics in templated format for display."); - DMaaPResponseBuilder.respondOk(dmaapContext, jsonObject); - - } - - /** - * @param dmaapContext - * @param topicName - * @throws ConfigDbException - * @throws IOException - * @throws TopicExistsException - */ - @Override - public void getTopic(DMaaPContext dmaapContext, String topicName) - throws ConfigDbException, IOException, TopicExistsException { - // TODO - We need to work on the templates and how data will be set in - // the template - LOGGER.info("Fetching detials of apikey: " + topicName); - Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); - - if (null == topic) { - LOGGER.error("Topic [" + topicName + "] does not exist."); - throw new TopicExistsException("Topic [" + topicName + "] does not exist."); - } - - JSONObject json = new JSONObject(); - json.put("topicName", topic.getName()); - json.put("description", topic.getDescription()); - json.put("owner", topic.getOwner()); - - LOGGER.info("Returning details of topic [" + topicName + "]. Sending response."); - DMaaPResponseBuilder.respondOk(dmaapContext, json); - - } - - /** - * - * @param dmaapContext - * @return - */ - private NsaApiDb getApiKeyDb(DMaaPContext dmaapContext) { - return dmaapContext.getConfigReader().getfApiKeyDb(); - - } - - /** - * - * @param dmaapContext - * @return - */ - private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) { - return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker(); - } - -} diff --git a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java deleted file mode 100644 index 8ae4c12..0000000 --- a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.transaction; -/** - * - * @author anowarul.islam - * - * @param - */ -public interface DMaaPTransactionFactory { - - /** - * - * @param data - * @return - */ - K makeNewTransactionObj ( String data ); - /** - * - * @param id - * @return - */ - K makeNewTransactionId ( String id ); - -} diff --git a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java deleted file mode 100644 index 7f5dd3a..0000000 --- a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java +++ /dev/null @@ -1,83 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.transaction; - -import org.json.JSONObject; -/** - * This is an interface for DMaaP transactional logging object class. 
- * @author nilanjana.maity - * - */ -public interface DMaaPTransactionObj { - /** - * This will get the transaction id - * @return id transactionId - */ - String getId(); - /** - * This will set the transaction id - * @param id transactionId - */ - void setId(String id); - /** - * This will sync the transaction object mapping - * @return String or null - */ - String serialize(); - /** - * get the total message count once the publisher published - * @return long totalMessageCount - */ - long getTotalMessageCount(); - /** - * set the total message count once the publisher published - * @param totalMessageCount - */ - void setTotalMessageCount(long totalMessageCount); - /** - * get the total Success Message Count once the publisher published - * @return getSuccessMessageCount - */ - long getSuccessMessageCount(); - /** - * set the total Success Message Count once the publisher published - * @param successMessageCount - */ - void setSuccessMessageCount(long successMessageCount); - /** - * get the failure Message Count once the publisher published - * @return failureMessageCount - */ - long getFailureMessageCount(); - /** - * set the failure Message Count once the publisher published - * @param failureMessageCount - */ - void setFailureMessageCount(long failureMessageCount); - - /** - * wrapping the data into json object - * @return JSONObject - */ - JSONObject asJsonObject(); - -} diff --git a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java deleted file mode 100644 index abebaba..0000000 --- a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java +++ /dev/null @@ -1,86 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.transaction; - -import java.util.Set; - -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.NsaSecurityManagerException; - - -/** - * Persistent storage for Transaction Object and secrets built over an abstract config db. Instances - * of this DB must support concurrent access. - * @author nilanjana.maity - * - * @param DMaaPTransactionObj - */ -public interface DMaaPTransactionObjDB { - - - /** - * Create a new Transaction Object. 
If one exists, - * @param id - * @return the new Transaction record - * @throws ConfigDbException - */ - K createTransactionObj (String id) throws KeyExistsException, ConfigDbException; - - - /** - * An exception to signal a Transaction object already exists - * @author nilanjana.maity - * - */ - public static class KeyExistsException extends NsaSecurityManagerException - { - /** - * If the key exists - * @param key - */ - public KeyExistsException ( String key ) { super ( "Transaction Object " + key + " exists" ); } - private static final long serialVersionUID = 1L; - } - - /** - * Save a Transaction Object record. This must be used after changing auxiliary data on the record. - * Note that the transaction must exist (via createTransactionObj). - * @param transactionObj - * @throws ConfigDbException - */ - void saveTransactionObj ( K transactionObj ) throws ConfigDbException; - - /** - * Load an Transaction Object record based on the Transaction ID value - * @param transactionId - * @return a transaction record or null - * @throws ConfigDbException - */ - K loadTransactionObj ( String transactionId ) throws ConfigDbException; - - /** - * Load all Transaction objects. - * @return - * @throws ConfigDbException - */ - Set loadAllTransactionObjs () throws ConfigDbException; -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/transaction/TransactionObj.java b/src/main/java/com/att/dmf/mr/transaction/TransactionObj.java deleted file mode 100644 index 7223f0f..0000000 --- a/src/main/java/com/att/dmf/mr/transaction/TransactionObj.java +++ /dev/null @@ -1,202 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.transaction; - -import org.json.JSONObject; - -/** - * This is the class which will have the transaction enabled logging object - * details - * - * @author nilanjana.maity - * - */ -public class TransactionObj implements DMaaPTransactionObj { - - private String id; - private String createTime; - private long totalMessageCount; - private long successMessageCount; - private long failureMessageCount; - private JSONObject fData = new JSONObject(); - private TrnRequest trnRequest; - private static final String kAuxData = "transaction"; - - /** - * Initializing constructor - * put the json data for transaction enabled logging - * - * @param data - */ - public TransactionObj(JSONObject data) { - fData = data; - - // check for required fields (these throw if not present) - getId(); - getTotalMessageCount(); - getSuccessMessageCount(); - getFailureMessageCount(); - - // make sure we've got an aux data object - final JSONObject aux = fData.optJSONObject(kAuxData); - if (aux == null) { - fData.put(kAuxData, new JSONObject()); - } - } - - /** - * this constructor will have the details of transaction id, - * totalMessageCount successMessageCount, failureMessageCount to get the - * transaction object - * - * @param id - * @param totalMessageCount - * @param successMessageCount - * @param failureMessageCount - */ - public TransactionObj(String id, long totalMessageCount, long successMessageCount, long failureMessageCount) { - this.id = id; - this.totalMessageCount = totalMessageCount; - this.successMessageCount = successMessageCount; - this.failureMessageCount = failureMessageCount; - - } - - /** - * The constructor passing only transaction id - * - * @param id - */ - public TransactionObj(String id) { - this.id = id; - } - - /** - * Wrapping the data into json object - * - * @return JSONObject - */ - public JSONObject asJsonObject() { - final JSONObject full = new JSONObject(fData, JSONObject.getNames(fData)); - return full; - } - - /** - * To get the transaction id - */ - public String getId() { - return id; - } - - /** - * To set the transaction id - */ - public void setId(String id) { - this.id = id; - } - - /** - * - * @return - */ - public String getCreateTime() { - return createTime; - } - - /** - * - * @param createTime - */ - public void setCreateTime(String createTime) { - this.createTime = createTime; - } - - @Override - public String serialize() { - fData.put("transactionId", id); - fData.put("totalMessageCount", totalMessageCount); - fData.put("successMessageCount", successMessageCount); - fData.put("failureMessageCount", failureMessageCount); - return fData.toString(); - } - - public long getTotalMessageCount() { - return totalMessageCount; - } - - public void setTotalMessageCount(long totalMessageCount) { - this.totalMessageCount = totalMessageCount; - } - - public long getSuccessMessageCount() { - return successMessageCount; - } - - public void setSuccessMessageCount(long successMessageCount) { - this.successMessageCount = successMessageCount; - } - - public long getFailureMessageCount() { - return failureMessageCount; - } - - /** - * @param failureMessageCount - */ - public void setFailureMessageCount(long failureMessageCount) { - this.failureMessageCount = failureMessageCount; - } - - /** - * - * @return JSOnObject fData - */ - public JSONObject getfData() { - return fData; - } - - /** - * set the json object into data - * - * @param fData - */ - public void setfData(JSONObject 
fData) { - this.fData = fData; - } - - /** - * - * @return - */ - public TrnRequest getTrnRequest() { - return trnRequest; - } - - /** - * - * @param trnRequest - */ - public void setTrnRequest(TrnRequest trnRequest) { - this.trnRequest = trnRequest; - } - -} diff --git a/src/main/java/com/att/dmf/mr/transaction/TrnRequest.java b/src/main/java/com/att/dmf/mr/transaction/TrnRequest.java deleted file mode 100644 index f7f18a2..0000000 --- a/src/main/java/com/att/dmf/mr/transaction/TrnRequest.java +++ /dev/null @@ -1,183 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.transaction; - -/** - * Created for transaction enable logging details, this is nothing but a bean - * class. 
- * - * @author nilanjana.maity - * - */ -public class TrnRequest { - - private String id; - private String requestCreate; - private String requestHost; - private String serverHost; - private String messageProceed; - private String totalMessage; - private String clientType; - private String url; - - /** - * - * - * - * @return id - * - */ - public String getId() { - return id; - } - - /** - * - * - * @param id - */ - public void setId(String id) { - this.id = id; - } - - /** - * - * - * @return requestCreate - */ - public String getRequestCreate() { - return requestCreate; - } - - /** - * - * @param requestCreate - */ - public void setRequestCreate(String requestCreate) { - this.requestCreate = requestCreate; - } - - /** - * - * @return - */ - public String getRequestHost() { - return requestHost; - } - - /** - * - * @param requestHost - */ - public void setRequestHost(String requestHost) { - this.requestHost = requestHost; - } - - /** - * - * - * - * @return - */ - public String getServerHost() { - return serverHost; - } - - /** - * - * @param serverHost - */ - public void setServerHost(String serverHost) { - this.serverHost = serverHost; - } - - /** - * - * - * - * @return - */ - public String getMessageProceed() { - return messageProceed; - } - - /** - * - * @param messageProceed - */ - public void setMessageProceed(String messageProceed) { - this.messageProceed = messageProceed; - } - - /** - * - * @return - */ - public String getTotalMessage() { - return totalMessage; - } - - /** - * - * @param totalMessage - * - * - */ - public void setTotalMessage(String totalMessage) { - this.totalMessage = totalMessage; - } - - /** - * - * @return - */ - public String getClientType() { - return clientType; - } - - /** - * - * @param clientType - * - */ - public void setClientType(String clientType) { - this.clientType = clientType; - } - - /** - * - * @return - */ - public String getUrl() { - return url; - } - - /** - * - * @param url - * - */ - public void setUrl(String url) { - this.url = url; - } - -} diff --git a/src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java b/src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java deleted file mode 100644 index c54f2db..0000000 --- a/src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java +++ /dev/null @@ -1,62 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.transaction.impl; - -import org.json.JSONObject; - -import com.att.dmf.mr.transaction.DMaaPTransactionFactory; -import com.att.dmf.mr.transaction.DMaaPTransactionObj; -import com.att.dmf.mr.transaction.TransactionObj; - -/** - * A factory for the simple Transaction implementation - * - * - * @author nilanjana.maity - * - */ -public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory { - /** - * - * @param data - * @return DMaaPTransactionObj - */ - @Override - public DMaaPTransactionObj makeNewTransactionObj(String data) { - JSONObject jsonObject = new JSONObject(data); - return new TransactionObj(jsonObject.getString("transactionId"), jsonObject.getLong("totalMessageCount"), - jsonObject.getLong("successMessageCount"), jsonObject.getLong("failureMessageCount")); - } - - /** - * - * @param id - * @return TransactionObj - * - * - */ - @Override - public DMaaPTransactionObj makeNewTransactionId(String id) { - return new TransactionObj(id); - } - -} diff --git a/src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java b/src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java deleted file mode 100644 index aebca34..0000000 --- a/src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java +++ /dev/null @@ -1,492 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.utils; - -import javax.servlet.ServletException; - -import org.I0Itec.zkclient.ZkClient; -import org.apache.curator.framework.CuratorFramework; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.stereotype.Component; - -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.backends.MetricsSet; -import com.att.dmf.mr.backends.Publisher; -import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException; -import com.att.dmf.mr.backends.memory.MemoryConsumerFactory; -import com.att.dmf.mr.backends.memory.MemoryMetaBroker; -import com.att.dmf.mr.backends.memory.MemoryQueue; -import com.att.dmf.mr.backends.memory.MemoryQueuePublisher; -import com.att.dmf.mr.beans.DMaaPCambriaLimiter; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.beans.DMaaPZkConfigDb; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.metabroker.Broker; - -import com.att.dmf.mr.metabroker.Broker1; -import com.att.dmf.mr.security.DMaaPAuthenticator; -import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.configs.confimpl.MemConfigDb; -import com.att.nsa.drumlin.till.nv.rrNvReadable; -import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.limits.Blacklist; -import com.att.nsa.security.NsaAuthenticatorService; - -import com.att.nsa.security.db.BaseNsaApiDbImpl; -import com.att.nsa.security.db.NsaApiDb; -import com.att.nsa.security.db.NsaApiDb.KeyExistsException; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; -import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory; - -/** - * Class is created for all the configuration for rest and service layer - * integration. 
- * - */ -@Component -public class ConfigurationReader { - - - private Broker1 fMetaBroker; - private ConsumerFactory fConsumerFactory; - private Publisher fPublisher; - private MetricsSet fMetrics; - @Autowired - private DMaaPCambriaLimiter fRateLimiter; - private NsaApiDb fApiKeyDb; - - private DMaaPAuthenticator fSecurityManager; - private NsaAuthenticatorService nsaSecurityManager; - private static CuratorFramework curator; - private ZkClient zk; - private DMaaPZkConfigDb fConfigDb; - private MemoryQueue q; - private MemoryMetaBroker mmb; - private Blacklist fIpBlackList; - private Emailer fEmailer; - - private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class); - - - /** - * constructor to initialize all the values - * - * @param settings - * @param fMetrics - * @param zk - * @param fConfigDb - * @param fPublisher - * @param curator - * @param fConsumerFactory - * @param fMetaBroker - * @param q - * @param mmb - * @param fApiKeyDb - * @param fSecurityManager - * @throws missingReqdSetting - * @throws invalidSettingValue - * @throws ServletException - * @throws KafkaConsumerCacheException - * @throws ConfigDbException - * @throws KeyExistsException - */ - @Autowired - public ConfigurationReader(@Qualifier("propertyReader") rrNvReadable settings, - @Qualifier("dMaaPMetricsSet") MetricsSet fMetrics, @Qualifier("dMaaPZkClient") ZkClient zk, - @Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher, - @Qualifier("curator") CuratorFramework curator, - @Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory, - @Qualifier("dMaaPKafkaMetaBroker") Broker1 fMetaBroker, - @Qualifier("q") MemoryQueue q, - @Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb fApiKeyDb, - /* - * @Qualifier("dMaaPTranDb") - * DMaaPTransactionObjDB fTranDb, - */ - @Qualifier("dMaaPAuthenticatorImpl") DMaaPAuthenticator fSecurityManager - ) - throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException, KeyExistsException { - - this.fMetrics = fMetrics; - this.zk = zk; - this.fConfigDb = fConfigDb; - this.fPublisher = fPublisher; - ConfigurationReader.curator = curator; - this.fConsumerFactory = fConsumerFactory; - this.fMetaBroker = fMetaBroker; - - this.q = q; - this.mmb = mmb; - this.fApiKeyDb = fApiKeyDb; - - this.fSecurityManager = fSecurityManager; - - long allowedtimeSkewMs=600000L; - String strallowedTimeSkewM= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"authentication.allowedTimeSkewMs"); - if(null!=strallowedTimeSkewM)allowedtimeSkewMs= Long.parseLong(strallowedTimeSkewM); - - - //String strrequireSecureChannel= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"aauthentication.requireSecureChannel"); - //if(strrequireSecureChannel!=null)requireSecureChannel=Boolean.parseBoolean(strrequireSecureChannel); - //this.nsaSecurityManager = new NsaAuthenticatorService(this.fApiKeyDb, settings.getLong("authentication.allowedTimeSkewMs", 600000L), settings.getBoolean("authentication.requireSecureChannel", true)); - //this.nsaSecurityManager = new NsaAuthenticatorService(this.fApiKeyDb, allowedtimeSkewMs, requireSecureChannel); - - servletSetup(); - } - - protected void servletSetup() - throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, ConfigDbException, KeyExistsException { - try { - - fMetrics.toJson(); - 
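// Editor's note — illustrative summary only, not part of the removed source file: the rest of
// servletSetup() (below) starts the Cambria metrics sender; then, if an admin secret is
// configured, it registers a single "admin" key in an in-memory NsaApiDb and wires that into a
// DMaaPOriginalUebAuthenticator; finally it selects the backend. A minimal sketch of that
// backend selection, using the same constants the method reads:
//
//   String type = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kBrokerType);
//   if (type == null) type = CambriaConstants.kBrokerType_Kafka;        // Kafka is the default
//   if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) {   // in-memory backend for testing
//       fPublisher = new MemoryQueuePublisher(q, mmb);
//       fConsumerFactory = new MemoryConsumerFactory(q);
//   }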
fMetrics.setupCambriaSender(); - // add the admin authenticator - - final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret); - - if ( adminSecret != null && adminSecret.length () > 0 ) - { - - final NsaApiDb adminDb = new BaseNsaApiDbImpl ( new MemConfigDb(), new NsaSimpleApiKeyFactory() ); - adminDb.createApiKey ( "admin", adminSecret ); - - fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator ( adminDb, 10*60*1000 ) ); - - } - - // setup a backend - - String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType); - if (type==null) type = CambriaConstants.kBrokerType_Kafka; - if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) { - log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka); - } else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) { - log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory); - fPublisher = new MemoryQueuePublisher(q, mmb); - //Ramkumar remove below - // fMetaBroker = mmb; - fConsumerFactory = new MemoryConsumerFactory(q); - } else { - throw new IllegalArgumentException( - "Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + "."); - } - fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) ); - this.fEmailer = new Emailer(); - log.info("Broker Type is:" + type); - - } catch (SecurityException e) { - throw new ServletException(e); - } - } - - /** - * method returns metaBroker - * - * @return - */ - public Broker1 getfMetaBroker() { - return fMetaBroker; - } - - /** - * method to set the metaBroker - * - * @param fMetaBroker - */ - public void setfMetaBroker(Broker1 fMetaBroker) { - this.fMetaBroker = fMetaBroker; - } - - /** - * method to get ConsumerFactory Object - * - * @return - */ - public ConsumerFactory getfConsumerFactory() { - return fConsumerFactory; - } - - /** - * method to set the consumerfactory object - * - * @param fConsumerFactory - */ - public void setfConsumerFactory(ConsumerFactory fConsumerFactory) { - this.fConsumerFactory = fConsumerFactory; - } - - /** - * method to get Publisher object - * - * @return - */ - public Publisher getfPublisher() { - return fPublisher; - } - - /** - * method to set Publisher object - * - * @param fPublisher - */ - public void setfPublisher(Publisher fPublisher) { - this.fPublisher = fPublisher; - } - - /** - * method to get MetricsSet Object - * - * @return - */ - public MetricsSet getfMetrics() { - return fMetrics; - } - - /** - * method to set MetricsSet Object - * - * @param fMetrics - */ - public void setfMetrics(MetricsSet fMetrics) { - this.fMetrics = fMetrics; - } - - /** - * method to get DMaaPCambriaLimiter object - * - * @return - */ - public DMaaPCambriaLimiter getfRateLimiter() { - return fRateLimiter; - } - - /** - * method to set DMaaPCambriaLimiter object - * - * @param fRateLimiter - */ - public void setfRateLimiter(DMaaPCambriaLimiter fRateLimiter) { - this.fRateLimiter = fRateLimiter; - } - - /** - * Method to get DMaaPAuthenticator object - * - * @return - */ - public DMaaPAuthenticator getfSecurityManager() { - return fSecurityManager; - } - - /** - * method to set DMaaPAuthenticator object - * - * @param fSecurityManager - */ - public void setfSecurityManager(DMaaPAuthenticator fSecurityManager) { - this.fSecurityManager = fSecurityManager; - } - - /** - * method to get rrNvReadable object - * - * @return - */ - /*public 
rrNvReadable getSettings() { - return settings; - }*/ - - /** - * method to set rrNvReadable object - * - * @param settings - */ - /*public void setSettings(rrNvReadable settings) { - this.settings = settings; - }*/ - - /** - * method to get CuratorFramework object - * - * @return - */ - public static CuratorFramework getCurator() { - return curator; - } - - /** - * method to set CuratorFramework object - * - * @param curator - */ - public static void setCurator(CuratorFramework curator) { - ConfigurationReader.curator = curator; - } - - /** - * method to get ZkClient object - * - * @return - */ - public ZkClient getZk() { - return zk; - } - - /** - * method to set ZkClient object - * - * @param zk - */ - public void setZk(ZkClient zk) { - this.zk = zk; - } - - /** - * method to get DMaaPZkConfigDb object - * - * @return - */ - public DMaaPZkConfigDb getfConfigDb() { - return fConfigDb; - } - - /** - * method to set DMaaPZkConfigDb object - * - * @param fConfigDb - */ - public void setfConfigDb(DMaaPZkConfigDb fConfigDb) { - this.fConfigDb = fConfigDb; - } - - /** - * method to get MemoryQueue object - * - * @return - */ - public MemoryQueue getQ() { - return q; - } - - /** - * method to set MemoryQueue object - * - * @param q - */ - public void setQ(MemoryQueue q) { - this.q = q; - } - - /** - * method to get MemoryMetaBroker object - * - * @return - */ - public MemoryMetaBroker getMmb() { - return mmb; - } - - /** - * method to set MemoryMetaBroker object - * - * @param mmb - */ - public void setMmb(MemoryMetaBroker mmb) { - this.mmb = mmb; - } - - /** - * method to get NsaApiDb object - * - * @return - */ - public NsaApiDb getfApiKeyDb() { - return fApiKeyDb; - } - - /** - * method to set NsaApiDb object - * - * @param fApiKeyDb - */ - public void setfApiKeyDb(NsaApiDb fApiKeyDb) { - this.fApiKeyDb = fApiKeyDb; - } - - /* - * public DMaaPTransactionObjDB getfTranDb() { return - * fTranDb; } - * - * public void setfTranDb(DMaaPTransactionObjDB - * fTranDb) { this.fTranDb = fTranDb; } - */ - /** - * method to get the zookeeper connection String - * - * @param settings - * @return - */ - public static String getMainZookeeperConnectionString() { - //return settings.getString(CambriaConstants.kSetting_ZkConfigDbServers, CambriaConstants.kDefault_ZkConfigDbServers); - - String zkServername = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers); - if (zkServername==null) zkServername=CambriaConstants.kDefault_ZkConfigDbServers; - return zkServername; - } - - public static String getMainZookeeperConnectionSRoot(){ - String strVal=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot); - - if (null==strVal) - strVal=CambriaConstants.kDefault_ZkConfigDbRoot; - - return strVal; - } - - public Blacklist getfIpBlackList() { - return fIpBlackList; - } - - public void setfIpBlackList(Blacklist fIpBlackList) { - this.fIpBlackList = fIpBlackList; - } - - public NsaAuthenticatorService getNsaSecurityManager() { - return nsaSecurityManager; - } - - public void setNsaSecurityManager(NsaAuthenticatorService nsaSecurityManager) { - this.nsaSecurityManager = nsaSecurityManager; - } - - public Emailer getSystemEmailer() - { - return this.fEmailer; - } - - -} diff --git a/src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java b/src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java deleted file mode 100644 index 5a9968d..0000000 --- 
a/src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java +++ /dev/null @@ -1,69 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.utils; - -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.CuratorFrameworkFactory; -import org.apache.curator.retry.ExponentialBackoffRetry; - -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.nsa.drumlin.till.nv.rrNvReadable; - -/** - * - * - * @author anowarul.islam - * - * - */ -public class DMaaPCuratorFactory { - /** - * - * method provide CuratorFramework object - * - * @param settings - * @return - * - * - * - */ - public static CuratorFramework getCurator(rrNvReadable settings) { - String Setting_ZkConfigDbServers =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkConfigDbServers); - - if(null==Setting_ZkConfigDbServers) - Setting_ZkConfigDbServers =CambriaConstants.kDefault_ZkConfigDbServers; - - String strSetting_ZkSessionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs); - if (strSetting_ZkSessionTimeoutMs==null) strSetting_ZkSessionTimeoutMs = CambriaConstants.kDefault_ZkSessionTimeoutMs+""; - int Setting_ZkSessionTimeoutMs = Integer.parseInt(strSetting_ZkSessionTimeoutMs); - - String str_ZkConnectionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs); - if (str_ZkConnectionTimeoutMs==null) str_ZkConnectionTimeoutMs = CambriaConstants.kDefault_ZkConnectionTimeoutMs+""; - int setting_ZkConnectionTimeoutMs = Integer.parseInt(str_ZkConnectionTimeoutMs); - - - CuratorFramework curator = CuratorFrameworkFactory.newClient( - Setting_ZkConfigDbServers,Setting_ZkSessionTimeoutMs,setting_ZkConnectionTimeoutMs - ,new ExponentialBackoffRetry(1000, 5)); - return curator; - } -} diff --git a/src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java b/src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java deleted file mode 100644 index 72db9de..0000000 --- a/src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java +++ /dev/null @@ -1,370 +0,0 @@ -/******************************************************************************* - * 
============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.dmf.mr.utils; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.PrintWriter; -import java.io.Writer; - -import javax.servlet.http.HttpServletResponse; - -import org.json.JSONException; -import org.json.JSONObject; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * class is used to create response object which is given to user - * - * @author nilanjana.maity - * - */ - -public class DMaaPResponseBuilder { - - - private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPResponseBuilder.class); - protected static final int kBufferLength = 4096; - - public static void setNoCacheHeadings(DMaaPContext ctx) { - HttpServletResponse response = ctx.getResponse(); - response.addHeader("Cache-Control", "no-store, no-cache, must-revalidate"); - response.addHeader("Pragma", "no-cache"); - response.addHeader("Expires", "0"); - } - - /** - * static method is used to create response object associated with - * JSONObject - * - * @param ctx - * @param result - * @throws JSONException - * @throws IOException - */ - public static void respondOk(DMaaPContext ctx, JSONObject result) throws JSONException, IOException { - - respondOkWithStream(ctx, "application/json", new ByteArrayInputStream(result.toString(4).getBytes())); - - } - - /** - * method used to set staus to 204 - * - * @param ctx - */ - public static void respondOkNoContent(DMaaPContext ctx) { - try { - ctx.getResponse().setStatus(204); - } catch (Exception excp) { - log.error(excp.getMessage(), excp); - } - } - - /** - * static method is used to create response object associated with html - * - * @param ctx - * @param html - */ - public static void respondOkWithHtml(DMaaPContext ctx, String html) { - try { - respondOkWithStream(ctx, "text/html", new ByteArrayInputStream(html.toString().getBytes())); - } catch (Exception excp) { - log.error(excp.getMessage(), excp); - } - } - - /** - * method used to create response object associated with InputStream - * - * @param ctx - * @param mediaType - * @param is - * @throws IOException - */ - public static void respondOkWithStream(DMaaPContext ctx, String mediaType, final InputStream is) - throws IOException { - /* - * creates response object associated with 
streamwriter - */ - respondOkWithStream(ctx, mediaType, new StreamWriter() { - - public void write(OutputStream os) throws IOException { - copyStream(is, os); - } - }); - - } - - /** - * - * @param ctx - * @param mediaType - * @param writer - * @throws IOException - */ - public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException { - ctx.getResponse().setStatus(200); - try(OutputStream os = getStreamForBinaryResponse(ctx, mediaType)) { - writer.write(os); - } - - - } - - /** - * static method to create error objects - * - * @param ctx - * @param errCode - * @param msg - */ - public static void respondWithError(DMaaPContext ctx, int errCode, String msg) { - try { - ctx.getResponse().sendError(errCode, msg); - } catch (IOException excp) { - log.error(excp.getMessage(), excp); - } - } - - /** - * method to create error objects - * - * @param ctx - * @param errCode - * @param body - */ - public static void respondWithError(DMaaPContext ctx, int errCode, JSONObject body) { - try { - sendErrorAndBody(ctx, errCode, body.toString(4), "application/json"); - } catch (Exception excp) { - log.error(excp.getMessage(), excp); - } - } - - /** - * static method creates error object in JSON - * - * @param ctx - * @param errCode - * @param msg - */ - public static void respondWithErrorInJson(DMaaPContext ctx, int errCode, String msg) { - try { - JSONObject o = new JSONObject(); - o.put("status", errCode); - o.put("message", msg); - respondWithError(ctx, errCode, o); - - } catch (Exception excp) { - log.error(excp.getMessage(), excp); - } - } - - /** - * static method used to copy the stream with the help of another method - * copystream - * - * @param in - * @param out - * @throws IOException - */ - public static void copyStream(InputStream in, OutputStream out) throws IOException { - copyStream(in, out, 4096); - } - - /** - * static method to copy the streams - * - * @param in - * @param out - * @param bufferSize - * @throws IOException - */ - public static void copyStream(InputStream in, OutputStream out, int bufferSize) throws IOException { - byte[] buffer = new byte[bufferSize]; - int len; - while ((len = in.read(buffer)) != -1) { - out.write(buffer, 0, len); - } - out.close(); - } - - /** - * interface used to define write method for outputStream - */ - public abstract static interface StreamWriter { - /** - * abstract method used to write the response - * - * @param paramOutputStream - * @throws IOException - */ - public abstract void write(OutputStream paramOutputStream) throws IOException; - } - - /** - * static method returns stream for binary response - * - * @param ctx - * @return - * @throws IOException - */ - public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx) throws IOException { - return getStreamForBinaryResponse(ctx, "application/octet-stream"); - } - - /** - * static method returns stream for binaryResponses - * - * @param ctx - * @param contentType - * @return - * @throws IOException - */ - public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException { - ctx.getResponse().setContentType(contentType); - - - boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD"))); - - if (fResponseEntityAllowed) { - try(OutputStream os = ctx.getResponse().getOutputStream()){ - return os; - }catch (Exception e){ - log.error("Exception in getStreamForBinaryResponse",e); - throw new IOException(); - } - } else { - try(OutputStream os = new 
NullStream()){ - return os; - }catch (Exception e){ - log.error("Exception in getStreamForBinaryResponse",e); - throw new IOException(); - } - } - } - - /** - * - * @author anowarul.islam - * - */ - private static class NullStream extends OutputStream { - /** - * @param b - * integer - */ - public void write(int b) { - } - } - - private static class NullWriter extends Writer { - /** - * write method - * @param cbuf - * @param off - * @param len - */ - public void write(char[] cbuf, int off, int len) { - } - - /** - * flush method - */ - public void flush() { - } - - /** - * close method - */ - public void close() { - } - } - - /** - * sttaic method fetch stream for text - * - * @param ctx - * @param err - * @param content - * @param mimeType - */ - public static void sendErrorAndBody(DMaaPContext ctx, int err, String content, String mimeType) { - try { - setStatus(ctx, err); - getStreamForTextResponse(ctx, mimeType).println(content); - } catch (IOException e) { - log.error(new StringBuilder().append("Error sending error response: ").append(e.getMessage()).toString(), - e); - } - } - - /** - * method to set the code - * - * @param ctx - * @param code - */ - public static void setStatus(DMaaPContext ctx, int code) { - ctx.getResponse().setStatus(code); - } - - /** - * static method returns stream for text response - * - * @param ctx - * @return - * @throws IOException - */ - public static PrintWriter getStreamForTextResponse(DMaaPContext ctx) throws IOException { - return getStreamForTextResponse(ctx, "text/html"); - } - - /** - * static method returns stream for text response - * - * @param ctx - * @param contentType - * @return - * @throws IOException - */ - public static PrintWriter getStreamForTextResponse(DMaaPContext ctx, String contentType) throws IOException { - ctx.getResponse().setContentType(contentType); - - PrintWriter pw = null; - boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD"))); - - if (fResponseEntityAllowed) { - pw = ctx.getResponse().getWriter(); - } else { - pw = new PrintWriter(new NullWriter()); - } - return pw; - } -} \ No newline at end of file diff --git a/src/main/java/com/att/dmf/mr/utils/Emailer.java b/src/main/java/com/att/dmf/mr/utils/Emailer.java deleted file mode 100644 index 1b68216..0000000 --- a/src/main/java/com/att/dmf/mr/utils/Emailer.java +++ /dev/null @@ -1,211 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.utils; - -import java.io.IOException; -import java.util.Properties; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import javax.mail.BodyPart; -import javax.mail.Message; -import javax.mail.Multipart; -import javax.mail.PasswordAuthentication; -import javax.mail.Session; -import javax.mail.Transport; -import javax.mail.internet.InternetAddress; -import javax.mail.internet.MimeBodyPart; -import javax.mail.internet.MimeMessage; -import javax.mail.internet.MimeMultipart; - - - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * Send an email from a message. - * - * @author peter - */ -public class Emailer -{ - public static final String kField_To = "to"; - public static final String kField_Subject = "subject"; - public static final String kField_Message = "message"; - - public Emailer() - { - fExec = Executors.newCachedThreadPool (); - - } - - public void send ( String to, String subj, String body ) throws IOException - { - final String[] addrs = to.split ( "," ); - - if ( to.length () > 0 ) - { - final MailTask mt = new MailTask ( addrs, subj, body ); - fExec.submit ( mt ); - } - else - { - log.warn ( "At least one address is required." ); - } - } - - public void close () - { - fExec.shutdown (); - } - - private final ExecutorService fExec; - - - - - private static final EELFLogger log = EELFManager.getInstance().getLogger(Emailer.class); - - public static final String kSetting_MailAuthUser = "mailLogin"; - public static final String kSetting_MailFromEmail = "mailFromEmail"; - public static final String kSetting_MailFromName = "mailFromName"; - public static final String kSetting_SmtpServer = "mailSmtpServer"; - public static final String kSetting_SmtpServerPort = "mailSmtpServerPort"; - public static final String kSetting_SmtpServerSsl = "mailSmtpServerSsl"; - public static final String kSetting_SmtpServerUseAuth = "mailSmtpServerUseAuth"; - - private class MailTask implements Runnable - { - public MailTask ( String[] to, String subject, String msgBody ) - { - fToAddrs = to; - fSubject = subject; - fBody = msgBody; - } - - private String getSetting ( String settingKey, String defval ) - { - - String strSet = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,settingKey); - if(strSet==null)strSet=defval; - return strSet; - } - - // we need to get setting values from the evaluator but also the channel config - private void makeSetting ( Properties props, String propKey, String settingKey, String defval ) - { - props.put ( propKey, getSetting ( settingKey, defval ) ); - } - - private void makeSetting ( Properties props, String propKey, String settingKey, int defval ) - { - makeSetting ( props, propKey, settingKey, "" + defval ); - } - - private void makeSetting ( Properties props, String propKey, String settingKey, boolean defval ) - { - makeSetting ( props, propKey, settingKey, "" + defval ); - } - - @Override - public void run () - { - final StringBuffer tag = new StringBuffer (); - final StringBuffer addrList = new StringBuffer (); - tag.append ( "(" ); - for ( String to : fToAddrs ) - { - if ( addrList.length () > 0 ) - { - addrList.append ( ", " ); - } - addrList.append ( to ); - } - tag.append ( addrList.toString () ); - tag.append ( ") \"" ); - tag.append ( fSubject ); - 
tag.append ( "\"" ); - - log.info ( "sending mail to " + tag ); - - try - { - final Properties prop = new Properties (); - makeSetting ( prop, "mail.smtp.port", kSetting_SmtpServerPort, 587 ); - prop.put ( "mail.smtp.socketFactory.fallback", "false" ); - prop.put ( "mail.smtp.quitwait", "false" ); - makeSetting ( prop, "mail.smtp.host", kSetting_SmtpServer, "smtp.it.att.com" ); - makeSetting ( prop, "mail.smtp.auth", kSetting_SmtpServerUseAuth, true ); - makeSetting ( prop, "mail.smtp.starttls.enable", kSetting_SmtpServerSsl, true ); - - final String un = getSetting ( kSetting_MailAuthUser, "" ); - final String value=(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"mailPassword")!=null)?AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"mailPassword"):""; - final Session session = Session.getInstance ( prop, - new javax.mail.Authenticator() - { - @Override - protected PasswordAuthentication getPasswordAuthentication() - { - return new PasswordAuthentication ( un, value ); - } - } - ); - - final Message msg = new MimeMessage ( session ); - - final InternetAddress from = new InternetAddress ( - getSetting ( kSetting_MailFromEmail, "team@sa2020.it.att.com" ), - getSetting ( kSetting_MailFromName, "The GFP/SA2020 Team" ) ); - msg.setFrom ( from ); - msg.setReplyTo ( new InternetAddress[] { from } ); - msg.setSubject ( fSubject ); - - for ( String toAddr : fToAddrs ) - { - final InternetAddress to = new InternetAddress ( toAddr ); - msg.addRecipient ( Message.RecipientType.TO, to ); - } - - final Multipart multipart = new MimeMultipart ( "related" ); - final BodyPart htmlPart = new MimeBodyPart (); - htmlPart.setContent ( fBody, "text/plain" ); - multipart.addBodyPart ( htmlPart ); - msg.setContent ( multipart ); - - Transport.send ( msg ); - - log.info ( "mailing " + tag + " off without error" ); - } - catch ( Exception e ) - { - log.warn ( "Exception caught for " + tag, e ); - } - } - - private final String[] fToAddrs; - private final String fSubject; - private final String fBody; - } -} diff --git a/src/main/java/com/att/dmf/mr/utils/PropertyReader.java b/src/main/java/com/att/dmf/mr/utils/PropertyReader.java deleted file mode 100644 index 000869e..0000000 --- a/src/main/java/com/att/dmf/mr/utils/PropertyReader.java +++ /dev/null @@ -1,125 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.utils; - -import java.util.Map; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.drumlin.till.nv.impl.nvReadableStack; - -/** - * - * @author nilesh.labde - * - * - */ -public class PropertyReader extends nvReadableStack { - /** - * - * initializing logger - * - */ - - private static final EELFLogger log = EELFManager.getInstance().getLogger(PropertyReader.class); - - - /** - * constructor initialization - * - * @throws loadException - * - */ - public PropertyReader() throws loadException { - - - - - - } - - /** - * - * - * @param argMap - * @param key - * @param defaultValue - * @return - * - */ - @SuppressWarnings("unused") - private static String getSetting(Map argMap, final String key, final String defaultValue) { - String val = (String) argMap.get(key); - if (null == val) { - return defaultValue; - } - return val; - } - - /** - * - * @param resourceName - * @param clazz - * @return - * @exception MalformedURLException - * - */ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -} diff --git a/src/main/java/com/att/dmf/mr/utils/Utils.java b/src/main/java/com/att/dmf/mr/utils/Utils.java deleted file mode 100644 index 5f84d85..0000000 --- a/src/main/java/com/att/dmf/mr/utils/Utils.java +++ /dev/null @@ -1,175 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.dmf.mr.utils; - -import java.io.IOException; -import java.io.InputStream; -import java.text.DecimalFormat; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Enumeration; -import java.util.LinkedList; -import java.util.List; -import java.util.Properties; - -import javax.servlet.http.HttpServletRequest; - -import com.att.dmf.mr.backends.kafka.KafkaPublisher; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -/** - * This is an utility class for various operations for formatting - * @author nilanjana.maity - * - */ -public class Utils { - - private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS"; - public static final String CAMBRIA_AUTH_HEADER = "X-CambriaAuth"; - private static final String BATCH_ID_FORMAT = "000000"; - private static final EELFLogger log = EELFManager.getInstance().getLogger(Utils.class); - - private Utils() { - super(); - } - - /** - * Formatting the date - * @param date - * @return date or null - */ - public static String getFormattedDate(Date date) { - SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT); - if (null != date){ - return sdf.format(date); - } - return null; - } - /** - * to get the details of User Api Key - * @param request - * @return authkey or null - */ - public static String getUserApiKey(HttpServletRequest request) { - final String auth = request.getHeader(CAMBRIA_AUTH_HEADER); - if (null != auth) { - final String[] splittedAuthKey = auth.split(":"); - return splittedAuthKey[0]; - }else if (null!=request.getHeader("Authorization")){ - /** - * AAF implementation enhancement - */ - String user= request.getUserPrincipal().getName().toString(); - return user.substring(0, user.lastIndexOf("@")); - } - return null; - } - /** - * to format the batch sequence id - * @param batchId - * @return batchId - */ - public static String getFromattedBatchSequenceId(Long batchId) { - DecimalFormat format = new DecimalFormat(BATCH_ID_FORMAT); - return format.format(batchId); - } - - /** - * to get the message length in bytes - * @param message - * @return bytes or 0 - */ - public static long messageLengthInBytes(String message) { - if (null != message) { - return message.getBytes().length; - } - return 0; - } - /** - * To get transaction id details - * @param transactionId - * @return transactionId or null - */ - public static String getResponseTransactionId(String transactionId) { - if (null != transactionId && !transactionId.isEmpty()) { - return transactionId.substring(0, transactionId.lastIndexOf("::")); - } - return null; - } - - /** - * get the thread sleep time - * @param ratePerMinute - * @return ratePerMinute or 0 - */ - public static long getSleepMsForRate ( double ratePerMinute ) - { - if ( ratePerMinute <= 0.0 ) return 0; - return Math.max ( 1000, Math.round ( 60 * 1000 / ratePerMinute ) ); - } - - public static String getRemoteAddress(DMaaPContext ctx) - { - String reqAddr = ctx.getRequest().getRemoteAddr(); - String fwdHeader = getFirstHeader("X-Forwarded-For",ctx); - return ((fwdHeader != null) ? fwdHeader : reqAddr); - } - public static String getFirstHeader(String h,DMaaPContext ctx) - { - List l = getHeader(h,ctx); - return ((l.size() > 0) ? 
(String)l.iterator().next() : null); - } - public static List getHeader(String h,DMaaPContext ctx) - { - LinkedList list = new LinkedList(); - Enumeration e = ctx.getRequest().getHeaders(h); - while (e.hasMoreElements()) - { - list.add(e.nextElement().toString()); - } - return list; - } - - public static String getKafkaproperty(){ - InputStream input = new Utils().getClass().getResourceAsStream("/kafka.properties"); - Properties props = new Properties(); - try { - props.load(input); - } catch (IOException e) { - log.error("failed to read kafka.properties"); - } - return props.getProperty("key"); - - - } - - public static boolean isCadiEnabled(){ - boolean enableCadi=false; - if(System.getenv("enableCadi")!=null){ - enableCadi=Boolean.getBoolean(System.getenv("enableCadi")); - } - - return enableCadi; - } - -} diff --git a/src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java b/src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java deleted file mode 100644 index 0e2804e..0000000 --- a/src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java +++ /dev/null @@ -1,197 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.apiServer.metrics.cambria; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; - -import org.json.JSONException; -import org.json.JSONObject; -//import org.slf4j.Logger; -//import org.slf4j.LoggerFactory; - -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.metrics.publisher.CambriaPublisher; -import com.att.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.att.nsa.apiServer.metrics.cambria.MetricsSender; -import com.att.nsa.metrics.CdmMetricsRegistry; -import com.att.nsa.metrics.impl.CdmConstant; - -/** - * MetricsSender will send the given metrics registry content as an event on the - * Cambria event broker to the given topic. 
- * - * @author peter - * - */ -public class DMaaPMetricsSender implements Runnable { - public static final String kSetting_CambriaEnabled = "metrics.send.cambria.enabled"; - public static final String kSetting_CambriaBaseUrl = "metrics.send.cambria.baseUrl"; - public static final String kSetting_CambriaTopic = "metrics.send.cambria.topic"; - public static final String kSetting_CambriaSendFreqSecs = "metrics.send.cambria.sendEverySeconds"; - - /** - * Schedule a periodic send of the given metrics registry using the given - * settings container for the Cambria location, topic, and send frequency. - *
- *
- * If the enabled flag is false, this method returns null. - * - * @param scheduler - * @param metrics - * @param settings - * @param defaultTopic - * @return a handle to the scheduled task - */ - public static ScheduledFuture sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics, - String defaultTopic) { - log.info("Inside : DMaaPMetricsSender : sendPeriodically"); - String cambriaSetting= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled); - boolean setEnable=true; - if (cambriaSetting!=null && cambriaSetting.equals("false") ) - setEnable= false; - - if (setEnable) { - String Setting_CambriaBaseUrl=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled); - - Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?"localhost":Setting_CambriaBaseUrl; - - String Setting_CambriaTopic=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaTopic); - if(Setting_CambriaTopic==null) Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap"; - - - - String Setting_CambriaSendFreqSecs=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaSendFreqSecs); - - int _CambriaSendFreqSecs =30; - if(Setting_CambriaSendFreqSecs!=null){ - _CambriaSendFreqSecs = Integer.parseInt(Setting_CambriaSendFreqSecs); - } - - - return DMaaPMetricsSender.sendPeriodically(scheduler, metrics, - Setting_CambriaBaseUrl,Setting_CambriaTopic,_CambriaSendFreqSecs - ); - /*return DMaaPMetricsSender.sendPeriodically(scheduler, metrics, - settings.getString(kSetting_CambriaBaseUrl, "localhost"), - settings.getString(kSetting_CambriaTopic, defaultTopic), - settings.getInt(kSetting_CambriaSendFreqSecs, 30));*/ - } else { - return null; - } - } - - /** - * Schedule a periodic send of the metrics registry to the given Cambria - * broker and topic. - * - * @param scheduler - * @param metrics - * the registry to send - * @param cambriaBaseUrl - * the base URL for Cambria - * @param topic - * the topic to publish on - * @param everySeconds - * how frequently to publish - * @return a handle to the scheduled task - */ - public static ScheduledFuture sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics, - String cambriaBaseUrl, String topic, int everySeconds) { - return scheduler.scheduleAtFixedRate(new DMaaPMetricsSender(metrics, cambriaBaseUrl, topic), everySeconds, - everySeconds, TimeUnit.SECONDS); - } - - /** - * Create a metrics sender. - * - * @param metrics - * @param cambriaBaseUrl - * @param topic - */ - public DMaaPMetricsSender(CdmMetricsRegistry metrics, String cambriaBaseUrl, String topic) { - try { - fMetrics = metrics; - fHostname = InetAddress.getLocalHost().getHostName(); - - // setup a "simple" publisher that will send metrics immediately - fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic); - } catch (UnknownHostException e) { - log.warn("Unable to get localhost address in MetricsSender constructor.", e); - throw new RuntimeException(e); - } - } - - /** - * Send on demand. 
- */ - public void send() { - try { - final JSONObject o = fMetrics.toJson(); - o.put("hostname", fHostname); - o.put("now", System.currentTimeMillis()); - o.put("metricsSendTime", addTimeStamp()); - o.put("transactionEnabled", false); - fCambria.send(fHostname, o.toString()); - } catch (JSONException e) { - log.warn("Error posting metrics to Cambria: " + e.getMessage()); - } catch (IOException e) { - log.warn("Error posting metrics to Cambria: " + e.getMessage()); - } - } - - /** - * Run() calls send(). It's meant for use in a background-scheduled task. - */ - @Override - public void run() { - send(); - } - - private final CdmMetricsRegistry fMetrics; - private final CambriaPublisher fCambria; - private final String fHostname; - - - - private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsSender.class); - /** - * method creates and returnd CdmConstant object using current timestamp - * - * @return - */ - public CdmConstant addTimeStamp() { - // Add the timestamp with every metrics send - final long metricsSendTime = System.currentTimeMillis(); - final Date d = new Date(metricsSendTime); - final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d); - return new CdmConstant(metricsSendTime / 1000, "Metrics Send Time (epoch); " + text); - } -} diff --git a/src/main/java/com/att/mr/filter/ContentLengthFilter.java b/src/main/java/com/att/mr/filter/ContentLengthFilter.java deleted file mode 100644 index 26f58e0..0000000 --- a/src/main/java/com/att/mr/filter/ContentLengthFilter.java +++ /dev/null @@ -1,134 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.filter; - -import java.io.IOException; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; -import javax.servlet.http.HttpServletRequest; - -import org.apache.http.HttpStatus; -import org.json.JSONObject; -import org.springframework.context.ApplicationContext; -import org.springframework.web.context.support.WebApplicationContextUtils; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.exception.DMaaPResponseCode; -import com.att.dmf.mr.exception.ErrorResponse; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -/** - * Servlet Filter implementation class ContentLengthFilter - */ -public class ContentLengthFilter implements Filter { - - private DefaultLength defaultLength; - - private FilterConfig filterConfig = null; - DMaaPErrorMessages errorMessages = null; - - private static final EELFLogger log = EELFManager.getInstance().getLogger(ContentLengthFilter.class); - /** - * Default constructor. - */ - - public ContentLengthFilter() { - // TODO Auto-generated constructor stub - } - - /** - * @see Filter#destroy() - */ - public void destroy() { - // TODO Auto-generated method stub - } - - /** - * @see Filter#doFilter(ServletRequest, ServletResponse, FilterChain) - */ - public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, - ServletException { - // TODO Auto-generated method stub - // place your code here - log.info("inside servlet do filter content length checking before pub/sub"); - HttpServletRequest request = (HttpServletRequest) req; - JSONObject jsonObj = null; - int requestLength = 0; - try { - // retrieving content length from message header - - if (null != request.getHeader("Content-Length")) { - requestLength = Integer.parseInt(request.getHeader("Content-Length")); - } - // retrieving encoding from message header - String transferEncoding = request.getHeader("Transfer-Encoding"); - // checking for no encoding, chunked and requestLength greater then - // default length - if (null != transferEncoding && !(transferEncoding.contains("chunked")) - && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) { - jsonObj = new JSONObject().append("defaultlength", defaultLength) - .append("requestlength", requestLength); - log.error("message length is greater than default"); - throw new CambriaApiException(jsonObj); - } else if (null == transferEncoding && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) { - jsonObj = new JSONObject().append("defaultlength", defaultLength.getDefaultLength()).append( - "requestlength", requestLength); - log.error("Request message is not chunked or request length is greater than default length"); - throw new CambriaApiException(jsonObj); - } else { - chain.doFilter(req, res); - } - } catch (CambriaApiException | NumberFormatException e) { - log.error("message size is greater then default"); - ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, - DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), errorMessages.getMsgSizeExceeds() - + jsonObj.toString()); - log.info(errRes.toString()); - - } - - } - - /** - * @see Filter#init(FilterConfig) - */ - public void 
init(FilterConfig fConfig) throws ServletException { - // TODO Auto-generated method stub - this.filterConfig = fConfig; - log.info("Filter Content Length Initialize"); - ApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(fConfig - .getServletContext()); - DefaultLength defLength = (DefaultLength) ctx.getBean("defLength"); - DMaaPErrorMessages errorMessages = (DMaaPErrorMessages) ctx.getBean("DMaaPErrorMessages"); - this.errorMessages = errorMessages; - this.defaultLength = defLength; - - } - -} diff --git a/src/main/java/com/att/mr/filter/DefaultLength.java b/src/main/java/com/att/mr/filter/DefaultLength.java deleted file mode 100644 index 43169e5..0000000 --- a/src/main/java/com/att/mr/filter/DefaultLength.java +++ /dev/null @@ -1,37 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 -* - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.filter; - - -public class DefaultLength { - - String defaultLength; - - public String getDefaultLength() { - return defaultLength; - } - - public void setDefaultLength(String defaultLength) { - this.defaultLength = defaultLength; - } - -} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiException.java b/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiException.java new file mode 100644 index 0000000..59d6115 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiException.java @@ -0,0 +1,80 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr; + +import org.json.JSONObject; + +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import com.att.nsa.apiServer.NsaAppException; + +public class CambriaApiException extends NsaAppException +{ + /* + * defined long type constant serialVersionUID + */ + private static final long serialVersionUID = 1L; + + private transient ErrorResponse errRes; + /** + * Implements constructor CambriaApiException + * @param jsonObject + * + */ + public CambriaApiException ( JSONObject jsonObject ) + { + super ( jsonObject ); + } + + /** + * Implements constructor CambriaApiException + * @param status + * @param msg + */ + public CambriaApiException ( int status, String msg ) + { + super ( status, msg ); + } + + /** + * Implements constructor CambriaApiException + * @param status + * @param jsonObject + */ + public CambriaApiException ( int status, JSONObject jsonObject ) + { + super ( status, jsonObject ); + } + + public CambriaApiException (ErrorResponse errRes) + { + super(errRes.getHttpStatusCode(),errRes.getErrorMessage()); + this.errRes = errRes; + } + + public ErrorResponse getErrRes() { + return errRes; + } + + public void setErrRes(ErrorResponse errRes) { + this.errRes = errRes; + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiVersionInfo.java b/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiVersionInfo.java new file mode 100644 index 0000000..6858fe4 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/CambriaApiVersionInfo.java @@ -0,0 +1,88 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +/** + * CambriaApiVersionInfo will provide the version of cambria code + * + * @author peter + * + */ +public class CambriaApiVersionInfo { + + /** + * 3 constants are defined:- + * PROPS,VERSION and LOG + */ + + private static final Properties PROPS = new Properties(); + private static final String VERSION; + + + private static final EELFLogger LOG = EELFManager.getInstance().getLogger(CambriaApiVersionInfo.class); + + /** + * private constructor created with no argument + * to avoid default constructor + */ + private CambriaApiVersionInfo() + { + + } + + /** + * returns version of String type + */ + public static String getVersion() { + return VERSION; + } + + /** + * + * defines static initialization method + * It initializes VERSION Constant + * it handles exception in try catch block + * and throws IOException + * + */ + + static { + String use = null; + try { + final InputStream is = CambriaApiVersionInfo.class + .getResourceAsStream("/cambriaApiVersion.properties"); + if (is != null) { + PROPS.load(is); + use = PROPS.getProperty("cambriaApiVersion", null); + } + } catch (IOException e) { + LOG.error("Failed due to IO EXception:"+e); + } + VERSION = use; + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/Consumer.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/Consumer.java new file mode 100644 index 0000000..cba3696 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/Consumer.java @@ -0,0 +1,105 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends; + + +/** + * A consumer interface. Consumers pull the next message from a given topic. 
+ * @author peter + */ +public interface Consumer +{ + /** + * A message interface provide the offset and message + * @author nilanjana.maity + * + */ + public interface Message + { + /** + * returning the offset of that particular message + * @return long + */ + long getOffset (); + /** + * returning the message + * @return message + */ + String getMessage (); + } + + /** + * Get this consumer's name + * @return name + */ + String getName (); + + /** + * Get creation time in ms + * @return + */ + long getCreateTimeMs (); + + /** + * Get last access time in ms + * @return + */ + long getLastAccessMs (); + + /** + * Get the next message from this source. This method must not block. + * @return the next message, or null if none are waiting + */ + Message nextMessage (); + + /** + * Get the next message from this source. This method must not block. + * @param atOffset start with the next message at or after atOffset. -1 means next from last request + * @return the next message, or null if none are waiting + */ + + + + /** + * Close/clean up this consumer + * @return + */ + boolean close(); + + /** + * Commit the offset of the last consumed message + * + */ + void commitOffsets(); + + /** + * Get the offset this consumer is currently at + * @return offset + */ + long getOffset(); + + void setOffset(long offset); + + + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/ConsumerFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/ConsumerFactory.java new file mode 100644 index 0000000..a857fc3 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/ConsumerFactory.java @@ -0,0 +1,118 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends; + +import java.util.Collection; +import java.util.HashMap; + +import org.onap.dmaap.dmf.mr.CambriaApiException; + +/** + * This is the factory class to instantiate the consumer + * + * @author nilanjana.maity + * + */ + +public interface ConsumerFactory { + public static final String kSetting_EnableCache = "cambria.consumer.cache.enabled"; + public static boolean kDefault_IsCacheEnabled = true; + + /** + * User defined exception for Unavailable Exception + * + * @author nilanjana.maity + * + */ + public class UnavailableException extends Exception { + /** + * Unavailable Exception with message + * + * @param msg + */ + public UnavailableException(String msg) { + super(msg); + } + + /** + * Unavailable Exception with the throwable object + * + * @param t + */ + public UnavailableException(Throwable t) { + super(t); + } + + /** + * Unavailable Exception with the message and cause + * + * @param msg + * @param cause + */ + public UnavailableException(String msg, Throwable cause) { + super(msg, cause); + } + + private static final long serialVersionUID = 1L; + } + + /** + * For admin use, drop all cached consumers. + */ + public void dropCache(); + + /** + * Get or create a consumer for the given set of info (topic, group, id) + * + * @param topic + * @param consumerGroupId + * @param clientId + * @param timeoutMs + * @return + * @throws UnavailableException + */ + + + /** + * For factories that employ a caching mechanism, this allows callers to + * explicitly destory a consumer that resides in the factory's cache. + * + * @param topic + * @param consumerGroupId + * @param clientId + */ + public void destroyConsumer(String topic, String consumerGroupId, + String clientId); + + /** + * For admin/debug, we provide access to the consumers + * + * @return a collection of consumers + */ + public Collection getConsumers(); + + public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException; + public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException; + + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/MetricsSet.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/MetricsSet.java new file mode 100644 index 0000000..bc0f4c6 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/MetricsSet.java @@ -0,0 +1,71 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends; + +import com.att.nsa.metrics.CdmMetricsRegistry; +/** + * This interface will help to generate metrics + * @author nilanjana.maity + * + */ +public interface MetricsSet extends CdmMetricsRegistry{ + + /** + * This method will setup cambria sender code + */ + public void setupCambriaSender (); + /** + * This method will define on route complete + * @param name + * @param durationMs + */ + public void onRouteComplete ( String name, long durationMs ); + /** + * This method will help the kafka publisher while publishing the messages + * @param amount + */ + public void publishTick ( int amount ); + /** + * This method will help the kafka consumer while consuming the messages + * @param amount + */ + public void consumeTick ( int amount ); + /** + * This method will call if the kafka consumer cache missed + */ + public void onKafkaConsumerCacheMiss (); + /** + * This method will call if the kafka consumer cache will be hit while publishing/consuming the messages + */ + public void onKafkaConsumerCacheHit (); + /** + * This method will call if the kafka consumer cache claimed + */ + public void onKafkaConsumerClaimed (); + /** + * This method will call if Kafka consumer is timed out + */ + public void onKafkaConsumerTimeout (); + + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/Publisher.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/Publisher.java new file mode 100644 index 0000000..ac7977b --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/Publisher.java @@ -0,0 +1,99 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.kafka.clients.producer.ProducerRecord; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +/** + * A publisher interface. Publishers receive messages and post them to a topic. 
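The MetricsSet hooks above are meant to be called from the publish/consume hot paths and from the consumer cache. A small illustrative helper (the helper name and call sites are assumptions) might report a batch and a cache lookup like this:

import org.onap.dmaap.dmf.mr.backends.MetricsSet;

final class MetricsReportExample {
    // Record a publish of 'published' messages and the outcome of one cache lookup.
    static void report(MetricsSet metrics, int published, boolean cacheHit) {
        metrics.publishTick(published);
        if (cacheHit) {
            metrics.onKafkaConsumerCacheHit();
        } else {
            metrics.onKafkaConsumerCacheMiss();
        }
    }
}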
+ * @author peter + */ +public interface Publisher +{ + /** + * A message interface. The message has a key and a body. + * @author peter + */ + public interface message + { + /** + * Get the key for this message. The key is used to partition messages + * into "sub-streams" that have guaranteed order. The key can be null, + * which means the message can be processed without any concern for order. + * + * @return a key, possibly null + */ + String getKey(); + + /** + * Get the message body. + * @return a message body + */ + String getMessage(); + /** + * set the logging params for transaction enabled logging + * @param logDetails + */ + void setLogDetails (LogDetails logDetails); + /** + * Get the log details for transaction enabled logging + * @return LogDetails + */ + LogDetails getLogDetails (); + + /** + * boolean transactionEnabled + * @return true/false + */ + boolean isTransactionEnabled(); + /** + * Set the transaction enabled flag from prop file or topic based implementation + * @param transactionEnabled + */ + void setTransactionEnabled(boolean transactionEnabled); + } + + /** + * Send a single message to a topic. Equivalent to sendMessages with a list of size 1. + * @param topic + * @param msg + * @throws IOException + */ + public void sendMessage ( String topic, message msg ) throws IOException; + + /** + * Send messages to a topic. + * @param topic + * @param msgs + * @throws IOException + */ + public void sendMessages ( String topic, List msgs ) throws IOException; + + public void sendBatchMessageNew(String topic ,ArrayList> kms) throws IOException; + public void sendMessagesNew( String topic, List msgs ) throws IOException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011Consumer.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011Consumer.java new file mode 100644 index 0000000..347f625 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011Consumer.java @@ -0,0 +1,397 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
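A sketch of sending one keyed message through the Publisher contract above; the anonymous message implementation and the topic name are illustrative assumptions:

import java.io.IOException;
import org.onap.dmaap.dmf.mr.backends.Publisher;
import org.onap.dmaap.dmf.mr.beans.LogDetails;

final class PublishOneExample {
    static void publishOne(Publisher publisher, String topic, final String key, final String body) throws IOException {
        publisher.sendMessage(topic, new Publisher.message() {
            private LogDetails logDetails;
            private boolean transactionEnabled;
            @Override public String getKey() { return key; }      // a null key means no ordering guarantee
            @Override public String getMessage() { return body; }
            @Override public void setLogDetails(LogDetails details) { this.logDetails = details; }
            @Override public LogDetails getLogDetails() { return logDetails; }
            @Override public boolean isTransactionEnabled() { return transactionEnabled; }
            @Override public void setTransactionEnabled(boolean enabled) { this.transactionEnabled = enabled; }
        });
    }
}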
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.kafka; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.FutureTask; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.RunnableFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.KafkaException; + +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; + + + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/** + * A consumer instance that's created per-request. These are stateless so that + * clients can connect to this service as a proxy. + * + * @author Ram + * + */ +public class Kafka011Consumer implements Consumer { + private enum State { + OPENED, CLOSED + } + + + /** + * KafkaConsumer() is constructor. It has following 4 parameters:- + * + * @param topic + * @param group + * @param id + * @param cc + * + */ + + public Kafka011Consumer(String topic, String group, String id, KafkaConsumer cc, + KafkaLiveLockAvoider2 klla) throws Exception { + fTopic = topic; + fGroup = group; + fId = id; + fCreateTimeMs = System.currentTimeMillis(); + fLastTouch = fCreateTimeMs; + fPendingMsgs = new LinkedBlockingQueue>(); + fLogTag = fGroup + "(" + fId + ")/" + fTopic; + offset = 0; + state = Kafka011Consumer.State.OPENED; + kConsumer = cc; + fKafkaLiveLockAvoider = klla; + synchronized (kConsumer) { + kConsumer.subscribe(Arrays.asList(topic)); + } + } + + private Consumer.Message makeMessage(final ConsumerRecord msg) { + return new Consumer.Message() { + @Override + public long getOffset() { + offset = msg.offset(); + return offset; + } + + @Override + public String getMessage() { + return new String(msg.value()); + } + }; + } + + @Override + public synchronized Consumer.Message nextMessage() { + + try { + if (fPendingMsgs.size() > 0) { + return makeMessage(fPendingMsgs.take()); + } + } catch (InterruptedException x) { + log.warn("After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage() + ")", + x); + } + + Callable run = new Callable() { + @Override + public Boolean call() throws Exception { + try { + ConsumerRecords records; + synchronized (kConsumer) { + records = kConsumer.poll(500); + } + for (ConsumerRecord record : records) { + + fPendingMsgs.offer(record); + } + + } catch (KafkaException x) { + log.debug(fLogTag + ": KafkaException " + x.getMessage()); + + } catch (java.lang.IllegalStateException | java.lang.IllegalArgumentException x) { + log.error(fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. " + + x.getMessage()); + + } + + + return true; + } + }; + + @SuppressWarnings({ "rawtypes", "unchecked" }) + RunnableFuture future = new FutureTask(run); + ExecutorService service = Executors.newSingleThreadExecutor(); + service.execute(future); + try { + future.get(5, TimeUnit.SECONDS); // wait 1 + // second + } catch (TimeoutException ex) { + // timed out. Try to stop the code if possible. 
+ String apiNodeId = null; + try { + apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port; + } catch (UnknownHostException e1) { + // TODO Auto-generated catch block + log.error("unable to get the localhost address"); + } + + try { + if (fKafkaLiveLockAvoider != null) + fKafkaLiveLockAvoider.unlockConsumerGroup(apiNodeId, fTopic + "::" + fGroup); + } catch (Exception e) { + log.error("unlockConsumerGroup(" + apiNodeId + "," + fTopic + "::" + fGroup); + } + + forcePollOnConsumer(); + future.cancel(true); + } catch (Exception ex) { + // timed out. Try to stop the code if possible. + future.cancel(true); + } + service.shutdown(); + + return null; + + } + + /** + * getName() method returns string type value. returns 3 parameters in + * string:- fTopic,fGroup,fId + * + * @Override + */ + public String getName() { + return fTopic + " : " + fGroup + " : " + fId; + } + + /** + * getCreateTimeMs() method returns long type value. returns fCreateTimeMs + * variable value + * + * @Override + * + */ + public long getCreateTimeMs() { + return fCreateTimeMs; + } + + public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer() { + return kConsumer; + } + + /** + * getLastAccessMs() method returns long type value. returns fLastTouch + * variable value + * + * @Override + * + */ + public long getLastAccessMs() { + return fLastTouch; + } + + /** + * getOffset() method returns long type value. returns offset variable value + * + * @Override + * + */ + public long getOffset() { + return offset; + } + + /** + * commit offsets commitOffsets() method will be called on closed of + * KafkaConsumer. + * + * @Override + * + * + * public void commitOffsets() { if (getState() == + * KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called + * on closed KafkaConsumer " + getName()); return; } + * fConnector.commitOffsets(); } + */ + + /** + * updating fLastTouch with current time in ms + */ + public void touch() { + fLastTouch = System.currentTimeMillis(); + } + + /** + * getLastTouch() method returns long type value. returns fLastTouch + * variable value + * + */ + public long getLastTouch() { + return fLastTouch; + } + + /** + * setting the kafkaConsumer state to closed + */ + + public boolean close() { + if (getState() == Kafka011Consumer.State.CLOSED) { + + log.error("close() called on closed KafkaConsumer " + getName()); + return true; + } + + + boolean retVal = kafkaConnectorshuttask(); + return retVal; + + } + + /* time out if the kafka shutdown fails for some reason */ + + private boolean kafkaConnectorshuttask() { + Callable run = new Callable() { + @Override + public Boolean call() throws Exception { + + try { + + kConsumer.close(); + + } catch (Exception e) { + log.info("@Kafka Stream shutdown erorr occurred " + getName() + " " + e); + throw new Exception("@Kafka Stream shutdown erorr occurred " + getName() + " " + e); + + } + log.info("Kafka connection closure with in 15 seconds by a Executors task"); + + return true; + } + }; + + @SuppressWarnings({ "rawtypes", "unchecked" }) + RunnableFuture future = new FutureTask(run); + ExecutorService service = Executors.newSingleThreadExecutor(); + service.execute(future); + try { + future.get(200, TimeUnit.SECONDS); // wait 1 + // second + } catch (TimeoutException ex) { + // timed out. Try to stop the code if possible. 
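Both nextMessage() above and the close() path below wrap the Kafka call in a FutureTask so a slow or wedged broker cannot hang the calling thread indefinitely. The pattern in isolation looks roughly like this sketch (the timeout value and helper name are assumptions):

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

final class GuardedCallExample {
    // Run 'task' for at most 'timeoutSeconds'; on timeout, cancel it (best effort) and report failure.
    static boolean runWithTimeout(Callable<Boolean> task, long timeoutSeconds) {
        ExecutorService service = Executors.newSingleThreadExecutor();
        FutureTask<Boolean> future = new FutureTask<>(task);
        service.execute(future);
        try {
            return Boolean.TRUE.equals(future.get(timeoutSeconds, TimeUnit.SECONDS));
        } catch (TimeoutException ex) {
            future.cancel(true);      // the Kafka call is taking too long; abandon it
            return false;
        } catch (Exception ex) {
            future.cancel(true);
            return false;
        } finally {
            service.shutdown();
        }
    }
}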
+ log.info("Timeout Occured - Kafka connection closure with in 300 seconds by a Executors task"); + future.cancel(true); + setState(Kafka011Consumer.State.OPENED); + } catch (Exception ex) { + // timed out. Try to stop the code if possible. + log.error("Exception occured Occured - Kafka connection closure with in 300 seconds by a Executors task" + + ex); + future.cancel(true); + setState(Kafka011Consumer.State.OPENED); + return false; + } + service.shutdown(); + setState(Kafka011Consumer.State.CLOSED); + return true; + } + + public void forcePollOnConsumer() { + Kafka011ConsumerUtil.forcePollOnConsumer(fTopic, fGroup, fId); + + } + + /** + * getConsumerGroup() returns Consumer group + * + * @return + */ + public String getConsumerGroup() { + return fGroup; + } + + /** + * getConsumerId returns Consumer Id + * + * @return + */ + public String getConsumerId() { + return fId; + } + + /** + * getState returns kafkaconsumer state + * + * @return + */ + private Kafka011Consumer.State getState() { + return this.state; + } + + /** + * setState() sets the kafkaConsumer state + * + * @param state + */ + private void setState(Kafka011Consumer.State state) { + this.state = state; + } + + + private final String fTopic; + private final String fGroup; + private final String fId; + private final String fLogTag; + + private KafkaConsumer kConsumer; + private long fCreateTimeMs; + private long fLastTouch; + private long offset; + private Kafka011Consumer.State state; + private KafkaLiveLockAvoider2 fKafkaLiveLockAvoider; + private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011Consumer.class); + private final LinkedBlockingQueue> fPendingMsgs; + + @Override + public void commitOffsets() { + if (getState() == Kafka011Consumer.State.CLOSED) { + log.warn("commitOffsets() called on closed KafkaConsumer " + getName()); + return; + } + kConsumer.commitSync(); + + + } + + @Override + public void setOffset(long offsetval) { + offset = offsetval; + } + + + public void setConsumerCache(KafkaConsumerCache cache) { + } + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java new file mode 100644 index 0000000..a93ac33 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java @@ -0,0 +1,123 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.kafka; + +import java.util.ArrayList; + +import org.apache.kafka.clients.consumer.ConsumerRecords; + + + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/** + * A consumer Util class for force polling when a rebalance issue is anticipated + * + * @author Ram + * + */ +public class Kafka011ConsumerUtil { + private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011ConsumerUtil.class); + + /** + * @param fconsumercache + * @param fTopic + * @param fGroup + * @param fId + * @return + */ + public static boolean forcePollOnConsumer(final String fTopic, final String fGroup, final String fId) { + + Thread forcepollThread = new Thread(new Runnable() { + public void run() { + try { + + ArrayList kcsList = null; + + kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(fTopic + "::" + fGroup + "::", fId); + if (null != kcsList) { + for (int counter = 0; counter < kcsList.size(); counter++) { + + Kafka011Consumer kc1 = kcsList.get(counter); + + try { + ConsumerRecords recs = kc1.getConsumer().poll(0); + log.info("soft poll on " + kc1); + } catch (java.util.ConcurrentModificationException e) { + log.error("Error occurs for " + e); + } + + } + + } + + } catch (Exception e) { + log.error("Failed and go to Exception block for " + fGroup + " " + e.getMessage()); + } + } + }); + + forcepollThread.start(); + + return false; + + } + + /** + * @param fconsumercache + * @param group + * @return + */ + public static boolean forcePollOnConsumer(final String group) { + + Thread forcepollThread = new Thread(new Runnable() { + public void run() { + try { + ArrayList kcsList = new ArrayList(); + kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(group); + + if (null != kcsList) { + + for (int counter = 0; counter < kcsList.size(); counter++) { + + Kafka011Consumer kc1 = kcsList.get(counter); + log.info("soft poll on remote nodes " + kc1); + ConsumerRecords recs = kc1.getConsumer().poll(0); + } + + } + + } catch (java.util.ConcurrentModificationException e) { + log.error("Error occurs for " + e); + } catch (Exception e) { + log.error("Failed and go to Exception block for " + group + " " + e.getMessage()); + } + } + }); + + forcepollThread.start(); + return false; + + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumer.txt b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumer.txt new file mode 100644 index 0000000..dd6259f --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumer.txt @@ -0,0 +1,386 @@ +package com.att.dmf.mr.backends.kafka; + +import java.util.Arrays; +import java.util.Properties; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.FutureTask; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.RunnableFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.common.KafkaException; + +import com.att.dmf.mr.backends.Consumer; + +//import org.slf4j.Logger; +//import org.slf4j.LoggerFactory; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/** + * A consumer 
instance that's created per-request. These are stateless so that + * clients can connect to this service as a proxy. + * + * @author peter + * + */ +public class KafkaConsumer implements Consumer { + private enum State { + OPENED, CLOSED + } + + /** + * KafkaConsumer() is constructor. It has following 4 parameters:- + * + * @param topic + * @param group + * @param id + * @param cc + * + */ + + public KafkaConsumer(String topic, String group, String id, Properties prop) throws Exception { + fTopic = topic; + fGroup = group; + fId = id; + // fConnector = cc; + + fCreateTimeMs = System.currentTimeMillis(); + fLastTouch = fCreateTimeMs; + fPendingMsgs = new LinkedBlockingQueue> (); + fLogTag = fGroup + "(" + fId + ")/" + fTopic; + offset = 0; + + state = KafkaConsumer.State.OPENED; + + // final Map topicCountMap = new HashMap(); + // topicCountMap.put(fTopic, 1); + // log.info(fLogTag +" kafka Consumer started at " + // +System.currentTimeMillis()); + // final Map>> consumerMap = + // fConnector.createMessageStreams(topicCountMap); + // final List> streams = + // consumerMap.get(fTopic); + + kConsumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(prop); + // System.out.println("I am in Consumer APP " + topic + "-- " + + // fConsumer); + kConsumer.subscribe(Arrays.asList(topic)); + log.info(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs)); + System.out.println("-----id " +id); + + + try { ConsumerRecords records = + kConsumer.poll(500); System.out.println("---" + + records.count()); + + for (ConsumerRecord record : records) { + System.out.printf("offset = %d, key = %s, value = %s", + record.offset(), record.key(), record.value()); String t = + record.value(); + + } + }catch(Exception e){ + System.out.println( e); + } + System.out.println(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs)); + kConsumer.commitSync(); + // fConsumer.close(); + + + /* + * ConsumerRecords records = fConsumer.poll(500); + * System.out.println("---" + records.count()); + * + * for (ConsumerRecord record : records) { + * System.out.printf("offset = %d, key = %s, value = %s", + * record.offset(), record.key(), record.value()); String t = + * record.value(); + * + * } + * + * + * fConsumer.commitSync(); fConsumer.close(); + */ + + // fStream = streams.iterator().next(); + } + + + + private Consumer.Message makeMessage ( final ConsumerRecord msg ) + { + return new Consumer.Message() + { + @Override + public long getOffset () + { + return msg.offset (); + } + + @Override + public String getMessage () + { + return new String ( msg.value () ); + } + }; + } + + @Override + public synchronized Consumer.Message nextMessage () + { + + try + { + if ( fPendingMsgs.size () > 0 ) + { + return makeMessage ( fPendingMsgs.take () ); + } + } + catch ( InterruptedException x ) + { + log.warn ( "After size>0, pending msg take() threw InterruptedException. Ignoring. 
(" + x.getMessage () + ")", x ); + } + + + try + { + boolean foundMsgs = false; + System.out.println("entering into pollingWWWWWWWWWWWWWWWWW"); + final ConsumerRecords records = kConsumer.poll ( 100 ); + System.out.println("polling doneXXXXXXXXXXXXXXXXXXXXXXXXXXX...."); + for ( ConsumerRecord record : records ) + { + foundMsgs = true; + fPendingMsgs.offer ( record ); + } + + } + catch ( KafkaException x ) + { + log.debug ( fLogTag + ": KafkaException " + x.getMessage () ); + + } + catch ( java.lang.IllegalStateException | java.lang.IllegalArgumentException x ) + { + log.error ( fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. " + x.getMessage () ); + + } + + return null; + } + + + + /** + * getName() method returns string type value. returns 3 parameters in + * string:- fTopic,fGroup,fId + * + * @Override + */ + public String getName() { + return fTopic + " : " + fGroup + " : " + fId; + } + + /** + * getCreateTimeMs() method returns long type value. returns fCreateTimeMs + * variable value + * + * @Override + * + */ + public long getCreateTimeMs() { + return fCreateTimeMs; + } + + public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer() { + return kConsumer; + } + + /** + * getLastAccessMs() method returns long type value. returns fLastTouch + * variable value + * + * @Override + * + */ + public long getLastAccessMs() { + return fLastTouch; + } + + + + /** + * getOffset() method returns long type value. returns offset variable value + * + * @Override + * + */ + public long getOffset() { + return offset; + } + + /** + * commit offsets commitOffsets() method will be called on closed of + * KafkaConsumer. + * + * @Override + * + * + * public void commitOffsets() { if (getState() == + * KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called + * on closed KafkaConsumer " + getName()); return; } + * fConnector.commitOffsets(); } + */ + + /** + * updating fLastTouch with current time in ms + */ + public void touch() { + fLastTouch = System.currentTimeMillis(); + } + + /** + * getLastTouch() method returns long type value. returns fLastTouch + * variable value + * + */ + public long getLastTouch() { + return fLastTouch; + } + + /** + * setting the kafkaConsumer state to closed + */ + public synchronized boolean close() { + + if (getState() == KafkaConsumer.State.CLOSED) { + + log.warn("close() called on closed KafkaConsumer " + getName()); + return true; + } + + setState(KafkaConsumer.State.CLOSED); + // fConnector.shutdown(); + boolean retVal = kafkaConnectorshuttask(); + return retVal; + + } + + /* time out if the kafka shutdown fails for some reason */ + + private boolean kafkaConnectorshuttask() { + Callable run = new Callable() { + @Override + public Boolean call() throws Exception { + // your code to be timed + try { + System.out.println("consumer closing....." + kConsumer); + kConsumer.close(); + } catch (Exception e) { + log.info("@@@@@@Kafka Stream shutdown erorr occurred " + getName() + " " + e); + } + log.info("Kafka connection closure with in 15 seconds by a Executors task"); + return true; + } + }; + + RunnableFuture future = new FutureTask(run); + ExecutorService service = Executors.newSingleThreadExecutor(); + service.execute(future); + Boolean result = null; + try { + result = (Boolean) future.get(15, TimeUnit.SECONDS); // wait 1 + // second + } catch (TimeoutException ex) { + // timed out. Try to stop the code if possible. 
+ log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task"); + future.cancel(true); + } catch (Exception ex) { + // timed out. Try to stop the code if possible. + log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task" + ex); + future.cancel(true); + return false; + } + service.shutdown(); + return true; + } + + /** + * getConsumerGroup() returns Consumer group + * + * @return + */ + public String getConsumerGroup() { + return fGroup; + } + + /** + * getConsumerId returns Consumer Id + * + * @return + */ + public String getConsumerId() { + return fId; + } + + /** + * getState returns kafkaconsumer state + * + * @return + */ + private KafkaConsumer.State getState() { + return this.state; + } + + /** + * setState() sets the kafkaConsumer state + * + * @param state + */ + private void setState(KafkaConsumer.State state) { + this.state = state; + } + + // private ConsumerConnector fConnector; + private final String fTopic; + private final String fGroup; + private final String fId; + private final String fLogTag; + // private final KafkaStream fStream; + private final org.apache.kafka.clients.consumer.KafkaConsumer kConsumer; + private long fCreateTimeMs; + private long fLastTouch; + private long offset; + private KafkaConsumer.State state; + private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class); + private final LinkedBlockingQueue> fPendingMsgs; + // private static final Logger log = + // LoggerFactory.getLogger(KafkaConsumer.class); + + @Override + public void commitOffsets() { + if (getState() == KafkaConsumer.State.CLOSED) { + log.warn("commitOffsets() called on closed KafkaConsumer " + getName()); + return; + } + kConsumer.commitSync(); + // fConsumer.close(); + + } + + + + @Override + public void setOffset(long offsetval) { + // TODO Auto-generated method stub + offset = offsetval; + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumerCache.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumerCache.java new file mode 100644 index 0000000..a38d77b --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaConsumerCache.java @@ -0,0 +1,742 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.kafka; + +import java.io.IOException; +import java.net.InetAddress; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Enumeration; +import java.util.LinkedList; +import java.util.List; +import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import javax.annotation.Resource; + +import org.I0Itec.zkclient.exception.ZkException; +import org.I0Itec.zkclient.exception.ZkInterruptedException; +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.imps.CuratorFrameworkState; +import org.apache.curator.framework.recipes.cache.ChildData; +import org.apache.curator.framework.recipes.cache.PathChildrenCache; +import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; +import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; +import org.apache.curator.framework.state.ConnectionState; +import org.apache.curator.framework.state.ConnectionStateListener; +import org.apache.curator.utils.EnsurePath; +import org.apache.curator.utils.ZKPaths; +import org.apache.http.annotation.NotThreadSafe; +import org.apache.zookeeper.KeeperException; +import org.apache.zookeeper.KeeperException.NoNodeException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.ComponentScan; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; + + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.metrics.CdmTimer; + +/** + * @NotThreadSafe but expected to be used within KafkaConsumerFactory, which + * must be + * @author peter + * + */ +@NotThreadSafe +public class KafkaConsumerCache { + + private static KafkaConsumerCache kafkaconscache = null; + + public static KafkaConsumerCache getInstance() { + if (kafkaconscache == null) + kafkaconscache = new KafkaConsumerCache(); + + return kafkaconscache; + } + + private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs"; + private static final int kDefault_ConsumerHandoverWaitMs = 500; + + private static final String kSetting_SweepEverySeconds = "cambria.consumer.cache.sweepFreqSeconds"; + private static final String kSetting_TouchEveryMs = "cambria.consumer.cache.touchFreqMs"; + + private static final String kSetting_ZkBasePath = "cambria.consumer.cache.zkBasePath"; + private static final String kDefault_ZkBasePath = CambriaConstants.kDefault_ZkRoot + "/consumerCache"; + + // kafka defaults to timing out a client after 6 seconds of inactivity, but + // it heartbeats even when the client isn't fetching. Here, we don't + // want to prematurely rebalance the consumer group. Assuming clients are + // hitting + // the server at least every 30 seconds, timing out after 2 minutes should + // be okay. + // FIXME: consider allowing the client to specify its expected call rate? 
+ private static final long kDefault_MustTouchEveryMs = 1000L*60*2; + + // check for expirations pretty regularly + private static final long kDefault_SweepEverySeconds = 15; + + private enum Status { + NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED + } + + + + + @Autowired + private DMaaPErrorMessages errorMessages; + + + /** + * User defined exception class for kafka consumer cache + * + * @author nilanjana.maity + * + */ + public class KafkaConsumerCacheException extends Exception { + /** + * To throw the exception + * + * @param t + */ + KafkaConsumerCacheException(Throwable t) { + super(t); + } + + /** + * + * @param s + */ + public KafkaConsumerCacheException(String s) { + super(s); + } + + private static final long serialVersionUID = 1L; + } + + /** + * Creates a KafkaConsumerCache object. Before it is used, you must call + * startCache() + * + * @param apiId + * @param s + * @param metrics + */ + public KafkaConsumerCache() { + + String strkSetting_ZkBasePath = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + kSetting_ZkBasePath); + if (null == strkSetting_ZkBasePath) + strkSetting_ZkBasePath = kDefault_ZkBasePath; + fBaseZkPath = strkSetting_ZkBasePath; + + fConsumers = new ConcurrentHashMap<>(); + fSweepScheduler = Executors.newScheduledThreadPool(1); + + curatorConsumerCache = null; + + status = Status.NOT_STARTED; + // Watcher for consumer rebalancing across nodes. Kafka011 rebalancing + // work around + + listener = new ConnectionStateListener() { + public void stateChanged(CuratorFramework client, ConnectionState newState) { + if (newState == ConnectionState.LOST) { + + log.info("ZooKeeper connection expired"); + handleConnectionLoss(); + } else if (newState == ConnectionState.READ_ONLY) { + log.warn("ZooKeeper connection set to read only mode."); + } else if (newState == ConnectionState.RECONNECTED) { + log.info("ZooKeeper connection re-established"); + handleReconnection(); + } else if (newState == ConnectionState.SUSPENDED) { + log.warn("ZooKeeper connection has been suspended."); + handleConnectionSuspended(); + } + } + }; + } + + /** + * Start the cache service. This must be called before any get/put + * operations. 
+ * + * @param mode + * DMAAP or cambria + * @param curator + * @throws IOException + * @throws KafkaConsumerCacheException + */ + public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException { + + if (fApiId == null) { + throw new IllegalArgumentException("API Node ID must be specified."); + } + + try { + + if (mode != null && mode.equals(CambriaConstants.DMAAP)) { + curator = getCuratorFramework(curator); + } + curator.getConnectionStateListenable().addListener(listener); + setStatus(Status.CONNECTED); + curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true); + curatorConsumerCache.start(); + curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() { + public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception { + switch (event.getType()) { + case CHILD_ADDED: { + try { + final String apiId = new String(event.getData().getData()); + final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath()); + + log.info(apiId + " started consumer " + consumer); + } catch (Exception ex) { + log.info("#Error Occured during Adding child" + ex); + } + break; + } + case CHILD_UPDATED: { + final String apiId = new String(event.getData().getData()); + final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath()); + + if (fConsumers.containsKey(consumer)) { + log.info(apiId + " claimed consumer " + consumer + " from " + fApiId + + " but wont hand over"); + // Commented so that it dont give the connection + // until the active node is running for this client + // id. + dropClaimedConsumer(consumer); + } + + break; + } + case CHILD_REMOVED: { + final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath()); + + if (fConsumers.containsKey(consumer)) { + log.info("Someone wanted consumer " + consumer + + " gone; but not removing it from the cache"); + dropConsumer(consumer, false); + } + + break; + } + + default: + break; + } + } + }); + + // initialize the ZK path + EnsurePath ensurePath = new EnsurePath(fBaseZkPath); + ensurePath.ensure(curator.getZookeeperClient()); + + + + long freq = kDefault_SweepEverySeconds; + String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + kSetting_SweepEverySeconds); + if (null != strkSetting_SweepEverySeconds) { + freq = Long.parseLong(strkSetting_SweepEverySeconds); + } + + fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS); + log.info("KafkaConsumerCache started"); + log.info("sweeping cached clients every " + freq + " seconds"); + } catch (ZkException e) { + log.error("@@@@@@ ZK Exception occured for " + e); + throw new KafkaConsumerCacheException(e); + } catch (Exception e) { + log.error("@@@@@@ Exception occured for " + e); + throw new KafkaConsumerCacheException(e); + } + } + + /** + * Getting the curator oject to start the zookeeper connection estabished + * + * @param curator + * @return curator object + */ + public static CuratorFramework getCuratorFramework(CuratorFramework curator) { + if (curator.getState() == CuratorFrameworkState.LATENT) { + curator.start(); + + try { + curator.blockUntilConnected(); + } catch (InterruptedException e) { + log.error("error while setting curator framework :",e); + Thread.currentThread().interrupt(); + } + } + + return curator; + } + + /** + * Stop the cache service. 
+ */ + public void stopCache() { + setStatus(Status.DISCONNECTED); + + final CuratorFramework curator = ConfigurationReader.getCurator(); + + if (curator != null) { + try { + curator.getConnectionStateListenable().removeListener(listener); + curatorConsumerCache.close(); + log.info("Curator client closed"); + } catch (ZkInterruptedException e) { + log.warn("Curator client close interrupted: " + e.getMessage()); + } catch (IOException e) { + log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e.getMessage()); + } + + curatorConsumerCache = null; + } + + if (fSweepScheduler != null) { + fSweepScheduler.shutdownNow(); + log.info("cache sweeper stopped"); + } + + if (fConsumers != null) { + fConsumers.clear(); + fConsumers = null; + } + + setStatus(Status.NOT_STARTED); + + log.info("Consumer cache service stopped"); + } + + /** + * Get a cached consumer by topic, group, and id, if it exists (and remains + * valid) In addition, this method waits for all other consumer caches in + * the cluster to release their ownership and delete their version of this + * consumer. + * + * @param topic + * @param consumerGroupId + * @param clientId + * @return a consumer, or null + */ + public Kafka011Consumer getConsumerFor(String topic, String consumerGroupId, String clientId) + throws KafkaConsumerCacheException { + if (getStatus() != KafkaConsumerCache.Status.CONNECTED) + throw new KafkaConsumerCacheException("The cache service is unavailable."); + + final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId); + final Kafka011Consumer kc = fConsumers.get(consumerKey); + + if (kc != null) { + log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch()); + kc.touch(); + fMetrics.onKafkaConsumerCacheHit(); + } else { + log.debug("Consumer cache miss for [" + consumerKey + "]"); + fMetrics.onKafkaConsumerCacheMiss(); + } + + return kc; + } + + /** + * Get a cached consumer by topic, group, and id, if it exists (and remains + * valid) In addition, this method waits for all other consumer caches in + * the cluster to release their ownership and delete their version of this + * consumer. 
+ * + * @param topic + * @param consumerGroupId + * @param clientId + * @return a consumer, or null + */ + public ArrayList getConsumerListForCG(String topicgroup, String clientId) + throws KafkaConsumerCacheException { + if (getStatus() != KafkaConsumerCache.Status.CONNECTED) + throw new KafkaConsumerCacheException("The cache service is unavailable."); + ArrayList kcl = new ArrayList<>(); + + + Enumeration strEnum = fConsumers.keys(); + String consumerLocalKey = null; + while (strEnum.hasMoreElements()) { + consumerLocalKey = strEnum.nextElement(); + + if (consumerLocalKey.startsWith(topicgroup) && (!consumerLocalKey.endsWith("::" + clientId))) { + + + + + kcl.add(fConsumers.get(consumerLocalKey)); + + } + } + + return kcl; + } + + public ArrayList getConsumerListForCG(String group) throws KafkaConsumerCacheException { + if (getStatus() != KafkaConsumerCache.Status.CONNECTED) + throw new KafkaConsumerCacheException("The cache service is unavailable."); + ArrayList kcl = new ArrayList<>(); + + Enumeration strEnum = fConsumers.keys(); + String consumerLocalKey = null; + while (strEnum.hasMoreElements()) { + consumerLocalKey = strEnum.nextElement(); + + if (consumerLocalKey.startsWith(group)) { + + + kcl.add(fConsumers.get(consumerLocalKey)); + + } + } + + return kcl; + } + + /** + * Put a consumer into the cache by topic, group and ID + * + * @param topic + * @param consumerGroupId + * @param consumerId + * @param consumer + * @throws KafkaConsumerCacheException + */ + public void putConsumerFor(String topic, String consumerGroupId, String consumerId, Kafka011Consumer consumer) + throws KafkaConsumerCacheException { + if (getStatus() != KafkaConsumerCache.Status.CONNECTED) + throw new KafkaConsumerCacheException("The cache service is unavailable."); + + final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId); + fConsumers.put(consumerKey, consumer); + + + + log.info("^@ Consumer Added to Cache Consumer Key" + consumerKey + " ApiId" + fApiId); + } + + public Collection getConsumers() { + return new LinkedList<>(fConsumers.values()); + } + + /** + * This method is to drop all the consumer + */ + public void dropAllConsumers() { + for (Entry entry : fConsumers.entrySet()) { + dropConsumer(entry.getKey(), true); + } + + // consumers should be empty here + if (fConsumers.size() > 0) { + log.warn("During dropAllConsumers, the consumer map is not empty."); + fConsumers.clear(); + } + } + + /** + * Drop a consumer from our cache due to a timeout + * + * @param key + */ + private void dropTimedOutConsumer(String key) { + fMetrics.onKafkaConsumerTimeout(); + + if (!fConsumers.containsKey(key)) { + log.warn("Attempted to drop a timed out consumer which was not in our cache: " + key); + return; + } + + // First, drop this consumer from our cache + boolean isdrop = dropConsumer(key, true); + if (!isdrop) { + return; + } + final CuratorFramework curator = ConfigurationReader.getCurator(); + + try { + curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key); + log.info(" ^ deleted " + fBaseZkPath + "/" + key); + } catch (NoNodeException e) { + log.warn("A consumer was deleted from " + fApiId + + "'s cache, but no Cambria API node had ownership of it in ZooKeeper"); + } catch (Exception e) { + log.debug("Unexpected exception while deleting consumer: " + e.getMessage()); + log.info(" %%%%%%@# Unexpected exception while deleting consumer: " + e.getMessage()); + } + + try { + int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs; + String strkSetting_ConsumerHandoverWaitMs 
= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + kSetting_ConsumerHandoverWaitMs); + if (strkSetting_ConsumerHandoverWaitMs != null) + consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs); + Thread.sleep(consumerHandoverWaitMs); + } catch (InterruptedException e) { + log.error("InterruptedException in dropTimedOutConsumer",e); + Thread.currentThread().interrupt(); + } + log.info("Dropped " + key + " consumer due to timeout"); + } + + /** + * Drop a consumer from our cache due to another API node claiming it as + * their own. + * + * @param key + */ + private void dropClaimedConsumer(String key) { + // if the consumer is still in our cache, it implies a claim. + if (fConsumers.containsKey(key)) { + fMetrics.onKafkaConsumerClaimed(); + log.info("Consumer [" + key + "] claimed by another node."); + } + log.info("^dropping claimed Kafka consumer " + key); + dropConsumer(key, false); + } + + /** + * Removes the consumer from the cache and closes its connection to the + * kafka broker(s). + * + * @param key + * @param dueToTimeout + */ + private boolean dropConsumer(String key, boolean dueToTimeout) { + final Kafka011Consumer kc = fConsumers.get(key); + log.info("closing Kafka consumer " + key + " object " + kc); + if (kc != null) { + + if (kc.close()) { + fConsumers.remove(key); + + } else { + return false; + } + } + return true; + } + + // private final rrNvReadable fSettings; + private MetricsSet fMetrics; + private final String fBaseZkPath; + private final ScheduledExecutorService fSweepScheduler; + private String fApiId; + + public void setfMetrics(final MetricsSet metrics) { + this.fMetrics = metrics; + } + + public void setfApiId(final String id) { + this.fApiId = id; + } + + private final ConnectionStateListener listener; + + private ConcurrentHashMap fConsumers; + private PathChildrenCache curatorConsumerCache; + + private volatile Status status; + + private void handleReconnection() { + + log.info("Reading current cache data from ZK and synchronizing local cache"); + final List cacheData = curatorConsumerCache.getCurrentData(); + // Remove all the consumers in this API nodes cache that now belong to + // other API nodes. + for (ChildData cachedConsumer : cacheData) { + final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath()); + final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData()) + : "undefined"; + if (!fApiId.equals(owningApiId)) { + fConsumers.remove(consumerId); // Commented to avoid removing + // the value cache hashmap but the lock still exists. 
+ // This is not considered in kafka consumer Factory + log.info("@@@ Validating current cache data from ZK and synchronizing local cache" + owningApiId + + " removing " + consumerId); + } + } + + setStatus(Status.CONNECTED); + } + + private void handleConnectionSuspended() { + log.info("Suspending cache until ZK connection is re-established"); + + setStatus(Status.SUSPENDED); + } + + private void handleConnectionLoss() { + log.info("Clearing consumer cache (shutting down all Kafka consumers on this node)"); + + setStatus(Status.DISCONNECTED); + + closeAllCachedConsumers(); + fConsumers.clear(); + } + + private void closeAllCachedConsumers() { + for (Entry entry : fConsumers.entrySet()) { + try { + entry.getValue().close(); + } catch (Exception e) { + log.info("@@@@@@ Error occurd while closing Clearing All cache " + e); + } + } + } + + private static String makeConsumerKey(String topic, String consumerGroupId, String clientId) { + return topic + "::" + consumerGroupId + "::" + clientId; + } + + /** + * This method is to get a lock + * + * @param topic + * @param consumerGroupId + * @param consumerId + * @throws KafkaConsumerCacheException + */ + public void signalOwnership(final String topic, final String consumerGroupId, final String consumerId) + throws KafkaConsumerCacheException { + // get a lock at /:::: + final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId); + + try(final CdmTimer timer = new CdmTimer(fMetrics, "CacheSignalOwnership")) { + final String consumerPath = fBaseZkPath + "/" + consumerKey; + log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey); + final CuratorFramework curator = ConfigurationReader.getCurator(); + + try { + curator.setData().forPath(consumerPath, fApiId.getBytes()); + } catch (KeeperException.NoNodeException e) { + curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes()); + } + log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey); + timer.end(); + } catch (Exception e) { + log.error(fApiId + " failed to claim ownership of consumer " + consumerKey); + throw new KafkaConsumerCacheException(e); + } + + log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer"); + + try { + int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs; + String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + kSetting_ConsumerHandoverWaitMs); + if (strkSetting_ConsumerHandoverWaitMs != null) + consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs); + Thread.sleep(consumerHandoverWaitMs); + } catch (InterruptedException e) { + log.error("InterruptedException in signalOwnership",e); + Thread.currentThread().interrupt(); + } + } + + public KafkaLiveLockAvoider2 getkafkaLiveLockAvoiderObj() { + return null; + } + + public void sweep() { + final LinkedList removals = new LinkedList(); + long mustTouchEveryMs = kDefault_MustTouchEveryMs; + String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + kSetting_TouchEveryMs); + if (null != strkSetting_TouchEveryMs) { + mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs); + } + + + final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs; + + for (Entry e : fConsumers.entrySet()) { + final long lastTouchMs = e.getValue().getLastTouch(); + log.debug("consumer #####1" + e.getKey() + " " + lastTouchMs + " < " + oldestAllowedTouchMs); + + if (lastTouchMs < 
oldestAllowedTouchMs) { + log.info("consumer " + e.getKey() + " has expired"); + removals.add(e.getKey()); + } + } + + for (String key : removals) { + dropTimedOutConsumer(key); + } + } + + /** + * Creating a thread to run the sweep method + * + * @author nilanjana.maity + * + */ + private class sweeper implements Runnable { + /** + * run method + */ + public void run() { + sweep(); + } + } + + /** + * This method is to drop consumer + * + * @param topic + * @param consumerGroup + * @param clientId + */ + public void dropConsumer(String topic, String consumerGroup, String clientId) { + dropConsumer(makeConsumerKey(topic, consumerGroup, clientId), false); + } + + private Status getStatus() { + return this.status; + } + + private void setStatus(Status status) { + this.status = status; + } + + private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class); + +} \ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java new file mode 100644 index 0000000..0bfbcb5 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java @@ -0,0 +1,159 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
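A sketch of how a caller might combine the cache operations above into a get-or-create step; the helper below and its flow are illustrative assumptions (the real consumer factory adds its own locking and live-lock handling on top of this):

import org.onap.dmaap.dmf.mr.backends.kafka.Kafka011Consumer;
import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache;
import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;

final class CacheLookupExample {
    // Return the cached consumer for (topic, group, clientId), or claim and register a freshly built one.
    static Kafka011Consumer getOrRegister(String topic, String group, String clientId, Kafka011Consumer freshlyBuilt)
            throws KafkaConsumerCacheException {
        KafkaConsumerCache cache = KafkaConsumerCache.getInstance();
        Kafka011Consumer cached = cache.getConsumerFor(topic, group, clientId);   // touches it and counts a cache hit
        if (cached != null) {
            return cached;
        }
        cache.signalOwnership(topic, group, clientId);    // write this node's API id to the consumer's ZK node
        cache.putConsumerFor(topic, group, clientId, freshlyBuilt);
        return freshlyBuilt;
    }
}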
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.kafka; + + +import java.util.List; +import java.util.concurrent.TimeUnit; + +import javax.annotation.PostConstruct; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.locks.InterProcessMutex; +import org.apache.zookeeper.CreateMode; +import org.apache.zookeeper.Watcher; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Component; + +//@ComponentScan(basePackages="com.att.dmf.mr.backends.kafka") +@Component +public class KafkaLiveLockAvoider2 { + + public static final String ZNODE_ROOT = "/live-lock-avoid"; + public static final String ZNODE_LOCKS = "/locks"; + public static final String ZNODE_UNSTICK_TASKS ="/unstick-tasks"; + + private static String locksPath = ZNODE_ROOT+ZNODE_LOCKS; + private static String tasksPath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS; + private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaLiveLockAvoider2.class.getName()); + + @Autowired + @Qualifier("curator") + private CuratorFramework curatorFramework; + + @PostConstruct + public void init() { + log.info("Welcome......................................................................................"); + try { + if (curatorFramework.checkExists().forPath(locksPath) == null) { + curatorFramework.create().creatingParentsIfNeeded().forPath(locksPath); + } + if (curatorFramework.checkExists().forPath(tasksPath) == null) { + curatorFramework.create().creatingParentsIfNeeded().forPath(tasksPath); + } + + } catch (Exception e) { + + log.error("Error during creation of permanent Znodes under /live-lock-avoid ",e); + + } + + + } + public void unlockConsumerGroup(String appId, String groupName) throws Exception { + + log.info("Signalling unlock to all conumsers of in group [{}] now, " , groupName); + + String fullLockPath = String.format("%s/%s", locksPath, groupName ); + String fullTasksPath = null; + + try { + + //Use the Curator recipe for a Mutex lock, only one process can be broadcasting unlock instructions for a group + InterProcessMutex lock = new InterProcessMutex(curatorFramework, fullLockPath); + if ( lock.acquire(100L, TimeUnit.MILLISECONDS) ) + { + try + { + List taskNodes = curatorFramework.getChildren().forPath(tasksPath); + for (String taskNodeName : taskNodes) { + if(!taskNodeName.equals(appId)) { + + fullTasksPath = String.format("%s/%s/%s", tasksPath, taskNodeName, groupName); + log.info("Writing groupName {} to path {}",groupName, fullTasksPath); + + + if(curatorFramework.checkExists().forPath(fullTasksPath) != null) { + curatorFramework.delete().forPath(fullTasksPath); + } + curatorFramework.create().withMode(CreateMode.EPHEMERAL).forPath(fullTasksPath); + } + } + + + } + finally + { + //Curator lock recipe requires a acquire() to be followed by a release() + lock.release(); + } + }else { + log.info("Could not obtain the avoider lock, another process has the avoider lock? {}", !lock.isAcquiredInThisProcess() ); + } + + + } catch (Exception e) { + log.error("Error setting up either lock ZNode {} or task ZNode {}",fullLockPath, fullTasksPath,e); + throw e; + } + + + } + + /* + * Shoud be called once per MR server instance. 
+ * + */ + public void startNewWatcherForServer(String appId, LiveLockAvoidance avoidanceCallback) { + LockInstructionWatcher instructionWatcher = new LockInstructionWatcher(curatorFramework,avoidanceCallback,this); + assignNewProcessNode(appId, instructionWatcher); + + } + + + protected void assignNewProcessNode(String appId, Watcher processNodeWatcher ) { + + String taskHolderZnodePath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS+"/"+appId; + + + try { + + if(curatorFramework.checkExists().forPath(taskHolderZnodePath) != null) { + curatorFramework.delete().deletingChildrenIfNeeded().forPath(taskHolderZnodePath); + + } + curatorFramework.create().forPath(taskHolderZnodePath); + //setup the watcher + curatorFramework.getChildren().usingWatcher(processNodeWatcher).inBackground().forPath(taskHolderZnodePath); + log.info("Done creating task holder and watcher for APP name: {}",appId); + + } catch (Exception e) { + log.error("Could not add new processing node for name {}", appId, e); + } + + } + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaPublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaPublisher.java new file mode 100644 index 0000000..2a9e0ab --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/KafkaPublisher.java @@ -0,0 +1,228 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.kafka; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Properties; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.json.JSONException; +import org.springframework.beans.factory.annotation.Qualifier; + +import org.onap.dmaap.dmf.mr.backends.Publisher; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.utils.Utils; +//import org.slf4j.Logger; +//import org.slf4j.LoggerFactory; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.drumlin.till.nv.rrNvReadable; + + + +/** + * Sends raw JSON objects into Kafka. + * + * Could improve space: BSON rather than JSON? 
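+ * <p>
+ * A minimal usage sketch (illustrative only; assumes a configured rrNvReadable settings
+ * bean and an existing Publisher.message instance; broker and security values come from
+ * the MsgRtrApi properties via the "kafka.*" keys handled in the constructor and
+ * transferSetting below):
+ * <pre>
+ *   KafkaPublisher pub = new KafkaPublisher(settings); // rrNvReadable settings
+ *   pub.sendMessage("MY_TOPIC", msg);                  // msg is a Publisher.message
+ * </pre>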
+ * + * @author peter + * + */ + +public class KafkaPublisher implements Publisher { + /** + * constructor initializing + * + * @param settings + * @throws rrNvReadable.missingReqdSetting + */ + public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting { + //fSettings = settings; + + final Properties props = new Properties(); + /*transferSetting(fSettings, props, "metadata.broker.list", "localhost:9092"); + transferSetting(fSettings, props, "request.required.acks", "1"); + transferSetting(fSettings, props, "message.send.max.retries", "5"); + transferSetting(fSettings, props, "retry.backoff.ms", "150"); */ + String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list"); + if(null==kafkaConnUrl){ + + kafkaConnUrl="localhost:9092"; + } + + + if(Utils.isCadiEnabled()){ + transferSetting( props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';"); + transferSetting( props, "security.protocol", "SASL_PLAINTEXT"); + transferSetting( props, "sasl.mechanism", "PLAIN"); + } + transferSetting( props, "bootstrap.servers",kafkaConnUrl); + + transferSetting( props, "request.required.acks", "1"); + transferSetting( props, "message.send.max.retries", "5"); + transferSetting(props, "retry.backoff.ms", "150"); + + + + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + + + + fProducer = new KafkaProducer<>(props); + } + + /** + * Send a message with a given topic and key. + * + * @param msg + * @throws FailedToSendMessageException + * @throws JSONException + */ + @Override + public void sendMessage(String topic, message msg) throws IOException{ + final List msgs = new LinkedList(); + msgs.add(msg); + sendMessages(topic, msgs); + } + + /** + * method publishing batch messages + * This method is commented from 0.8 to 0.11 upgrade + * @param topic + * @param kms + * throws IOException + * + public void sendBatchMessage(String topic, ArrayList> kms) throws IOException { + try { + fProducer.send(kms); + + } catch (FailedToSendMessageException excp) { + log.error("Failed to send message(s) to topic [" + topic + "].", excp); + throw new FailedToSendMessageException(excp.getMessage(), excp); + } + + } */ + + + /* + * Kafka 11.0 Interface + * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList) + */ + public void sendBatchMessageNew(String topic, ArrayList > kms) throws IOException { + try { + for (ProducerRecord km : kms) { + fProducer.send(km); + } + + } catch (Exception excp) { + log.error("Failed to send message(s) to topic [" + topic + "].", excp); + throw new IOException(excp.getMessage(), excp); + } + + } + + /** + * Send a set of messages. Each must have a "key" string value. 
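+	 * <p>
+	 * In the Kafka 0.11 path that is actually used in this class (sendMessagesNew below),
+	 * each message becomes a ProducerRecord<String, String> keyed by getKey(), with the
+	 * message text as the value, and is sent individually through the shared producer.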
+ * + * @param topic + * @param msg + * @throws FailedToSendMessageException + * @throws JSONException + * + @Override + public void sendMessages(String topic, List msgs) + throws IOException, FailedToSendMessageException { + log.info("sending " + msgs.size() + " events to [" + topic + "]"); + + final List> kms = new ArrayList>(msgs.size()); + for (message o : msgs) { + final KeyedMessage data = new KeyedMessage(topic, o.getKey(), o.toString()); + kms.add(data); + } + try { + fProducer.send(kms); + + } catch (FailedToSendMessageException excp) { + log.error("Failed to send message(s) to topic [" + topic + "].", excp); + throw new FailedToSendMessageException(excp.getMessage(), excp); + } + } */ + @Override + public void sendMessagesNew(String topic, List msgs) + throws IOException { + log.info("sending " + msgs.size() + " events to [" + topic + "]"); +try{ + final List> kms = new ArrayList<>(msgs.size()); + for (message o : msgs) { + + final ProducerRecord data = new ProducerRecord<>(topic, o.getKey(), o.toString()); + + + try { + + fProducer.send(data); + + } catch (Exception excp) { + log.error("Failed to send message(s) to topic [" + topic + "].", excp); + throw new Exception(excp.getMessage(), excp); + } + } + + }catch(Exception e){} +} + //private final rrNvReadable fSettings; + + + private Producer fProducer; + + /** + * It sets the key value pair + * @param topic + * @param msg + * @param key + * @param defVal + */ + private void transferSetting(Properties props, String key, String defVal) { + String kafka_prop= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka." + key); + if (null==kafka_prop) kafka_prop=defVal; + //props.put(key, settings.getString("kafka." + key, defVal)); + props.put(key, kafka_prop); + } + + //private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class); + + private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class); + + @Override + public void sendMessages(String topic, List msgs) throws IOException { + // TODO Auto-generated method stub + + } + + +} \ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LiveLockAvoidance.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LiveLockAvoidance.java new file mode 100644 index 0000000..4aa8a97 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LiveLockAvoidance.java @@ -0,0 +1,45 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.kafka; + + + +/** + * Live Lock Avoidance interface. To be implemented by the main message router client + * + */ +public interface LiveLockAvoidance { + + /** + * Gets the unique id + * @return the unique id for the Message Router server instance + */ + String getAppId(); + + + /** + * Main callback to inform the local MR server instance that all consumers in a group need to soft poll + * @param groupName name of the Kafka consumer group needed a soft poll + */ + void handleRebalanceUnlock( String groupName); + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LockInstructionWatcher.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LockInstructionWatcher.java new file mode 100644 index 0000000..1d31a44 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/kafka/LockInstructionWatcher.java @@ -0,0 +1,100 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.kafka; + +import java.util.List; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.zookeeper.WatchedEvent; +import org.apache.zookeeper.Watcher; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/** + * + * LockInstructionWatcher + * A package-private class used internally by the KafkaLiveLockAvoider. + * + * This class implements the zookeeper Watcher callback and listens for changes on child nodes changing. + * Each child node is actually a Kafka group name that needs to be soft polled. Deletion of the child nodes + * after soft poll unlocking is finished. 
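+ * <p>
+ * Watch cycle, as implemented in process(WatchedEvent) below: on a NodeChildrenChanged event
+ * each child znode name is treated as a consumer group, handleRebalanceUnlock(group) is invoked
+ * on the callback, the child znode is deleted, and the watch is re-registered through the
+ * avoider so the next unlock instruction is picked up.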
+ *
+ *
+ */
+public class LockInstructionWatcher implements Watcher {
+
+	private CuratorFramework curatorFramework;
+	private LiveLockAvoidance avoidanceCallback;
+	private KafkaLiveLockAvoider2 avoider;
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(LockInstructionWatcher.class.getName());
+
+
+	public LockInstructionWatcher(CuratorFramework curatorFramework, LiveLockAvoidance avoidanceCallback,
+			KafkaLiveLockAvoider2 avoider) {
+		super();
+		this.curatorFramework = curatorFramework;
+		this.avoidanceCallback = avoidanceCallback;
+		this.avoider = avoider;
+	}
+
+
+	@Override
+	public void process(WatchedEvent event) {
+
+		switch (event.getType()) {
+		case NodeChildrenChanged:
+
+			try {
+
+				log.info("node children changed at path: {}", event.getPath());
+
+				List<String> children = curatorFramework.getChildren().forPath(event.getPath());
+
+				log.info("found child nodes, processing now");
+				for (String child : children) {
+					String childPath = String.format("%s/%s", event.getPath(), child);
+					log.info("Processing child task at node {}", childPath);
+					avoidanceCallback.handleRebalanceUnlock( child);
+					log.info("Deleting child task at node {}", childPath);
+					curatorFramework.delete().forPath(childPath);
+				}
+				//reset the watch with the avoider
+				avoider.assignNewProcessNode(avoidanceCallback.getAppId(), this);
+
+			} catch (Exception e) {
+				log.error("Error manipulating ZNode data in watcher", e);
+			}
+
+			break;
+
+		default:
+			log.info("Listener fired on path: {}, with event: {}", event.getPath(), event.getType());
+			break;
+		}
+	}
+
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryConsumerFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryConsumerFactory.java
new file mode 100644
index 0000000..d5dc33b
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryConsumerFactory.java
@@ -0,0 +1,184 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *        http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.memory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +/** + * + * @author anowarul.islam + * + */ +public class MemoryConsumerFactory implements ConsumerFactory +{ + + private final MemoryQueue fQueue; + + /** + * + * Initializing constructor + * @param q + */ + public MemoryConsumerFactory ( MemoryQueue q ) + { + fQueue = q; + } + + /** + * + * @param topic + * @param consumerGroupId + * @param clientId + * @param timeoutMs + * @return Consumer + */ + @Override + public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs, String remotehost ) + { + return new MemoryConsumer ( topic, consumerGroupId ); + } + + /** + * + * Define nested inner class + * + */ + private class MemoryConsumer implements Consumer + { + + private final String fTopic; + private final String fConsumer; + private final long fCreateMs; + private long fLastAccessMs; + + /** + * + * Initializing MemoryConsumer constructor + * @param topic + * @param consumer + * + */ + public MemoryConsumer ( String topic, String consumer ) + { + fTopic = topic; + fConsumer = consumer; + fCreateMs = System.currentTimeMillis (); + fLastAccessMs = fCreateMs; + } + + @Override + /** + * + * return consumer details + */ + public Message nextMessage () + { + return fQueue.get ( fTopic, fConsumer ); + } + + @Override + public boolean close() { + //Nothing to close/clean up. + return true; + } + /** + * + */ + public void commitOffsets() + { + // ignoring this aspect + } + /** + * get offset + */ + public long getOffset() + { + return 0; + } + + @Override + /** + * get consumer topic name + */ + public String getName () + { + return fTopic + "/" + fConsumer; + } + + @Override + public long getCreateTimeMs () + { + return fCreateMs; + } + + @Override + public long getLastAccessMs () + { + return fLastAccessMs; + } + + + + @Override + public void setOffset(long offset) { + // TODO Auto-generated method stub + + } + + + } + + @Override + public void destroyConsumer(String topic, String consumerGroupId, + String clientId) { + //No cache for memory consumers, so NOOP + } + + @Override + public void dropCache () + { + // nothing to do - there's no cache here + } + + @Override + /** + * @return ArrayList + */ + public Collection getConsumers () + { + return new ArrayList<> (); + } + + @Override + public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, + String remotehost) throws UnavailableException, CambriaApiException { + // TODO Auto-generated method stub + return null; + } + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryMetaBroker.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryMetaBroker.java new file mode 100644 index 0000000..fc3ba98 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryMetaBroker.java @@ -0,0 +1,201 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.memory; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; + +import org.onap.dmaap.dmf.mr.metabroker.Broker; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import com.att.nsa.configs.ConfigDb; +import com.att.nsa.security.NsaAcl; +import com.att.nsa.security.NsaApiKey; + +/** + * + * @author anowarul.islam + * + */ +public class MemoryMetaBroker implements Broker { + + private final MemoryQueue fQueue; + private final HashMap fTopics; + + /** + * + * @param mq + * @param configDb + * @param settings + */ + public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb) { + + fQueue = mq; + fTopics = new HashMap<>(); + } + + @Override + public List getAllTopics() { + return new LinkedList(fTopics.values()); + } + + @Override + public Topic getTopic(String topic) { + return fTopics.get(topic); + } + + @Override + public Topic createTopic(String topic, String desc, String ownerApiId, int partitions, int replicas, + boolean transactionEnabled) throws TopicExistsException { + if (getTopic(topic) != null) { + throw new TopicExistsException(topic); + } + fQueue.createTopic(topic); + fTopics.put(topic, new MemTopic(topic, desc, ownerApiId, transactionEnabled)); + return getTopic(topic); + } + + @Override + public void deleteTopic(String topic) { + fTopics.remove(topic); + fQueue.removeTopic(topic); + } + + private static class MemTopic implements Topic { + + private final String fName; + private final String fDesc; + private final String fOwner; + private NsaAcl fReaders; + private NsaAcl fWriters; + private boolean ftransactionEnabled; + private String accessDenied = "User does not own this topic "; + + /** + * constructor initialization + * + * @param name + * @param desc + * @param owner + * @param transactionEnabled + */ + public MemTopic(String name, String desc, String owner, boolean transactionEnabled) { + fName = name; + fDesc = desc; + fOwner = owner; + ftransactionEnabled = transactionEnabled; + fReaders = null; + fWriters = null; + } + + @Override + public String getOwner() { + return fOwner; + } + + @Override + public NsaAcl getReaderAcl() { + return fReaders; + } + + @Override + public NsaAcl getWriterAcl() { + return fWriters; + } + + @Override + public void checkUserRead(NsaApiKey user) throws AccessDeniedException { + if (fReaders != null && (user == null || !fReaders.canUser(user.getKey()))) { + throw new AccessDeniedException(user == null ? 
"" : user.getKey()); + } + } + + @Override + public void checkUserWrite(NsaApiKey user) throws AccessDeniedException { + if (fWriters != null && (user == null || !fWriters.canUser(user.getKey()))) { + throw new AccessDeniedException(user == null ? "" : user.getKey()); + } + } + + @Override + public String getName() { + return fName; + } + + @Override + public String getDescription() { + return fDesc; + } + + @Override + public void permitWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException { + if (!fOwner.equals(asUser.getKey())) { + throw new AccessDeniedException(accessDenied + fName); + } + if (fWriters == null) { + fWriters = new NsaAcl(); + } + fWriters.add(publisherId); + } + + @Override + public void denyWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException { + if (!fOwner.equals(asUser.getKey())) { + throw new AccessDeniedException(accessDenied + fName); + } + fWriters.remove(publisherId); + } + + @Override + public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException { + if (!fOwner.equals(asUser.getKey())) { + throw new AccessDeniedException(accessDenied + fName); + } + if (fReaders == null) { + fReaders = new NsaAcl(); + } + fReaders.add(consumerId); + } + + @Override + public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException { + if (!fOwner.equals(asUser.getKey())) { + throw new AccessDeniedException(accessDenied + fName); + } + fReaders.remove(consumerId); + } + + @Override + public boolean isTransactionEnabled() { + return ftransactionEnabled; + } + + @Override + public Set getOwners() { + final TreeSet set = new TreeSet<> (); + set.add ( fOwner ); + return set; + } + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueue.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueue.java new file mode 100644 index 0000000..809b7ae --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueue.java @@ -0,0 +1,207 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.memory; + +import java.util.ArrayList; +import java.util.HashMap; + +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; + +/** + * When broker type is memory, then this class is doing all the topic related + * operations + * + * @author anowarul.islam + * + */ +public class MemoryQueue { + // map from topic to list of msgs + private HashMap fQueue; + private HashMap> fOffsets; + + /** + * constructor storing hashMap objects in Queue and Offsets object + */ + public MemoryQueue() { + fQueue = new HashMap<>(); + fOffsets = new HashMap<>(); + } + + /** + * method used to create topic + * + * @param topic + */ + public synchronized void createTopic(String topic) { + LogBuffer q = fQueue.get(topic); + if (q == null) { + q = new LogBuffer(1024 * 1024); + fQueue.put(topic, q); + } + } + + /** + * method used to remove topic + * + * @param topic + */ + public synchronized void removeTopic(String topic) { + LogBuffer q = fQueue.get(topic); + if (q != null) { + fQueue.remove(topic); + } + } + + /** + * method to write message on topic + * + * @param topic + * @param m + */ + public synchronized void put(String topic, message m) { + LogBuffer q = fQueue.get(topic); + if (q == null) { + createTopic(topic); + q = fQueue.get(topic); + } + q.push(m.getMessage()); + } + + /** + * method to read consumer messages + * + * @param topic + * @param consumerName + * @return + */ + public synchronized Consumer.Message get(String topic, String consumerName) { + final LogBuffer q = fQueue.get(topic); + if (q == null) { + return null; + } + + HashMap offsetMap = fOffsets.get(consumerName); + if (offsetMap == null) { + offsetMap = new HashMap<>(); + fOffsets.put(consumerName, offsetMap); + } + Integer offset = offsetMap.get(topic); + if (offset == null) { + offset = 0; + } + + final msgInfo result = q.read(offset); + if (result != null && result.msg != null) { + offsetMap.put(topic, result.offset + 1); + } + return result; + } + + /** + * static inner class used to details about consumed messages + * + * @author anowarul.islam + * + */ + private static class msgInfo implements Consumer.Message { + /** + * published message which is consumed + */ + public String msg; + /** + * offset associated with message + */ + public int offset; + + /** + * get offset of messages + */ + @Override + public long getOffset() { + return offset; + } + + /** + * get consumed message + */ + @Override + public String getMessage() { + return msg; + } + } + + /** + * + * @author sneha.d.desai + * + * private LogBuffer class has synchronized push and read method + */ + private class LogBuffer { + private int fBaseOffset; + private final int fMaxSize; + private final ArrayList fList; + + /** + * constructor initializing the offset, maxsize and list + * + * @param maxSize + */ + public LogBuffer(int maxSize) { + fBaseOffset = 0; + fMaxSize = maxSize; + fList = new ArrayList<>(); + } + + /** + * pushing message + * + * @param msg + */ + public synchronized void push(String msg) { + fList.add(msg); + while (fList.size() > fMaxSize) { + fList.remove(0); + fBaseOffset++; + } + } + + /** + * reading messages + * + * @param offset + * @return + */ + public synchronized msgInfo read(int offset) { + final int actual = Math.max(0, offset - fBaseOffset); + + final msgInfo mi = new msgInfo(); + mi.msg = (actual >= fList.size()) ? 
null : fList.get(actual); + if (mi.msg == null) + return null; + + mi.offset = actual + fBaseOffset; + return mi; + } + + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueuePublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueuePublisher.java new file mode 100644 index 0000000..a9c7470 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MemoryQueuePublisher.java @@ -0,0 +1,92 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.memory; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.kafka.clients.producer.ProducerRecord; + +import org.onap.dmaap.dmf.mr.backends.Publisher; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; + + + +/** + * + * @author anowarul.islam + * + */ +public class MemoryQueuePublisher implements Publisher { + /** + * + * @param q + * @param b + */ + public MemoryQueuePublisher(MemoryQueue q, MemoryMetaBroker b) { + fBroker = b; + fQueue = q; + } + + + /** + * + * @param topic + * @param msg + * @throws IOException + */ + @Override + public void sendMessage(String topic, message msg) throws IOException { + if (null == fBroker.getTopic(topic)) { + try { + fBroker.createTopic(topic, topic, null, 8, 3, false); + } catch (TopicExistsException e) { + throw new RuntimeException(e); + } + } + fQueue.put(topic, msg); + } + + @Override + /** + * @param topic + * @param msgs + * @throws IOException + */ + + public void sendBatchMessageNew(String topic, ArrayList> kms) throws IOException { + + } + + public void sendMessagesNew(String topic, List msgs) throws IOException { + } + + public void sendMessages(String topic, List msgs) throws IOException { + for (message m : msgs) { + sendMessage(topic, m); + } + } + + private final MemoryMetaBroker fBroker; + private final MemoryQueue fQueue; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MessageLogger.java b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MessageLogger.java new file mode 100644 index 0000000..d88771e --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/backends/memory/MessageLogger.java @@ -0,0 +1,109 @@ +/******************************************************************************* + * 
============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.backends.memory; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.kafka.clients.producer.ProducerRecord; + +import org.onap.dmaap.dmf.mr.backends.Publisher; + + + +/** + * class used for logging perspective + * + * @author anowarul.islam + * + */ +public class MessageLogger implements Publisher { + public MessageLogger() { + } + + public void setFile(File f) throws FileNotFoundException { + fStream = new FileOutputStream(f, true); + } + + /** + * + * @param topic + * @param msg + * @throws IOException + */ + @Override + public void sendMessage(String topic, message msg) throws IOException { + logMsg(msg); + } + + /** + * @param topic + * @param msgs + * @throws IOException + */ + @Override + public void sendMessages(String topic, List msgs) throws IOException { + for (message m : msgs) { + logMsg(m); + } + } + + /** + * @param topic + * @param kms + * @throws IOException + + @Override + public void sendBatchMessage(String topic, ArrayList> kms) throws + + IOException { + } + */ + private FileOutputStream fStream; + + /** + * + * @param msg + * @throws IOException + */ + private void logMsg(message msg) throws IOException { + String key = msg.getKey(); + if (key == null) + key = ""; + + fStream.write('['); + fStream.write(key.getBytes()); + fStream.write("] ".getBytes()); + fStream.write(msg.getMessage().getBytes()); + fStream.write('\n'); + } + public void sendBatchMessageNew(String topic, ArrayList> kms) throws IOException { + + } + + public void sendMessagesNew(String topic, List msgs) throws IOException { + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/ApiKeyBean.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/ApiKeyBean.java new file mode 100644 index 0000000..4356aa7 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/ApiKeyBean.java @@ -0,0 +1,88 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.io.Serializable; + +import javax.xml.bind.annotation.XmlRootElement; + +import com.att.nsa.drumlin.till.data.uniqueStringGenerator; +/** + * + * @author anowarul.islam + * + */ +@XmlRootElement +public class ApiKeyBean implements Serializable { + + private static final long serialVersionUID = -8219849086890567740L; + + private static final String KEY_CHARS = "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + + private String email; + private String description; + /** + * constructor + */ + public ApiKeyBean() { + super(); + } +/** + * + * @param email + * @param description + */ + public ApiKeyBean(String email, String description) { + super(); + this.email = email; + this.description = description; + } + + public String getEmail() { + return email; + } + + public void setEmail(String email) { + this.email = email; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getKey() { + return generateKey(16); + } + + public String getSharedSecret() { + return generateKey(24); + } + + private static String generateKey ( int length ) { + return uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPCambriaLimiter.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPCambriaLimiter.java new file mode 100644 index 0000000..7db9f54 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPCambriaLimiter.java @@ -0,0 +1,288 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.util.Date; +import java.util.HashMap; +import java.util.concurrent.TimeUnit; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Component; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import org.onap.dmaap.dmf.mr.utils.Utils; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.drumlin.service.standards.HttpStatusCodes; +import com.att.nsa.drumlin.till.nv.rrNvReadable; +import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.metrics.impl.CdmRateTicker; + +/** + * class provide rate information + * + * @author anowarul.islam + * + */ +@Component +public class DMaaPCambriaLimiter { + private final HashMap fRateInfo; + private final HashMap fRateInfoCheck; + private final double fMaxEmptyPollsPerMinute; + private final double fMaxPollsPerMinute; + private final int fWindowLengthMins; + private final long fSleepMs; + private final long fSleepMs1; + private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class); + + /** + * constructor initializes + * + * @param settings + * @throws missingReqdSetting + * @throws invalidSettingValue + */ + @Autowired + public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings) { + fRateInfo = new HashMap<>(); + fRateInfoCheck = new HashMap<>(); + fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute, + CambriaConstants.kDefault_MaxEmptyPollsPerMinute); + fMaxPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxPollsPerMinute, + 30); + fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength, + CambriaConstants.kDefault_RateLimitWindowLength); + fSleepMs = settings.getLong(CambriaConstants.kSetting_SleepMsOnRateLimit, + CambriaConstants.kDefault_SleepMsOnRateLimit); + fSleepMs1 = settings.getLong(CambriaConstants.kSetting_SleepMsRealOnRateLimit, + 5000); + + } + + /** + * Construct a rate limiter. + * + * @param maxEmptyPollsPerMinute + * Pass <= 0 to deactivate rate limiting. + * @param windowLengthMins + */ + public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, double maxPollsPerMinute,int windowLengthMins) { + this(maxEmptyPollsPerMinute,maxPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute),getSleepMsForRate(1)); + } + + /** + * Construct a rate limiter + * + * @param maxEmptyPollsPerMinute + * Pass <= 0 to deactivate rate limiting. 
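+	 *            (Illustrative example: with maxEmptyPollsPerMinute = 10 and
+	 *            windowLengthMins = 4, a client averaging more than 10 empty polls per
+	 *            minute over the window is first slowed by sleepMs and then answered with
+	 *            an HTTP 429, as implemented in onCall below.)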
+	 * @param sleepMs
+	 * @param windowLengthMins
+	 */
+	public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute,double maxPollsPerMinute, int windowLengthMins, long sleepMs ,long sleepMS1) {
+		fRateInfo = new HashMap<>();
+		fRateInfoCheck = new HashMap<>();
+		fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute);
+		fMaxPollsPerMinute = Math.max(0, maxPollsPerMinute);
+		fWindowLengthMins = windowLengthMins;
+		fSleepMs = Math.max(0, sleepMs);
+		fSleepMs1 = Math.max(0, sleepMS1);
+	}
+
+	/**
+	 * static method providing the sleep time
+	 *
+	 * @param ratePerMinute
+	 * @return
+	 */
+	public static long getSleepMsForRate(double ratePerMinute) {
+		if (ratePerMinute <= 0.0)
+			return 0;
+		return Math.max(1000, Math.round(60 * 1000 / ratePerMinute));
+	}
+
+	/**
+	 * Tell the rate limiter about a call to a topic/group/id. If the rate is
+	 * too high, this call delays its return and throws an exception.
+	 *
+	 * @param topic
+	 * @param consumerGroup
+	 * @param clientId
+	 * @throws CambriaApiException
+	 */
+	public void onCall(String topic, String consumerGroup, String clientId,String remoteHost) throws CambriaApiException {
+		// do nothing if rate is configured 0 or less
+		if (fMaxEmptyPollsPerMinute <= 0) {
+			return;
+		}
+		// setup rate info for this tuple
+		final RateInfo ri = getRateInfo(topic, consumerGroup, clientId);
+		final double rate = ri.onCall();
+		log.info(ri.getLabel() + ": " + rate + " empty replies/minute.");
+		if (rate > fMaxEmptyPollsPerMinute) {
+			try {
+				log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
+						+ ".");
+				if (fSleepMs > 0) {
+					log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
+							+ " ms sleep, then responding in error.");
+					Thread.sleep(fSleepMs);
+
+				} else {
+					log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
+				}
+			} catch (InterruptedException e) {
+				log.error("Exception "+ e);
+				// ignore
+			}
+
+			ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests,
+					DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
+					"This client is making too many requests. Please use a long poll "
+							+ "setting to decrease the number of requests that result in empty responses. 
","",Utils.getFormattedDate(new Date()),topic,"","",consumerGroup+"/"+clientId,remoteHost); + + log.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + + + } + + /** + * + * @param topic + * @param consumerGroup + * @param clientId + * @param sentCount + */ + public void onSend(String topic, String consumerGroup, String clientId, long sentCount) { + // check for good replies + if (sentCount > 0) { + // that was a good send, reset the metric + getRateInfo(topic, consumerGroup, clientId).reset(); + } + } + + private static class RateInfo { + private final String fLabel; + private final CdmRateTicker fCallRateSinceLastMsgSend; + /** + * constructor initialzes + * + * @param label + * @param windowLengthMinutes + */ + public RateInfo(String label, int windowLengthMinutes) { + fLabel = label; + fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES, + windowLengthMinutes, TimeUnit.MINUTES); + } + + public String getLabel() { + return fLabel; + } + + /** + * CdmRateTicker is reset + */ + public void reset() { + fCallRateSinceLastMsgSend.reset(); + } + + /** + * + * @return + */ + public double onCall() { + fCallRateSinceLastMsgSend.tick(); + return fCallRateSinceLastMsgSend.getRate(); + } + } + + + + private static class RateInfoCheck { + + private final String fLabel; + private final CdmRateTicker fCallRateSinceLastMsgSend; + /** + * constructor initialzes + * + * @param label + * @param windowLengthMinutes + */ + public RateInfoCheck(String label, int windowLengthMinutes) { + fLabel = label; + fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES, + windowLengthMinutes, TimeUnit.MINUTES); + } + + public String getLabel() { + return fLabel; + } + + /** + * CdmRateTicker is reset + */ + public void reset() { + fCallRateSinceLastMsgSend.reset(); + } + + /** + * + * @return + */ + public double onCall() { + fCallRateSinceLastMsgSend.tick(); + return fCallRateSinceLastMsgSend.getRate(); + } + } + + + + + private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) { + final String key = makeKey(topic, consumerGroup, clientId); + RateInfo ri = fRateInfo.get(key); + if (ri == null) { + ri = new RateInfo(key, fWindowLengthMins); + fRateInfo.put(key, ri); + } + return ri; + } + + + + + + + + private String makeKey(String topic, String group, String id) { + return topic + "::" + group + "::" + id; + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPContext.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPContext.java new file mode 100644 index 0000000..c4b6412 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPContext.java @@ -0,0 +1,104 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.text.SimpleDateFormat; +import java.util.Date; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpSession; + +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; + +/** + * DMaaPContext provide and maintain all the configurations , Http request/response + * Session and consumer Request Time + * @author nilanjana.maity + * + */ +public class DMaaPContext { + + private ConfigurationReader configReader; + private HttpServletRequest request; + private HttpServletResponse response; + private HttpSession session; + private String consumerRequestTime; + static int i=0; + + public synchronized static long getBatchID() { + try{ + final long metricsSendTime = System.currentTimeMillis(); + final Date d = new Date(metricsSendTime); + final String text = new SimpleDateFormat("ddMMyyyyHHmmss").format(d); + long dt= Long.valueOf(text)+i; + i++; + return dt; + } + catch(NumberFormatException ex){ + return 0; + } + } + + public HttpServletRequest getRequest() { + return request; + } + + public void setRequest(HttpServletRequest request) { + this.request = request; + } + + public HttpServletResponse getResponse() { + return response; + } + + public void setResponse(HttpServletResponse response) { + this.response = response; + } + + public HttpSession getSession() { + this.session = request.getSession(); + return session; + } + + public void setSession(HttpSession session) { + this.session = session; + } + + public ConfigurationReader getConfigReader() { + return configReader; + } + + public void setConfigReader(ConfigurationReader configReader) { + this.configReader = configReader; + } + + public String getConsumerRequestTime() { + return consumerRequestTime; + } + + public void setConsumerRequestTime(String consumerRequestTime) { + this.consumerRequestTime = consumerRequestTime; + } + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaConsumerFactory.java new file mode 100644 index 0000000..0a909ff --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaConsumerFactory.java @@ -0,0 +1,361 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Collection; +import java.util.HashMap; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.locks.InterProcessMutex; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.springframework.beans.factory.annotation.Qualifier; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.backends.kafka.Kafka011Consumer; +import org.onap.dmaap.dmf.mr.backends.kafka.Kafka011ConsumerUtil; +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache; +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException; +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaLiveLockAvoider2; +import org.onap.dmaap.dmf.mr.backends.kafka.LiveLockAvoidance; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.Utils; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; + +/** + * @author nilanjana.maity + * + */ +public class DMaaPKafkaConsumerFactory implements ConsumerFactory { + + + private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class); + + + /** + * constructor initialization + * + * @param settings + * @param metrics + * @param curator + * @throws missingReqdSetting + * @throws KafkaConsumerCacheException + * @throws UnknownHostException + */ + + public DMaaPKafkaConsumerFactory(@Qualifier("dMaaPMetricsSet") MetricsSet metrics, + @Qualifier("curator") CuratorFramework curator, + @Qualifier("kafkalockavoid") KafkaLiveLockAvoider2 kafkaLiveLockAvoider) + throws missingReqdSetting, KafkaConsumerCacheException, UnknownHostException { + + String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + CambriaConstants.kSetting_ApiNodeIdentifier); + if (apiNodeId == null) { + + apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port; + } + + log.info("This Cambria API Node identifies itself as [" + apiNodeId + "]."); + final String mode = CambriaConstants.DMAAP; + + fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "kafka.metadata.broker.list"); + if (null == fkafkaBrokers) { + + fkafkaBrokers = "localhost:9092"; + } + + boolean kSetting_EnableCache = kDefault_IsCacheEnabled; + 
String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "cambria.consumer.cache.enabled"); + if (null != strkSetting_EnableCache) + kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache); + + final boolean isCacheEnabled = kSetting_EnableCache; + + + fCache = null; + if (isCacheEnabled) { + fCache = KafkaConsumerCache.getInstance(); + + } + if (fCache != null) { + fCache.setfMetrics(metrics); + fCache.setfApiId(apiNodeId); + fCache.startCache(mode, curator); + if(kafkaLiveLockAvoider!=null){ + kafkaLiveLockAvoider.startNewWatcherForServer(apiNodeId, makeAvoidanceCallback(apiNodeId)); + fkafkaLiveLockAvoider = kafkaLiveLockAvoider; + } + } + } + + /* + * getConsumerFor + * + * @see + * com.att.dmf.mr.backends.ConsumerFactory#getConsumerFor(java.lang.String, + * java.lang.String, java.lang.String, int, java.lang.String) This method is + * used by EventServiceImpl.getEvents() method to get a Kakfa consumer + * either from kafkaconsumer cache or create a new connection This also get + * the list of other consumer objects for the same consumer group and set to + * KafkaConsumer object. This list may be used during poll-rebalancing + * issue. + */ + @Override + public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs, + String remotehost) throws UnavailableException, CambriaApiException { + Kafka011Consumer kc; + + // To synchronize based on the consumer group. + + Object syncObject = synchash.get(topic + consumerGroupName); + if (null == syncObject) { + syncObject = new Object(); + synchash.put(topic + consumerGroupName, syncObject); + } + + synchronized (syncObject) { + try { + kc = (fCache != null) ? fCache.getConsumerFor(topic, consumerGroupName, consumerId) : null; // consumerId + + } catch (KafkaConsumerCacheException e) { + log.info("######@@@@### Error occured in Kafka Caching" + e + " " + topic + "::" + consumerGroupName + + "::" + consumerId); + log.error("####@@@@## Error occured in Kafka Caching" + e + " " + topic + "::" + consumerGroupName + + "::" + consumerId); + throw new UnavailableException(e); + } + + // Ideally if cache exists below flow should be skipped. If cache + // didnt + // exist, then create this first time on this node. + if (kc == null) { + + log.info("^Kafka consumer cache value " + topic + "::" + consumerGroupName + "::" + consumerId + " =>" + + kc); + + final InterProcessMutex ipLock = new InterProcessMutex(ConfigurationReader.getCurator(), + "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId); + boolean locked = false; + + try { + + locked = ipLock.acquire(30, TimeUnit.SECONDS); + if (!locked) { + + log.info("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic + + ", " + consumerGroupName + ", " + consumerId + ") from " + remotehost); + throw new UnavailableException( + "Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic + + ", " + consumerGroupName + ", " + consumerId + ") " + remotehost); + } + + // ConfigurationReader.getCurator().checkExists().forPath("S"). 
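+
+					// With the inter-process lock held, the code below builds the consumer
+					// Properties, creates the underlying KafkaConsumer, wraps it in a
+					// Kafka011Consumer tied to the live-lock avoider, and registers it in the cache.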
+ + log.info("Creating Kafka consumer for group [" + consumerGroupName + "], consumer [" + consumerId + + "], on topic [" + topic + "]."); + + if (fCache != null) { + fCache.signalOwnership(topic, consumerGroupName, consumerId); + } + + final Properties props = createConsumerConfig(topic,consumerGroupName, consumerId); + long fCreateTimeMs = System.currentTimeMillis(); + KafkaConsumer cc = new KafkaConsumer<>(props); + kc = new Kafka011Consumer(topic, consumerGroupName, consumerId, cc, fkafkaLiveLockAvoider); + log.info(" kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs)); + + if (fCache != null) { + fCache.putConsumerFor(topic, consumerGroupName, consumerId, kc); // + } + + } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) { + log.info( + "Kafka consumer couldn't connect to ZK. " + x + " " + consumerGroupName + "/" + consumerId); + throw new UnavailableException("Couldn't connect to ZK."); + } catch (KafkaConsumerCacheException e) { + log.info("Failed to cache consumer (this may have performance implications): " + e.getMessage() + + " " + consumerGroupName + "/" + consumerId); + } catch (UnavailableException u) { + log.info("Failed and in UnavailableException block " + u.getMessage() + " " + consumerGroupName + + "/" + consumerId); + throw new UnavailableException("Error while acquiring consumer factory lock " + u.getMessage(), u); + } catch (Exception e) { + log.info("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/" + + consumerId); + log.error("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/" + + consumerId); + + } finally { + if (locked) { + try { + ipLock.release(); + } catch (Exception e) { + throw new UnavailableException("Error while releasing consumer factory lock" + e, e); + } + } + } + } + } + return kc; + } + + @Override + public synchronized void destroyConsumer(String topic, String consumerGroup, String clientId) { + if (fCache != null) { + fCache.dropConsumer(topic, consumerGroup, clientId); + } + } + + @Override + public synchronized Collection getConsumers() { + return fCache.getConsumers(); + } + + @Override + public synchronized void dropCache() { + fCache.dropAllConsumers(); + } + + + private KafkaConsumerCache fCache; + private KafkaLiveLockAvoider2 fkafkaLiveLockAvoider; + private String fkafkaBrokers; + + + + private static String makeLongKey(String key, String prefix) { + return prefix + "." + key; + } + + private void transferSettingIfProvided(Properties target, String key, String prefix) { + String keyVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, makeLongKey(key, prefix)); + + + if (null != keyVal) { + + log.info("Setting [" + key + "] to " + keyVal + "."); + target.put(key, keyVal); + } + } + + /** + * Name CreateConsumerconfig + * @param topic + * @param groupId + * @param consumerId + * @return Properties + * + * This method is to create Properties required to create kafka connection + * Group name is replaced with different format groupid--topic to address same + * groupids for multiple topics. Same groupid with multiple topics + * may start frequent consumer rebalancing on all the topics . 
+ * Rewriting the group id per topic in this way keeps it unique. + */ + private Properties createConsumerConfig(String topic, String groupId, String consumerId) { + final Properties props = new Properties(); + // fakeGroupName avoids sharing one Kafka consumer group across multiple topics. Do not change this logic. + // Fix for CPFMF-644: + final String fakeGroupName = groupId + "--" + topic; + props.put("group.id", fakeGroupName); + props.put("enable.auto.commit", "false"); // 0.11 + props.put("bootstrap.servers", fkafkaBrokers); + if(Utils.isCadiEnabled()){ + props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';"); + props.put("security.protocol", "SASL_PLAINTEXT"); + props.put("sasl.mechanism", "PLAIN"); + } + props.put("client.id", consumerId); + + // additional settings: start with our defaults, then pull in configured + // overrides + populateKafkaInternalDefaultsMap(); + for (String key : KafkaConsumerKeys) { + transferSettingIfProvided(props, key, "kafka"); + } + + props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); + props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); + + return props; + } + + + private static final String KafkaConsumerKeys[] = { "bootstrap.servers", "heartbeat.interval.ms", + "auto.offset.reset", "exclude.internal.topics", "session.timeout.ms", "fetch.max.bytes", + "auto.commit.interval.ms", "connections.max.idle.ms", "fetch.min.bytes", "isolation.level", + "request.timeout.ms", "fetch.max.wait.ms", "reconnect.backoff.max.ms", + "max.partition.fetch.bytes", "reconnect.backoff.ms", "retry.backoff.ms", + "max.poll.interval.ms", "max.poll.records", "receive.buffer.bytes", "metadata.max.age.ms" }; + + /** + * Placeholder for seeding internal Kafka defaults (consumer timeout, ZooKeeper timeout, etc.); + * currently a no-op. + */ + private static void populateKafkaInternalDefaultsMap() { } + + /* + * Builds the LiveLockAvoidance callback for this API node; when a rebalance + * unlock instruction arrives, it forces a poll on every local consumer in the + * given group. + */ + private LiveLockAvoidance makeAvoidanceCallback(final String appId) { + + return new LiveLockAvoidance() { + + @Override + public String getAppId() { + return appId; + } + + @Override + public void handleRebalanceUnlock(String groupName) { + log.info("FORCE A POLL NOW FOR appId: [{}] group: [{}]", getAppId(), groupName); + Kafka011ConsumerUtil.forcePollOnConsumer(groupName + "::"); + } + + }; + + } + + @SuppressWarnings("rawtypes") + @Override + public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, + String remotehost) throws UnavailableException, CambriaApiException { + // TODO Auto-generated method stub + return null; + } + + private HashMap synchash = new HashMap(); + +} \ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaMetaBroker.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaMetaBroker.java new file mode 100644 index 0000000..a4ae2be --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPKafkaMetaBroker.java @@ -0,0 +1,495 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property.
All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Properties; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.ExecutionException; + +import org.I0Itec.zkclient.ZkClient; +import org.I0Itec.zkclient.exception.ZkNoNodeException; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.CreateTopicsResult; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.common.KafkaFuture; +import org.json.JSONObject; +import org.json.JSONArray; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Component; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.metabroker.Broker; +import org.onap.dmaap.dmf.mr.metabroker.Broker1; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.Utils; +//import org.apache.log4-j.Logger; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +import com.att.nsa.configs.ConfigDb; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.configs.ConfigPath; +import com.att.nsa.drumlin.service.standards.HttpStatusCodes; +import com.att.nsa.drumlin.till.nv.rrNvReadable; +import com.att.nsa.security.NsaAcl; +import com.att.nsa.security.NsaAclUtils; +import com.att.nsa.security.NsaApiKey; + + +/** + * class performing all topic operations + * + * @author anowarul.islam + * + */ +//@Component +public class DMaaPKafkaMetaBroker implements Broker1 { + + public DMaaPKafkaMetaBroker() { + fZk = null; + fCambriaConfig = null; + fBaseTopicData = null; + final Properties props = new Properties (); + String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "kafka.metadata.broker.list"); + if (null == fkafkaBrokers) { + + fkafkaBrokers = "localhost:9092"; + } + + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers ); + if(Utils.isCadiEnabled()){ + props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';"); + props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); + props.put("sasl.mechanism", "PLAIN"); + } + + fKafkaAdminClient=AdminClient.create ( props ); + + } + + //private static final Logger log = 
Logger.getLogger(DMaaPKafkaMetaBroker.class); + private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaMetaBroker.class); + private final AdminClient fKafkaAdminClient; + + + + /** + * DMaaPKafkaMetaBroker constructor initializing + * + * @param settings + * @param zk + * @param configDb + */ + public DMaaPKafkaMetaBroker(@Qualifier("propertyReader") rrNvReadable settings, + @Qualifier("dMaaPZkClient") ZkClient zk, @Qualifier("dMaaPZkConfigDb") ConfigDb configDb) { + //fSettings = settings; + fZk = zk; + fCambriaConfig = configDb; + fBaseTopicData = configDb.parse("/topics"); + final Properties props = new Properties (); + String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "kafka.metadata.broker.list"); + if (null == fkafkaBrokers) { + + fkafkaBrokers = "localhost:9092"; + } + + if(Utils.isCadiEnabled()){ + props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';"); + props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); + props.put("sasl.mechanism", "PLAIN"); + } + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers ); + + fKafkaAdminClient=AdminClient.create ( props ); + + + + } + + public DMaaPKafkaMetaBroker( rrNvReadable settings, + ZkClient zk, ConfigDb configDb, AdminClient client) { + + fZk = zk; + fCambriaConfig = configDb; + fBaseTopicData = configDb.parse("/topics"); + fKafkaAdminClient = client; + + + + } + + @Override + public List getAllTopics() throws ConfigDbException { + log.info("Retrieving list of all the topics."); + final LinkedList result = new LinkedList(); + try { + log.info("Retrieving all topics from root: " + zkTopicsRoot); + final List topics = fZk.getChildren(zkTopicsRoot); + for (String topic : topics) { + result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData)); + } + JSONObject dataObj = new JSONObject(); + dataObj.put("topics", new JSONObject()); + + for (String topic : topics) { + dataObj.getJSONObject("topics").put(topic, new JSONObject()); + } + } catch (ZkNoNodeException excp) { + // very fresh kafka doesn't have any topics or a topics node + log.error("ZK doesn't have a Kafka topics node at " + zkTopicsRoot, excp); + } + return result; + } + + @Override + public Topic getTopic(String topic) throws ConfigDbException { + if (fZk.exists(zkTopicsRoot + "/" + topic)) { + return getKafkaTopicConfig(fCambriaConfig, fBaseTopicData, topic); + } + // else: no such topic in kafka + return null; + } + + /** + * static method to get a KafkaTopic object + * + * @param db + * @param base + * @param topic + * @return + * @throws ConfigDbException + */ + public static KafkaTopic getKafkaTopicConfig(ConfigDb db, ConfigPath base, String topic) throws ConfigDbException { + return new KafkaTopic(topic, db, base); + } + + /** + * Creates a topic in Kafka and records its metadata in the config db. + */ + @Override + public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas, + boolean transactionEnabled) throws TopicExistsException, CambriaApiException, ConfigDbException { + log.info("Creating topic: " + topic); + try { + log.info("Check if topic [" + topic + "] exists."); + // first check for existence "our way" + final Topic t = getTopic(topic); + if (t != null) { + log.info("Could not create topic [" + topic + "]. Topic already exists."); + throw new TopicExistsException("Could not create topic [" + topic + "]. Topic already exists."); + } + } catch (ConfigDbException e1) { + log.error("Topic [" + topic + "] could not be created. Couldn't check topic data in config db.", e1); + throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable, + "Couldn't check topic data in config db."); + } + + // we only allow 3 replicas. (If we don't test this, we get weird + // results from the cluster, + // so explicit test and fail.) + if (replicas < 1 || replicas > 3) { + log.info("Topic [" + topic + "] could not be created. The replica count must be between 1 and 3."); + throw new CambriaApiException(HttpStatusCodes.k400_badRequest, + "The replica count must be between 1 and 3."); + } + if (partitions < 1) { + log.info("Topic [" + topic + "] could not be created. The partition count must be at least 1."); + throw new CambriaApiException(HttpStatusCodes.k400_badRequest, "The partition count must be at least 1."); + } + + // create via kafka + + try + { + final NewTopic topicRequest = new NewTopic ( topic, partitions, (short) replicas ); + final CreateTopicsResult ctr = fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) ); + final KafkaFuture ctrResult = ctr.all (); + ctrResult.get (); + // underlying Kafka topic created. now setup our API info + return createTopicEntry ( topic, desc, ownerApiKey, transactionEnabled ); + } + catch ( InterruptedException e ) + { + + log.warn ( "Execution of createTopics was interrupted." ); + throw new ConfigDbException ( e ); + } + catch ( ExecutionException e ) + { + + log.warn ( "Execution of createTopics failed: " + e.getCause ().getMessage (), e.getCause () ); + throw new ConfigDbException ( e.getCause () ); + } + + } + + @Override + public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException, ConfigDbException { + log.info("Deleting topic: " + topic); + ZkClient zkClient = null; + try { + log.info("Deleting topic [" + topic + "] via the Kafka admin client."); + + + fKafkaAdminClient.deleteTopics(Arrays.asList(topic)); + log.info("Delete request for topic [" + topic + "] submitted."); + + } catch (Exception e) { + log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e); + throw new ConfigDbException(e); + } finally { + log.info("Closing zookeeper connection."); + if (zkClient != null) + zkClient.close(); + } + + // throw new UnsupportedOperationException ( "We can't programmatically + // delete Kafka topics yet."
); + } + + //private final rrNvReadable fSettings; + private final ZkClient fZk; + private final ConfigDb fCambriaConfig; + private final ConfigPath fBaseTopicData; + + private static final String zkTopicsRoot = "/brokers/topics"; + private static final JSONObject kEmptyAcl = new JSONObject(); + + /** + * method Providing KafkaTopic Object associated with owner and + * transactionenabled or not + * + * @param name + * @param desc + * @param owner + * @param transactionEnabled + * @return + * @throws ConfigDbException + */ + public KafkaTopic createTopicEntry(String name, String desc, String owner, boolean transactionEnabled) + throws ConfigDbException { + return createTopicEntry(fCambriaConfig, fBaseTopicData, name, desc, owner, transactionEnabled); + } + + /** + * static method giving kafka topic object + * + * @param db + * @param basePath + * @param name + * @param desc + * @param owner + * @param transactionEnabled + * @return + * @throws ConfigDbException + */ + public static KafkaTopic createTopicEntry(ConfigDb db, ConfigPath basePath, String name, String desc, String owner, + boolean transactionEnabled) throws ConfigDbException { + final JSONObject o = new JSONObject(); + o.put("owner", owner); + o.put("description", desc); + o.put("txenabled", transactionEnabled); + db.store(basePath.getChild(name), o.toString()); + return new KafkaTopic(name, db, basePath); + } + + /** + * class performing all user opearation like user is eligible to read, + * write. permitting a user to write and read, + * + * @author anowarul.islam + * + */ + public static class KafkaTopic implements Topic { + /** + * constructor initializes + * + * @param name + * @param configdb + * @param baseTopic + * @throws ConfigDbException + */ + public KafkaTopic(String name, ConfigDb configdb, ConfigPath baseTopic) throws ConfigDbException { + fName = name; + fConfigDb = configdb; + fBaseTopicData = baseTopic; + + String data = fConfigDb.load(fBaseTopicData.getChild(fName)); + if (data == null) { + data = "{}"; + } + + final JSONObject o = new JSONObject(data); + fOwner = o.optString("owner", ""); + fDesc = o.optString("description", ""); + fTransactionEnabled = o.optBoolean("txenabled", false);// default + // value is + // false + // if this topic has an owner, it needs both read/write ACLs. If there's no + // owner (or it's empty), null is okay -- this is for existing or implicitly + // created topics. 
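+ // The ACL JSON parsed below by fromJson has the shape {"allowed":["identityA","identityB", ...]} (illustrative identities).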
+ JSONObject readers = o.optJSONObject ( "readers" ); + if ( readers == null && fOwner.length () > 0 ) readers = kEmptyAcl; + fReaders = fromJson ( readers ); + + JSONObject writers = o.optJSONObject ( "writers" ); + if ( writers == null && fOwner.length () > 0 ) writers = kEmptyAcl; + fWriters = fromJson ( writers ); + } + + private NsaAcl fromJson(JSONObject o) { + NsaAcl acl = new NsaAcl(); + if (o != null) { + JSONArray a = o.optJSONArray("allowed"); + if (a != null) { + for (int i = 0; i < a.length(); ++i) { + String user = a.getString(i); + acl.add(user); + } + } + } + return acl; + } + + @Override + public String getName() { + return fName; + } + + @Override + public String getOwner() { + return fOwner; + } + + @Override + public String getDescription() { + return fDesc; + } + + @Override + public NsaAcl getReaderAcl() { + return fReaders; + } + + @Override + public NsaAcl getWriterAcl() { + return fWriters; + } + + @Override + public void checkUserRead(NsaApiKey user) throws AccessDeniedException { + NsaAclUtils.checkUserAccess ( fOwner, getReaderAcl(), user ); + } + + @Override + public void checkUserWrite(NsaApiKey user) throws AccessDeniedException { + NsaAclUtils.checkUserAccess ( fOwner, getWriterAcl(), user ); + } + + @Override + public void permitWritesFromUser(String pubId, NsaApiKey asUser) + throws ConfigDbException, AccessDeniedException { + updateAcl(asUser, false, true, pubId); + } + + @Override + public void denyWritesFromUser(String pubId, NsaApiKey asUser) throws ConfigDbException, AccessDeniedException { + updateAcl(asUser, false, false, pubId); + } + + @Override + public void permitReadsByUser(String consumerId, NsaApiKey asUser) + throws ConfigDbException, AccessDeniedException { + updateAcl(asUser, true, true, consumerId); + } + + @Override + public void denyReadsByUser(String consumerId, NsaApiKey asUser) + throws ConfigDbException, AccessDeniedException { + updateAcl(asUser, true, false, consumerId); + } + + private void updateAcl(NsaApiKey asUser, boolean reader, boolean add, String key) + throws ConfigDbException, AccessDeniedException{ + try + { + final NsaAcl acl = NsaAclUtils.updateAcl ( this, asUser, key, reader, add ); + + // we have to assume we have current data, or load it again. for the expected use + // case, assuming we can overwrite the data is fine. + final JSONObject o = new JSONObject (); + o.put ( "owner", fOwner ); + o.put ( "readers", safeSerialize ( reader ? acl : fReaders ) ); + o.put ( "writers", safeSerialize ( reader ? fWriters : acl ) ); + fConfigDb.store ( fBaseTopicData.getChild ( fName ), o.toString () ); + + log.info ( "ACL_UPDATE: " + asUser.getKey () + " " + ( add ? "added" : "removed" ) + ( reader?"subscriber":"publisher" ) + " " + key + " on " + fName ); + + } + catch ( ConfigDbException x ) + { + throw x; + } + catch ( AccessDeniedException x ) + { + throw x; + } + + } + + private JSONObject safeSerialize(NsaAcl acl) { + return acl == null ? 
null : acl.serialize(); + } + + private final String fName; + private final ConfigDb fConfigDb; + private final ConfigPath fBaseTopicData; + private final String fOwner; + private final String fDesc; + private final NsaAcl fReaders; + private final NsaAcl fWriters; + private boolean fTransactionEnabled; + + public boolean isTransactionEnabled() { + return fTransactionEnabled; + } + + @Override + public Set getOwners() { + final TreeSet owners = new TreeSet (); + owners.add ( fOwner ); + return owners; + } + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPMetricsSet.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPMetricsSet.java new file mode 100644 index 0000000..0226b9b --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPMetricsSet.java @@ -0,0 +1,231 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.HashMap; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import org.onap.dmaap.dmf.mr.CambriaApiVersionInfo; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.mr.apiServer.metrics.cambria.DMaaPMetricsSender; +import com.att.nsa.drumlin.till.nv.rrNvReadable; +import com.att.nsa.metrics.impl.CdmConstant; +import com.att.nsa.metrics.impl.CdmCounter; +import com.att.nsa.metrics.impl.CdmMetricsRegistryImpl; +import com.att.nsa.metrics.impl.CdmMovingAverage; +import com.att.nsa.metrics.impl.CdmRateTicker; +import com.att.nsa.metrics.impl.CdmSimpleMetric; +import com.att.nsa.metrics.impl.CdmStringConstant; +import com.att.nsa.metrics.impl.CdmTimeSince; + +/*@Component("dMaaPMetricsSet")*/ +/** + * Metrics related information + * + * @author anowarul.islam + * + */ +public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet { + + private final CdmStringConstant fVersion; + private final CdmConstant fStartTime; + private final CdmTimeSince fUpTime; + + private final CdmCounter fRecvTotal; + private final CdmRateTicker fRecvEpsInstant; + private final CdmRateTicker fRecvEpsShort; + private final CdmRateTicker fRecvEpsLong; + + private final CdmCounter fSendTotal; + private final CdmRateTicker fSendEpsInstant; + private final CdmRateTicker fSendEpsShort; + private final CdmRateTicker fSendEpsLong; + + private final CdmCounter fKafkaConsumerCacheMiss; + private final CdmCounter fKafkaConsumerCacheHit; + + private final CdmCounter fKafkaConsumerClaimed; + private final CdmCounter fKafkaConsumerTimeout; + + private final CdmSimpleMetric fFanOutRatio; + + private final HashMap fPathUseRates; + private final HashMap fPathAvgs; + + private rrNvReadable fSettings; + + private final ScheduledExecutorService fScheduler; + + /** + * Constructor initialization + * + * @param cs + */ + + public DMaaPMetricsSet(rrNvReadable cs) { + + fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion()); + super.putItem("version", fVersion); + + final long startTime = System.currentTimeMillis(); + final Date d = new Date(startTime); + final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d); + fStartTime = new CdmConstant(startTime / 1000, "Start Time (epoch); " + text); + super.putItem("startTime", fStartTime); + + fUpTime = new CdmTimeSince("seconds since start"); + super.putItem("upTime", fUpTime); + + fRecvTotal = new CdmCounter("Total events received since start"); + super.putItem("recvTotalEvents", fRecvTotal); + + fRecvEpsInstant = new CdmRateTicker("recv eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES); + super.putItem("recvEpsInstant", fRecvEpsInstant); + + fRecvEpsShort = new CdmRateTicker("recv eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES); + super.putItem("recvEpsShort", fRecvEpsShort); + + fRecvEpsLong = new CdmRateTicker("recv eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS); + super.putItem("recvEpsLong", fRecvEpsLong); + + fSendTotal = new CdmCounter("Total events sent since start"); + super.putItem("sendTotalEvents", fSendTotal); + + fSendEpsInstant = new CdmRateTicker("send eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES); + super.putItem("sendEpsInstant", fSendEpsInstant); + + fSendEpsShort = new 
CdmRateTicker("send eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES); + super.putItem("sendEpsShort", fSendEpsShort); + + fSendEpsLong = new CdmRateTicker("send eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS); + super.putItem("sendEpsLong", fSendEpsLong); + + fKafkaConsumerCacheMiss = new CdmCounter("Kafka Consumer Cache Misses"); + super.putItem("kafkaConsumerCacheMiss", fKafkaConsumerCacheMiss); + + fKafkaConsumerCacheHit = new CdmCounter("Kafka Consumer Cache Hits"); + super.putItem("kafkaConsumerCacheHit", fKafkaConsumerCacheHit); + + fKafkaConsumerClaimed = new CdmCounter("Kafka Consumers Claimed"); + super.putItem("kafkaConsumerClaims", fKafkaConsumerClaimed); + + fKafkaConsumerTimeout = new CdmCounter("Kafka Consumers Timedout"); + super.putItem("kafkaConsumerTimeouts", fKafkaConsumerTimeout); + + // FIXME: CdmLevel is not exactly a great choice + fFanOutRatio = new CdmSimpleMetric() { + @Override + public String getRawValueString() { + return getRawValue().toString(); + } + + @Override + public Number getRawValue() { + final double s = fSendTotal.getValue(); + final double r = fRecvTotal.getValue(); + return r == 0.0 ? 0.0 : s / r; + } + + @Override + public String summarize() { + return getRawValueString() + " sends per recv"; + } + + }; + super.putItem("fanOut", fFanOutRatio); + + // these are added to the metrics catalog as they're discovered + fPathUseRates = new HashMap(); + fPathAvgs = new HashMap(); + + fScheduler = Executors.newScheduledThreadPool(1); + } + + @Override + public void setupCambriaSender() { + DMaaPMetricsSender.sendPeriodically(fScheduler, this, "cambria.apinode.metrics.dmaap"); + } + + @Override + public void onRouteComplete(String name, long durationMs) { + CdmRateTicker ticker = fPathUseRates.get(name); + if (ticker == null) { + ticker = new CdmRateTicker("calls/min on path " + name + "", 1, TimeUnit.MINUTES, 1, TimeUnit.HOURS); + fPathUseRates.put(name, ticker); + super.putItem("pathUse_" + name, ticker); + } + ticker.tick(); + + CdmMovingAverage durs = fPathAvgs.get(name); + if (durs == null) { + durs = new CdmMovingAverage("ms avg duration on path " + name + ", last 10 minutes", 10, TimeUnit.MINUTES); + fPathAvgs.put(name, durs); + super.putItem("pathDurationMs_" + name, durs); + } + durs.tick(durationMs); + } + + @Override + public void publishTick(int amount) { + if (amount > 0) { + fRecvTotal.bumpBy(amount); + fRecvEpsInstant.tick(amount); + fRecvEpsShort.tick(amount); + fRecvEpsLong.tick(amount); + } + } + + @Override + public void consumeTick(int amount) { + if (amount > 0) { + fSendTotal.bumpBy(amount); + fSendEpsInstant.tick(amount); + fSendEpsShort.tick(amount); + fSendEpsLong.tick(amount); + } + } + + @Override + public void onKafkaConsumerCacheMiss() { + fKafkaConsumerCacheMiss.bump(); + } + + @Override + public void onKafkaConsumerCacheHit() { + fKafkaConsumerCacheHit.bump(); + } + + @Override + public void onKafkaConsumerClaimed() { + fKafkaConsumerClaimed.bump(); + } + + @Override + public void onKafkaConsumerTimeout() { + fKafkaConsumerTimeout.bump(); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPNsaApiDb.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPNsaApiDb.java new file mode 100644 index 0000000..8813f24 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPNsaApiDb.java @@ -0,0 +1,140 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * 
================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.security.Key; + + +import org.springframework.beans.factory.annotation.Autowired; + +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDb; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.configs.confimpl.EncryptingLayer; +import com.att.nsa.drumlin.till.nv.rrNvReadable; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.db.BaseNsaApiDbImpl; +import com.att.nsa.security.db.EncryptingApiDbImpl; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; +import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory; +import com.att.nsa.util.rrConvertor; + +/** + * + * @author anowarul.islam + * + */ +public class DMaaPNsaApiDb { + + + private DMaaPZkConfigDb cdb; + + //private static final Logger log = Logger + + private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPNsaApiDb.class); + +/** + * + * Constructor initialized + * @param settings + * @param cdb + */ + @Autowired + public DMaaPNsaApiDb(rrNvReadable settings, DMaaPZkConfigDb cdb) { + + this.setCdb(cdb); + } + /** + * + * @param settings + * @param cdb + * @return + * @throws ConfigDbException + * @throws missingReqdSetting + */ + public static NsaApiDb buildApiKeyDb( + rrNvReadable settings, ConfigDb cdb) throws ConfigDbException, + missingReqdSetting { + // Cambria uses an encrypted api key db + + + final String keyBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.key"); + + + + final String initVectorBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.iv"); + // if neither value was provided, don't encrypt api key db + if (keyBase64 == null && initVectorBase64 == null) { + log.info("This server is configured to use an unencrypted API key database. 
See the settings documentation."); + return new BaseNsaApiDbImpl<>(cdb, + new NsaSimpleApiKeyFactory()); + } else if (keyBase64 == null) { + // neither or both, otherwise something's goofed + throw new missingReqdSetting("cambria.secureConfig.key"); + } else if (initVectorBase64 == null) { + // neither or both, otherwise something's goofed + throw new missingReqdSetting("cambria.secureConfig.iv"); + } else { + log.info("This server is configured to use an encrypted API key database."); + final Key key = EncryptingLayer.readSecretKey(keyBase64); + final byte[] iv = rrConvertor.base64Decode(initVectorBase64); + return new EncryptingApiDbImpl<>(cdb, + new NsaSimpleApiKeyFactory(), key, iv); + } + } + + /** + * @return + * returns settings + */ + + + + + /** + * @param settings + * set settings + */ + + + + + /** + * @return + * returns cbd + */ + public DMaaPZkConfigDb getCdb() { + return cdb; + } + /** + * @param cdb + * set cdb + */ + public void setCdb(DMaaPZkConfigDb cdb) { + this.cdb = cdb; + } + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkClient.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkClient.java new file mode 100644 index 0000000..7f6a8fe --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkClient.java @@ -0,0 +1,45 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import org.I0Itec.zkclient.ZkClient; +import org.springframework.beans.factory.annotation.Qualifier; + +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import com.att.nsa.drumlin.till.nv.rrNvReadable; + +/** + * Created for Zookeeper client which will read configuration and settings parameter + * @author nilanjana.maity + * + */ +public class DMaaPZkClient extends ZkClient { + + /** + * This constructor will get the settings value from rrNvReadable + * and ConfigurationReader's zookeeper connection + * @param settings + */ + public DMaaPZkClient(@Qualifier("propertyReader") rrNvReadable settings) { + super(ConfigurationReader.getMainZookeeperConnectionString()); + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkConfigDb.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkConfigDb.java new file mode 100644 index 0000000..ff0b136 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/DMaaPZkConfigDb.java @@ -0,0 +1,51 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import org.springframework.beans.factory.annotation.Qualifier; + +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import com.att.nsa.configs.confimpl.ZkConfigDb; +import com.att.nsa.drumlin.till.nv.rrNvReadable; + +/** + * Provide the zookeeper config db connection + * @author nilanjana.maity + * + */ +public class DMaaPZkConfigDb extends ZkConfigDb { + /** + * This Constructor will provide the configuration details from the property reader + * and DMaaPZkClient + * @param zk + * @param settings + */ + public DMaaPZkConfigDb(@Qualifier("dMaaPZkClient") DMaaPZkClient zk, + @Qualifier("propertyReader") rrNvReadable settings) { + + + super(ConfigurationReader.getMainZookeeperConnectionString(),ConfigurationReader.getMainZookeeperConnectionSRoot()); + + } + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/LogDetails.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/LogDetails.java new file mode 100644 index 0000000..b10cda6 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/LogDetails.java @@ -0,0 +1,214 @@ +/** + * + */ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.util.Date; + +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.utils.Utils; + +/** + * @author muzainulhaque.qazi + * + */ + +public class LogDetails { + + private String publisherId; + private String topicId; + private String subscriberGroupId; + private String subscriberId; + private String publisherIp; + private String messageBatchId; + private String messageSequence; + private String messageTimestamp; + private String consumeTimestamp; + private String transactionIdTs; + private String serverIp; + + private long messageLengthInBytes; + private long totalMessageCount; + + private boolean transactionEnabled; + /** + * This is for transaction enabled logging details + * + */ + public LogDetails() { + super(); + } + + public String getTransactionId() { + StringBuilder transactionId = new StringBuilder(); + transactionId.append(transactionIdTs); + transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR); + transactionId.append(publisherIp); + transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR); + transactionId.append(messageBatchId); + transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR); + transactionId.append(messageSequence); + + return transactionId.toString(); + } + + public String getPublisherId() { + return publisherId; + } + + public void setPublisherId(String publisherId) { + this.publisherId = publisherId; + } + + public String getTopicId() { + return topicId; + } + + public void setTopicId(String topicId) { + this.topicId = topicId; + } + + public String getSubscriberGroupId() { + return subscriberGroupId; + } + + public void setSubscriberGroupId(String subscriberGroupId) { + this.subscriberGroupId = subscriberGroupId; + } + + public String getSubscriberId() { + return subscriberId; + } + + public void setSubscriberId(String subscriberId) { + this.subscriberId = subscriberId; + } + + public String getPublisherIp() { + return publisherIp; + } + + public void setPublisherIp(String publisherIp) { + this.publisherIp = publisherIp; + } + + public String getMessageBatchId() { + return messageBatchId; + } + + public void setMessageBatchId(Long messageBatchId) { + this.messageBatchId = Utils.getFromattedBatchSequenceId(messageBatchId); + } + + public String getMessageSequence() { + return messageSequence; + } + + public void setMessageSequence(String messageSequence) { + this.messageSequence = messageSequence; + } + + public String getMessageTimestamp() { + return messageTimestamp; + } + + public void setMessageTimestamp(String messageTimestamp) { + this.messageTimestamp = messageTimestamp; + } + + public String getPublishTimestamp() { + return Utils.getFormattedDate(new Date()); + } + + public String getConsumeTimestamp() { + return consumeTimestamp; + } + + public void setConsumeTimestamp(String consumeTimestamp) { + this.consumeTimestamp = consumeTimestamp; + } + + public long getMessageLengthInBytes() { + return messageLengthInBytes; + } + + public void setMessageLengthInBytes(long messageLengthInBytes) { + this.messageLengthInBytes = messageLengthInBytes; + } + + public long getTotalMessageCount() { + return totalMessageCount; + } + + public void setTotalMessageCount(long totalMessageCount) { + this.totalMessageCount = totalMessageCount; + } + + public boolean isTransactionEnabled() { + return transactionEnabled; + } + + public void setTransactionEnabled(boolean 
transactionEnabled) { + this.transactionEnabled = transactionEnabled; + } + + public String getTransactionIdTs() { + return transactionIdTs; + } + + public void setTransactionIdTs(String transactionIdTs) { + this.transactionIdTs = transactionIdTs; + } + + public String getPublisherLogDetails() { + + StringBuilder buffer = new StringBuilder(); + buffer.append("[publisherId=" + publisherId); + buffer.append(", topicId=" + topicId); + buffer.append(", messageTimestamp=" + messageTimestamp); + buffer.append(", publisherIp=" + publisherIp); + buffer.append(", messageBatchId=" + messageBatchId); + buffer.append(", messageSequence=" + messageSequence ); + buffer.append(", messageLengthInBytes=" + messageLengthInBytes); + buffer.append(", transactionEnabled=" + transactionEnabled); + buffer.append(", transactionId=" + getTransactionId()); + buffer.append(", publishTimestamp=" + getPublishTimestamp()); + buffer.append(", serverIp=" + getServerIp()+"]"); + return buffer.toString(); + + } + + public String getServerIp() { + return serverIp; + } + + public void setServerIp(String serverIp) { + this.serverIp = serverIp; + } + + public void setMessageBatchId(String messageBatchId) { + this.messageBatchId = messageBatchId; + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/beans/TopicBean.java b/src/main/java/org/onap/dmaap/dmf/mr/beans/TopicBean.java new file mode 100644 index 0000000..127466b --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/beans/TopicBean.java @@ -0,0 +1,155 @@ +/** + * + */ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.beans; + +import java.io.Serializable; + +import javax.xml.bind.annotation.XmlRootElement; + +/** + * @author muzainulhaque.qazi + * + */ +@XmlRootElement +public class TopicBean implements Serializable { + + private static final long serialVersionUID = -8620390377775457949L; + private String topicName; + private String topicDescription; + + private int partitionCount; + private int replicationCount; + + private boolean transactionEnabled; + + /** + * constructor + */ + public TopicBean() { + super(); + } + + /** + * constructor initialization with topic details name, description, + * partition, replication, transaction + * + * @param topicName + * @param description + * @param partitionCount + * @param replicationCount + * @param transactionEnabled + */ + public TopicBean(String topicName, String topicDescription, int partitionCount, int replicationCount, + boolean transactionEnabled) { + super(); + this.topicName = topicName; + this.topicDescription = topicDescription; + this.partitionCount = partitionCount; + this.replicationCount = replicationCount; + this.transactionEnabled = transactionEnabled; + } + + /** + * @return + * returns topic name which is of String type + */ + public String getTopicName() { + return topicName; + } + + /** + * @param topicName + * set topic name + */ + public void setTopicName(String topicName) { + this.topicName = topicName; + } + + + /** + * @return + * returns partition count which is of int type + */ + public int getPartitionCount() { + return partitionCount; + } + + /** + * @param partitionCount + * set partition Count + */ + public void setPartitionCount(int partitionCount) { + this.partitionCount = partitionCount; + } + + /** + * @return + * returns replication count which is of int type + */ + public int getReplicationCount() { + return replicationCount; + } + + /** + * @param + * set replication count which is of int type + */ + public void setReplicationCount(int replicationCount) { + this.replicationCount = replicationCount; + } + + /** + * @return + * returns boolean value which indicates whether transaction is Enabled + */ + public boolean isTransactionEnabled() { + return transactionEnabled; + } + + /** + * @param + * sets boolean value which indicates whether transaction is Enabled + */ + public void setTransactionEnabled(boolean transactionEnabled) { + this.transactionEnabled = transactionEnabled; + } + + /** + * + * @return returns description which is of String type + */ + public String getTopicDescription() { + return topicDescription; + } + /** + * + * @param topicDescription + * set description which is of String type + */ + public void setTopicDescription(String topicDescription) { + this.topicDescription = topicDescription; + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java b/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java new file mode 100644 index 0000000..fd342b7 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/constants/CambriaConstants.java @@ -0,0 +1,126 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.constants; + +import org.onap.dmaap.dmf.mr.utils.Utils; + +/** + * This is the constant files for all the property or parameters. + * @author nilanjana.maity + * + */ +public interface CambriaConstants { + + String CAMBRIA = "Cambria"; + String DMAAP = "DMaaP"; + + String kDefault_ZkRoot = "/fe3c/cambria"; + + String kSetting_ZkConfigDbRoot = "config.zk.root"; + String kDefault_ZkConfigDbRoot = kDefault_ZkRoot + "/config"; +String msgRtr_prop="MsgRtrApi.properties"; + String kBrokerType = "broker.type"; + + /** + * value to use to signal kafka broker type. + */ + String kBrokerType_Kafka = "kafka"; + String kBrokerType_Memory = "memory"; + String kSetting_AdminSecret = "authentication.adminSecret"; + + String kSetting_ApiNodeIdentifier = "cambria.api.node.identifier"; + + /** + * value to use to signal max empty poll per minute + */ + String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute"; + String kSetting_MaxPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute"; + double kDefault_MaxEmptyPollsPerMinute = 10.0; + + String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms"; + String kSetting_SleepMsRealOnRateLimit = "cambria.rateLimitActual.delay.ms"; + long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute ); + + String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes"; + int kDefault_RateLimitWindowLength = 5; + + String kConfig = "c"; + + String kSetting_Port = "cambria.service.port"; + /** + * value to use to signal default port + */ + int kDefault_Port = 3904; + + String kSetting_MaxThreads = "tomcat.maxthreads"; + int kDefault_MaxThreads = -1; + + + + //String kDefault_TomcatProtocolClass = Http11NioProtocol.class.getName (); + + String kSetting_ZkConfigDbServers = "config.zk.servers"; + + /** + * value to indicate localhost port number + */ + String kDefault_ZkConfigDbServers = "localhost:2181"; + + /** + * value to use to signal Session time out + */ + String kSetting_ZkSessionTimeoutMs = "cambria.consumer.cache.zkSessionTimeout"; + int kDefault_ZkSessionTimeoutMs = 20 * 1000; + + /** + * value to use to signal connection time out + */ + String kSetting_ZkConnectionTimeoutMs = "cambria.consumer.cache.zkConnectionTimeout"; + int kDefault_ZkConnectionTimeoutMs = 5 * 1000; + + String TRANSACTION_ID_SEPARATOR = "::"; + + /** + * value to use to signal there's no timeout on the consumer request. + */ + public static final int kNoTimeout = 10000; + + /** + * value to use to signal no limit in the number of messages returned. 
+ */ + public static final int kNoLimit = 0; + + /** + * value to use to signal that the caller wants the next set of events + */ + public static final int kNextOffset = -1; + + /** + * value to use to signal there's no filter on the response stream. + */ + public static final String kNoFilter = ""; + + //Added for Metric publish + public static final int kStdCambriaServicePort = 3904; + public static final String kBasePath = "/events/"; + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPAccessDeniedException.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPAccessDeniedException.java new file mode 100644 index 0000000..c70190f --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPAccessDeniedException.java @@ -0,0 +1,42 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.exception; + +import org.onap.dmaap.dmf.mr.CambriaApiException; + +public class DMaaPAccessDeniedException extends CambriaApiException{ + + + + public DMaaPAccessDeniedException(ErrorResponse errRes) { + super(errRes); + + } + + /** + * + */ + private static final long serialVersionUID = 1L; + + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPCambriaExceptionMapper.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPCambriaExceptionMapper.java new file mode 100644 index 0000000..4e03a58 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPCambriaExceptionMapper.java @@ -0,0 +1,94 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+
+import javax.inject.Singleton;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.http.HttpStatus;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * Exception Mapper class to handle
+ * CambriaApiException
+ * @author rajashree.khare
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPCambriaExceptionMapper implements ExceptionMapper<CambriaApiException> {
+
+	private ErrorResponse errRes;
+
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPCambriaExceptionMapper.class);
+
+	@Autowired
+	private DMaaPErrorMessages msgs;
+
+	public DMaaPCambriaExceptionMapper() {
+		super();
+		LOGGER.info("Cambria Exception Mapper Created..");
+	}
+
+	@Override
+	public Response toResponse(CambriaApiException ex) {
+
+		LOGGER.info("Reached Cambria Exception Mapper..");
+
+		/**
+		 * Cambria Generic Exception
+		 */
+		if(ex instanceof CambriaApiException)
+		{
+
+			errRes = ex.getErrRes();
+			if(errRes!=null) {
+
+				return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+						.build();
+			}
+			else
+			{
+				return Response.status(ex.getStatus()).entity(ex.getMessage()).type(MediaType.APPLICATION_JSON)
+						.build();
+			}
+
+		}
+		else
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), msgs.getServerUnav());
+			return Response.status(HttpStatus.SC_EXPECTATION_FAILED).entity(errRes).type(MediaType.APPLICATION_JSON).build();
+		}
+
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPErrorMessages.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPErrorMessages.java
new file mode 100644
index 0000000..7b68b42
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPErrorMessages.java
@@ -0,0 +1,248 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+
+import javax.annotation.PostConstruct;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+import org.springframework.web.context.support.SpringBeanAutowiringSupport;
+
+/**
+ * This class reads the error message properties
+ * from the properties file.
+ * @author rajashree.khare
+ *
+ */
+@Component
+public class DMaaPErrorMessages {
+
+	//@Value("${resource.not.found}")
+	private String notFound="The requested resource was not found. Please verify the URL and try again.";
+
+	//@Value("${server.unavailable}")
+	private String serverUnav="Server is temporarily unavailable or busy. Try again later, or try another server in the cluster.";
+
+	//@Value("${http.method.not.allowed}")
+	private String methodNotAllowed="The specified HTTP method is not allowed for the requested resource. Enter a valid HTTP method and try again.";
+
+	//@Value("${incorrect.request.json}")
+	private String badRequest="Incorrect JSON object. Please correct the JSON format and try again.";
+
+	//@Value("${network.time.out}")
+	private String nwTimeout="Connection to the DMaaP MR timed out. Please try again.";
+
+	//@Value("${get.topic.failure}")
+	private String topicsfailure="Failed to retrieve list of all topics.";
+
+	//@Value("${not.permitted.access.1}")
+	private String notPermitted1="Access Denied. User does not have permission to perform";
+
+	//@Value("${not.permitted.access.2}")
+	private String notPermitted2="operation on Topic:";
+
+	//@Value("${get.topic.details.failure}")
+	private String topicDetailsFail="Failed to retrieve details of topic:";
+
+	//@Value("${create.topic.failure}")
+	private String createTopicFail="Failed to create topic:";
+
+	//@Value("${delete.topic.failure}")
+	private String deleteTopicFail="Failed to delete topic:";
+
+	//@Value("${incorrect.json}")
+	private String incorrectJson="Incorrect JSON object. Could not parse JSON. Please correct the JSON format and try again.";
+
+	//@Value("${consume.msg.error}")
+	private String consumeMsgError="Error while reading data from topic.";
+
+	//@Value("${publish.msg.error}")
+	private String publishMsgError="Error while publishing data to topic.";
+
+	//@Value("${publish.msg.count}")
+	private String publishMsgCount="Successfully published number of messages:";
+
+	//@Value("${authentication.failure}")
+	private String authFailure="Access Denied: Invalid Credentials.
Enter a valid MechId and Password and try again."; + //@Value("${msg_size_exceeds}") + private String msgSizeExceeds="Message size exceeds the default size."; + + + //@Value("${topic.not.exist}") + private String topicNotExist="No such topic exists."; + + public String getMsgSizeExceeds() { + return msgSizeExceeds; + } + + public void setMsgSizeExceeds(String msgSizeExceeds) { + this.msgSizeExceeds = msgSizeExceeds; + } + + public String getNotFound() { + return notFound; + } + + public void setNotFound(String notFound) { + this.notFound = notFound; + } + + public String getServerUnav() { + return serverUnav; + } + + public void setServerUnav(String serverUnav) { + this.serverUnav = serverUnav; + } + + public String getMethodNotAllowed() { + return methodNotAllowed; + } + + public void setMethodNotAllowed(String methodNotAllowed) { + this.methodNotAllowed = methodNotAllowed; + } + + public String getBadRequest() { + return badRequest; + } + + public void setBadRequest(String badRequest) { + this.badRequest = badRequest; + } + + public String getNwTimeout() { + return nwTimeout; + } + + public void setNwTimeout(String nwTimeout) { + this.nwTimeout = nwTimeout; + } + + public String getNotPermitted1() { + return notPermitted1; + } + + public void setNotPermitted1(String notPermitted1) { + this.notPermitted1 = notPermitted1; + } + + public String getNotPermitted2() { + return notPermitted2; + } + + public void setNotPermitted2(String notPermitted2) { + this.notPermitted2 = notPermitted2; + } + + public String getTopicsfailure() { + return topicsfailure; + } + + public void setTopicsfailure(String topicsfailure) { + this.topicsfailure = topicsfailure; + } + + public String getTopicDetailsFail() { + return topicDetailsFail; + } + + public void setTopicDetailsFail(String topicDetailsFail) { + this.topicDetailsFail = topicDetailsFail; + } + + public String getCreateTopicFail() { + return createTopicFail; + } + + public void setCreateTopicFail(String createTopicFail) { + this.createTopicFail = createTopicFail; + } + + public String getIncorrectJson() { + return incorrectJson; + } + + public void setIncorrectJson(String incorrectJson) { + this.incorrectJson = incorrectJson; + } + + public String getDeleteTopicFail() { + return deleteTopicFail; + } + + public void setDeleteTopicFail(String deleteTopicFail) { + this.deleteTopicFail = deleteTopicFail; + } + + public String getConsumeMsgError() { + return consumeMsgError; + } + + public void setConsumeMsgError(String consumeMsgError) { + this.consumeMsgError = consumeMsgError; + } + + public String getPublishMsgError() { + return publishMsgError; + } + + public void setPublishMsgError(String publishMsgError) { + this.publishMsgError = publishMsgError; + } + + public String getPublishMsgCount() { + return publishMsgCount; + } + + public String getAuthFailure() { + return authFailure; + } + + public void setAuthFailure(String authFailure) { + this.authFailure = authFailure; + } + + public void setPublishMsgCount(String publishMsgCount) { + this.publishMsgCount = publishMsgCount; + } + + public String getTopicNotExist() { + return topicNotExist; + } + + public void setTopicNotExist(String topicNotExist) { + this.topicNotExist = topicNotExist; + } + + + @PostConstruct + public void init() { + SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(this); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPResponseCode.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPResponseCode.java new file mode 100644 index 
0000000..a7b073a --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPResponseCode.java @@ -0,0 +1,93 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.exception; + +/** + * Define the Error Response Codes for MR + * using this enumeration + * @author rajashree.khare + * + */ +public enum DMaaPResponseCode { + + + /** + * GENERIC + */ + RESOURCE_NOT_FOUND(3001), + SERVER_UNAVAILABLE(3002), + METHOD_NOT_ALLOWED(3003), + GENERIC_INTERNAL_ERROR(1004), + /** + * AAF + */ + INVALID_CREDENTIALS(4001), + ACCESS_NOT_PERMITTED(4002), + UNABLE_TO_AUTHORIZE(4003), + /** + * PUBLISH AND SUBSCRIBE + */ + MSG_SIZE_EXCEEDS_BATCH_LIMIT(5001), + UNABLE_TO_PUBLISH(5002), + INCORRECT_BATCHING_FORMAT(5003), + MSG_SIZE_EXCEEDS_MSG_LIMIT(5004), + INCORRECT_JSON(5005), + CONN_TIMEOUT(5006), + PARTIAL_PUBLISH_MSGS(5007), + CONSUME_MSG_ERROR(5008), + PUBLISH_MSG_ERROR(5009), + RETRIEVE_TRANSACTIONS(5010), + RETRIEVE_TRANSACTIONS_DETAILS(5011), + TOO_MANY_REQUESTS(5012), + + RATE_LIMIT_EXCEED(301), + + /** + * TOPICS + */ + GET_TOPICS_FAIL(6001), + GET_TOPICS_DETAILS_FAIL(6002), + CREATE_TOPIC_FAIL(6003), + DELETE_TOPIC_FAIL(6004), + GET_PUBLISHERS_BY_TOPIC(6005), + GET_CONSUMERS_BY_TOPIC(6006), + PERMIT_PUBLISHER_FOR_TOPIC(6007), + REVOKE_PUBLISHER_FOR_TOPIC(6008), + PERMIT_CONSUMER_FOR_TOPIC(6009), + REVOKE_CONSUMER_FOR_TOPIC(6010), + GET_CONSUMER_CACHE(6011), + DROP_CONSUMER_CACHE(6012), + GET_METRICS_ERROR(6013), + GET_BLACKLIST(6014), + ADD_BLACKLIST(6015), + REMOVE_BLACKLIST(6016), + TOPIC_NOT_IN_AAF(6017); + private int responseCode; + + public int getResponseCode() { + return responseCode; + } + private DMaaPResponseCode (final int code) { + responseCode = code; + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPWebExceptionMapper.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPWebExceptionMapper.java new file mode 100644 index 0000000..c768794 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/exception/DMaaPWebExceptionMapper.java @@ -0,0 +1,137 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual 
Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.exception;
+
+import javax.inject.Singleton;
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.InternalServerErrorException;
+import javax.ws.rs.NotAllowedException;
+import javax.ws.rs.NotAuthorizedException;
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.ServiceUnavailableException;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.http.HttpStatus;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * Exception Mapper class to handle
+ * Jersey Exceptions
+ * @author rajashree.khare
+ *
+ */
+@Provider
+@Singleton
+public class DMaaPWebExceptionMapper implements ExceptionMapper<WebApplicationException> {
+
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPWebExceptionMapper.class);
+	private ErrorResponse errRes;
+
+	@Autowired
+	private DMaaPErrorMessages msgs;
+
+	public DMaaPWebExceptionMapper() {
+		super();
+		LOGGER.info("WebException Mapper Created..");
+	}
+
+	@Override
+	public Response toResponse(WebApplicationException ex) {
+
+		LOGGER.info("Reached WebException Mapper");
+
+		/**
+		 * Resource Not Found
+		 */
+		if(ex instanceof NotFoundException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),msgs.getNotFound());
+
+			LOGGER.info(errRes.toString());
+
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+					.build();
+
+		}
+
+		if(ex instanceof InternalServerErrorException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_INTERNAL_SERVER_ERROR,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
+
+			LOGGER.info(errRes.toString());
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+					.build();
+
+		}
+
+		if(ex instanceof NotAuthorizedException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),msgs.getAuthFailure());
+
+			LOGGER.info(errRes.toString());
+			return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
+					.build();
+		}
+
+		if(ex instanceof BadRequestException)
+		{
+			errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),msgs.getBadRequest());
+
+			LOGGER.info(errRes.toString());
+			return
Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) + .build(); + } + if(ex instanceof NotAllowedException) + { + errRes = new ErrorResponse(HttpStatus.SC_METHOD_NOT_ALLOWED,DMaaPResponseCode.METHOD_NOT_ALLOWED.getResponseCode(),msgs.getMethodNotAllowed()); + + LOGGER.info(errRes.toString()); + return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) + .build(); + } + + if(ex instanceof ServiceUnavailableException) + { + errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav()); + + LOGGER.info(errRes.toString()); + return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON) + .build(); + } + + + return Response.serverError().build(); + } + + + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/exception/ErrorResponse.java b/src/main/java/org/onap/dmaap/dmf/mr/exception/ErrorResponse.java new file mode 100644 index 0000000..58e81ae --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/exception/ErrorResponse.java @@ -0,0 +1,135 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.exception; +import org.json.JSONObject; +/** + * Represents the Error Response Object + * that is rendered as a JSON object when + * an exception or error occurs on MR Rest Service. 
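For illustration only (not part of this change set): a minimal sketch of how the ErrorResponse class defined just below might be constructed and rendered through its getErrMapperStr() method. The 404/3001 values and the sample message are assumed placeholder values.

import org.json.JSONObject;
import org.onap.dmaap.dmf.mr.exception.ErrorResponse;

public class ErrorResponseSample {
    public static void main(String[] args) {
        // Assumed sample values: HTTP 404 mapped to MR code 3001 (RESOURCE_NOT_FOUND).
        ErrorResponse err = new ErrorResponse(404, 3001,
                "The requested resource was not found. Please verify the URL and try again.");

        // getErrMapperStr() collects the four core fields into a JSONObject, e.g.
        // {"status":404,"mrstatus":3001,"message":"The requested resource ...","helpURL":"http://onap.readthedocs.io"}
        JSONObject body = err.getErrMapperStr();
        System.out.println(body);
    }
}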
+ * @author rajashree.khare + * + */ +//@XmlRootElement +public class ErrorResponse { + + private int httpStatusCode; + private int mrErrorCode; + private String errorMessage; + private String helpURL; + private String statusTs; + private String topic; + private String publisherId; + private String publisherIp; + private String subscriberId; + private String subscriberIp; + + + public ErrorResponse(int httpStatusCode, int mrErrorCode, + String errorMessage, String helpURL, String statusTs, String topic, + String publisherId, String publisherIp, String subscriberId, + String subscriberIp) { + super(); + this.httpStatusCode = httpStatusCode; + this.mrErrorCode = mrErrorCode; + this.errorMessage = errorMessage; + this.helpURL = "http://onap.readthedocs.io"; + this.statusTs = statusTs; + this.topic = topic; + this.publisherId = publisherId; + this.publisherIp = publisherIp; + this.subscriberId = subscriberId; + this.subscriberIp = subscriberIp; + } + + public ErrorResponse(int httpStatusCode, int mrErrorCode, + String errorMessage) { + super(); + this.httpStatusCode = httpStatusCode; + this.mrErrorCode = mrErrorCode; + this.errorMessage = errorMessage; + this.helpURL = "http://onap.readthedocs.io"; + + } + + public int getHttpStatusCode() { + return httpStatusCode; + } + + public void setHttpStatusCode(int httpStatusCode) { + this.httpStatusCode = httpStatusCode; + } + + public int getMrErrorCode() { + return mrErrorCode; + } + + + public void setMrErrorCode(int mrErrorCode) { + this.mrErrorCode = mrErrorCode; + } + + + public String getErrorMessage() { + return errorMessage; + } + + public void setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + } + + public String getHelpURL() { + return helpURL; + } + + public void setHelpURL(String helpURL) { + this.helpURL = helpURL; + } + + @Override + public String toString() { + return "ErrorResponse {\"httpStatusCode\":\"" + httpStatusCode + + "\", \"mrErrorCode\":\"" + mrErrorCode + "\", \"errorMessage\":\"" + + errorMessage + "\", \"helpURL\":\"" + helpURL + "\", \"statusTs\":\""+statusTs+"\"" + + ", \"topicId\":\""+topic+"\", \"publisherId\":\""+publisherId+"\"" + + ", \"publisherIp\":\""+publisherIp+"\", \"subscriberId\":\""+subscriberId+"\"" + + ", \"subscriberIp\":\""+subscriberIp+"\"}"; + } + + public String getErrMapperStr1() { + return "ErrorResponse [httpStatusCode=" + httpStatusCode + ", mrErrorCode=" + mrErrorCode + ", errorMessage=" + + errorMessage + ", helpURL=" + helpURL + "]"; + } + + + + public JSONObject getErrMapperStr() { + JSONObject o = new JSONObject(); + o.put("status", getHttpStatusCode()); + o.put("mrstatus", getMrErrorCode()); + o.put("message", getErrorMessage()); + o.put("helpURL", getHelpURL()); + return o; + } + + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/listener/CambriaServletContextListener.java b/src/main/java/org/onap/dmaap/dmf/mr/listener/CambriaServletContextListener.java new file mode 100644 index 0000000..273b906 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/listener/CambriaServletContextListener.java @@ -0,0 +1,64 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.listener;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * This is the Cambria servlet context listener, which publishes the application's
+ * DME2 endpoints when the context starts and unpublishes them when it shuts down.
+ * @author nilanjana.maity
+ *
+ */
+public class CambriaServletContextListener implements ServletContextListener {
+
+	DME2EndPointLoader loader = DME2EndPointLoader.getInstance();
+
+	private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaServletContextListener.class);
+
+	/**
+	 * contextDestroyed() unpublishes the DME2 endpoints
+	 * @param arg0
+	 */
+	@Override
+	public void contextDestroyed(ServletContextEvent arg0) {
+		log.info("CambriaServletContextListener contextDestroyed");
+
+		loader.unPublishEndPoints();
+	}
+
+	/**
+	 * contextInitialized() publishes the DME2 endpoints
+	 * @param arg0
+	 */
+	@Override
+	public void contextInitialized(ServletContextEvent arg0) {
+		log.info("CambriaServletContextListener contextInitialized");
+		loader.publishEndPoints();
+	}
+
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/listener/DME2EndPointLoader.java b/src/main/java/org/onap/dmaap/dmf/mr/listener/DME2EndPointLoader.java
new file mode 100644
index 0000000..5d98f98
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/dmf/mr/listener/DME2EndPointLoader.java
@@ -0,0 +1,123 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.listener; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +import com.att.aft.dme2.manager.registry.DME2EndpointRegistry; +import com.att.aft.dme2.api.DME2Exception; +import com.att.aft.dme2.api.DME2Manager; +import org.onap.dmaap.dmf.mr.service.impl.EventsServiceImpl; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/** + * + * @author anowarul.islam + * + */ +public class DME2EndPointLoader { + + private String latitude; + private String longitude; + private String version; + private String serviceName; + private String env; + private String routeOffer; + private String hostName; + private String port; + private String contextPath; + private String protocol; + private String serviceURL; + private static DME2EndPointLoader loader = new DME2EndPointLoader(); + + private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class); + private DME2EndPointLoader() { + } + + public static DME2EndPointLoader getInstance() { + return loader; + } + + /** + * publishing endpoints + */ + public void publishEndPoints() { + + try { + InputStream input = this.getClass().getResourceAsStream("/endpoint.properties"); + Properties props = new Properties(); + props.load(input); + + latitude = props.getProperty("Latitude"); + longitude = props.getProperty("Longitude"); + version = props.getProperty("Version"); + serviceName = props.getProperty("ServiceName"); + env = props.getProperty("Environment"); + routeOffer = props.getProperty("RouteOffer"); + hostName = props.getProperty("HostName"); + port = props.getProperty("Port"); + contextPath = props.getProperty("ContextPath"); + protocol = props.getProperty("Protocol"); + + System.setProperty("AFT_LATITUDE", latitude); + System.setProperty("AFT_LONGITUDE", longitude); + System.setProperty("AFT_ENVIRONMENT", "AFTUAT"); + + serviceURL = "service=" + serviceName + "/" + "version=" + version + "/" + "envContext=" + env + "/" + + "routeOffer=" + routeOffer; + + DME2Manager manager = new DME2Manager("testEndpointPublish", props); + manager.setClientCredentials("sh301n", ""); + DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry(); + // Publish API takes service name, context path, hostname, port and + // protocol as args + svcRegistry.publish(serviceURL, contextPath, hostName, Integer.parseInt(port), protocol); + + } catch (IOException | DME2Exception e) { + LOG.error("Failed due to :" + e); + } + + } +/** + * unpublishing endpoints + */ + public void unPublishEndPoints() { + + DME2Manager manager; + try { + System.setProperty("AFT_LATITUDE", latitude); + System.setProperty("AFT_LONGITUDE", longitude); + System.setProperty("AFT_ENVIRONMENT", "AFTUAT"); + + manager = DME2Manager.getDefaultInstance(); + DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry(); + svcRegistry.unpublish(serviceURL, hostName, Integer.parseInt(port)); + } catch (DME2Exception e) { + LOG.error("Failed due to DME2Exception" + e); + } + + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker.java b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker.java new file mode 100644 index 0000000..6cbb8c8 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker.java @@ -0,0 +1,92 @@ +/******************************************************************************* + * 
============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metabroker; + +import java.util.List; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +/** + * A broker interface to manage metadata around topics, etc. + * + * @author peter + * + */ +public interface Broker { + /** + * + * @author anowarul.islam + * + */ + public class TopicExistsException extends Exception { + /** + * + * @param topicName + */ + public TopicExistsException(String topicName) { + super("Topic " + topicName + " exists."); + } + + private static final long serialVersionUID = 1L; + } + + /** + * Get all topics in the underlying broker. + * + * @return + * @throws ConfigDbException + */ + List getAllTopics() throws ConfigDbException; + + /** + * Get a specific topic from the underlying broker. + * + * @param topic + * @return a topic, or null + */ + Topic getTopic(String topic) throws ConfigDbException; + + /** + * create a topic + * + * @param topic + * @param description + * @param ownerApiKey + * @param partitions + * @param replicas + * @param transactionEnabled + * @return + * @throws TopicExistsException + * @throws CambriaApiException + */ + Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas, + boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException; + + /** + * Delete a topic by name + * + * @param topic + */ + void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker1.java b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker1.java new file mode 100644 index 0000000..69dacf9 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Broker1.java @@ -0,0 +1,95 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metabroker; + +import java.util.List; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +/** + * A broker interface to manage metadata around topics, etc. + * alternate for Broker1 to avoid this error in spring boot + *org.springframework.beans.factory.NoUniqueBeanDefinitionException: + * No qualifying bean of type [com.att.dmf.mr.metabroker.Broker] is defined: + * expected single matching bean but found 2: mmb,dMaaPKafkaMetaBroker + + * + */ +public interface Broker1 { + /** + * + * @author Ramkumar + * + */ + public class TopicExistsException extends Exception { + /** + * + * @param topicName + */ + public TopicExistsException(String topicName) { + super("Topic " + topicName + " exists."); + } + + private static final long serialVersionUID = 1L; + } + + /** + * Get all topics in the underlying broker. + * + * @return + * @throws ConfigDbException + */ + List getAllTopics() throws ConfigDbException; + + /** + * Get a specific topic from the underlying broker. + * + * @param topic + * @return a topic, or null + */ + Topic getTopic(String topic) throws ConfigDbException; + + /** + * create a topic + * + * @param topic + * @param description + * @param ownerApiKey + * @param partitions + * @param replicas + * @param transactionEnabled + * @return + * @throws TopicExistsException + * @throws CambriaApiException + */ + Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas, + boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException; + + /** + * Delete a topic by name + * + * @param topic + */ + void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Topic.java b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Topic.java new file mode 100644 index 0000000..ea23182 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metabroker/Topic.java @@ -0,0 +1,133 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metabroker; + +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.NsaAcl; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.ReadWriteSecuredResource; +/** + * This is the interface for topic and all the topic related operations + * get topic name, owner, description, transactionEnabled etc. + * @author nilanjana.maity + * + */ +public interface Topic extends ReadWriteSecuredResource +{ + /** + * User defined exception for access denied while access the topic for Publisher and consumer + * @author nilanjana.maity + * + *//* + public class AccessDeniedException extends Exception + + *//** + * AccessDenied Description + *//* + + *//** + * AccessDenied Exception for the user while authenticating the user request + * @param user + *//* + + private static final long serialVersionUID = 1L; + }*/ + + /** + * Get this topic's name + * @return + */ + String getName (); + + /** + * Get the API key of the owner of this topic. + * @return + */ + String getOwner (); + + /** + * Get a description of the topic, as set by the owner at creation time. + * @return + */ + String getDescription (); + + /** + * If the topic is transaction enabled + * @return boolean true/false + */ + boolean isTransactionEnabled(); + + /** + * Get the ACL for reading on this topic. Can be null. + * @return + */ + NsaAcl getReaderAcl (); + + /** + * Get the ACL for writing on this topic. Can be null. + * @return + */ + NsaAcl getWriterAcl (); + + /** + * Check if this user can read the topic. Throw otherwise. Note that + * user may be null. + * @param user + */ + void checkUserRead ( NsaApiKey user ) throws AccessDeniedException; + + /** + * Check if this user can write to the topic. Throw otherwise. Note + * that user may be null. 
+ * @param user + */ + void checkUserWrite ( NsaApiKey user ) throws AccessDeniedException; + + /** + * allow the given user to publish + * @param publisherId + * @param asUser + */ + void permitWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException; + + /** + * deny the given user from publishing + * @param publisherId + * @param asUser + */ + void denyWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException; + + /** + * allow the given user to read the topic + * @param consumerId + * @param asUser + */ + void permitReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException; + + /** + * deny the given user from reading the topic + * @param consumerId + * @param asUser + * @throws ConfigDbException + */ + void denyReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java new file mode 100644 index 0000000..023bb28 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java @@ -0,0 +1,52 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metrics.publisher; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.TimeUnit; + +/** + * A Cambria batching publisher is a publisher with additional functionality + * for managing delayed sends. + * + * @author peter + * + */ +public interface CambriaBatchingPublisher extends CambriaPublisher +{ + /** + * Get the number of messages that have not yet been sent. + * @return the number of pending messages + */ + int getPendingMessageCount (); + + /** + * Close this publisher, sending any remaining messages. 
+	 * @param timeout an amount of time to wait for unsent messages to be sent
+	 * @param timeoutUnits the time unit for the timeout arg
+	 * @return a list of any unsent messages after the timeout
+	 * @throws IOException
+	 * @throws InterruptedException
+	 */
+	List<message> close ( long timeout, TimeUnit timeoutUnits ) throws IOException, InterruptedException;
+}
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaClient.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaClient.java
new file mode 100644
index 0000000..2ce4216
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaClient.java
@@ -0,0 +1,89 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher;
+
+import com.att.eelf.configuration.EELFLogger;
+
+/**
+ *
+ * @author anowarul.islam
+ *
+ */
+public interface CambriaClient {
+	/**
+	 * An exception at the Cambria layer. This is used when the HTTP transport
+	 * layer returns a success code but the transaction is not completed as
+	 * expected.
+	 */
+	public class CambriaApiException extends Exception {
+		/**
+		 *
+		 * @param msg
+		 */
+		public CambriaApiException(String msg) {
+			super(msg);
+		}
+
+		/**
+		 *
+		 * @param msg
+		 * @param t
+		 */
+		public CambriaApiException(String msg, Throwable t) {
+			super(msg, t);
+		}
+
+		private static final long serialVersionUID = 1L;
+	}
+
+	/**
+	 * Optionally set the Logger to use
+	 *
+	 * @param log
+	 */
+	void logTo(EELFLogger log);
+
+	/**
+	 * Set the API credentials for this client connection. Subsequent calls will
+	 * include authentication headers.
+	 *
+	 * @param apiKey
+	 * @param apiSecret
+	 */
+	void setApiCredentials(String apiKey, String apiSecret);
+
+	/**
+	 * Remove API credentials, if any, on this connection. Subsequent calls will
+	 * not include authentication headers.
+	 */
+	void clearApiCredentials();
+
+	/**
+	 * Close this connection. Some client interfaces have additional close
+	 * capability.
+ */ + void close(); +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaConsumer.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaConsumer.java new file mode 100644 index 0000000..4d05070 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaConsumer.java @@ -0,0 +1,52 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metrics.publisher; + +import java.io.IOException; + +/** + * This interface will provide fetch mechanism for consumer + * @author nilanjana.maity + * + */ +public interface CambriaConsumer extends CambriaClient +{ + /** + * Fetch a set of messages. The consumer's timeout and message limit are used if set in the constructor call. + + * @return a set of messages + * @throws IOException + */ + Iterable fetch () throws IOException; + + /** + * Fetch a set of messages with an explicit timeout and limit for this call. These values + * override any set in the constructor call. + * + * @param timeoutMs The amount of time in milliseconds that the server should keep the connection + * open while waiting for message traffic. Use -1 for default timeout (controlled on the server-side). + * @param limit A limit on the number of messages returned in a single call. Use -1 for no limit. + * @return a set messages + * @throws IOException if there's a problem connecting to the server + */ + Iterable fetch ( int timeoutMs, int limit ) throws IOException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisher.java new file mode 100644 index 0000000..441d325 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisher.java @@ -0,0 +1,101 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metrics.publisher; + +import java.io.IOException; +import java.util.Collection; + +/** + * A Cambria publishing interface. + * + * @author peter + * + */ +public interface CambriaPublisher extends CambriaClient { + /** + * A simple message container + */ + public static class message { + /** + * + * @param partition + * @param msg + */ + public message(String partition, String msg) { + fPartition = partition == null ? "" : partition; + fMsg = msg; + if (fMsg == null) { + throw new IllegalArgumentException("Can't send a null message."); + } + } + + /** + * + * @param msg + */ + public message(message msg) { + this(msg.fPartition, msg.fMsg); + } + + /** + * declaring partition string + */ + public final String fPartition; + /** + * declaring fMsg String + */ + public final String fMsg; + } + + /** + * Send the given message using the given partition. + * + * @param partition + * @param msg + * @return the number of pending messages + * @throws IOException + */ + int send(String partition, String msg) throws IOException; + + /** + * Send the given message using its partition. + * + * @param msg + * @return the number of pending messages + * @throws IOException + */ + int send(message msg) throws IOException; + + /** + * Send the given messages using their partitions. + * + * @param msgs + * @return the number of pending messages + * @throws IOException + */ + int send(Collection msgs) throws IOException; + + /** + * Close this publisher. It's an error to call send() after close() + */ + void close(); +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisherUtility.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisherUtility.java new file mode 100644 index 0000000..4f072ac --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/CambriaPublisherUtility.java @@ -0,0 +1,146 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.metrics.publisher;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.http.HttpHost;
+/**
+ *
+ * @author anowarul.islam
+ *
+ */
+public class CambriaPublisherUtility
+{
+	public static final String kBasePath = "/events/";
+	public static final int kStdCambriaServicePort = 3904;
+
+	/**
+	 * Translates a string into application/x-www-form-urlencoded
+	 * format using a specific encoding scheme.
+	 * @param s
+	 * @return the escaped string
+	 */
+	public static String escape ( String s )
+	{
+		try
+		{
+			return URLEncoder.encode ( s, "UTF-8");
+		}
+		catch ( UnsupportedEncodingException e )
+		{
+			throw new RuntimeException ( e );
+		}
+	}
+
+	/**
+	 * Builds the URL path used to publish to the given topic.
+	 * @param rawTopic
+	 * @return the URL path for the topic
+	 */
+	public static String makeUrl ( String rawTopic )
+	{
+		final String cleanTopic = escape ( rawTopic );
+
+		final StringBuffer url = new StringBuffer().
+			append ( CambriaPublisherUtility.kBasePath ).
+			append ( cleanTopic );
+		return url.toString ();
+	}
+
+	/**
+	 * Builds the URL path a consumer uses to read from the given topic.
+	 * @param topic
+	 * @param rawConsumerGroup
+	 * @param rawConsumerId
+	 * @return the consumer URL path
+	 */
+	public static String makeConsumerUrl ( String topic, String rawConsumerGroup, String rawConsumerId )
+	{
+		final String cleanConsumerGroup = escape ( rawConsumerGroup );
+		final String cleanConsumerId = escape ( rawConsumerId );
+		return CambriaPublisherUtility.kBasePath + topic + "/" + cleanConsumerGroup + "/" + cleanConsumerId;
+	}
+
+	/**
+	 * Create a list of HttpHosts from an input list of strings. Input strings have
+	 * host[:port] as format. If the port section is not provided, the default port is used.
+	 *
+	 * @param hosts
+	 * @return a list of hosts
+	 */
+	public static List<HttpHost> createHostsList(Collection<String> hosts)
+	{
+		final ArrayList<HttpHost> convertedHosts = new ArrayList<>();
+		for ( String host : hosts )
+		{
+			if ( host.length () == 0 ) continue;
+			convertedHosts.add ( hostForString ( host ) );
+		}
+		return convertedHosts;
+	}
+
+	/**
+	 * Return an HttpHost from an input string. Input string has
+	 * host[:port] as format. If the port section is not provided, the default port is used.
+	 *
+	 * @param host
+	 * @return an HttpHost for the entry
+	 * @throws IllegalArgumentException if the host entry is empty or malformed
+	 */
+	public static HttpHost hostForString ( String host )
+	{
+		if ( host.length() < 1 ) throw new IllegalArgumentException ( "An empty host entry is invalid." );
+
+		String hostPart = host;
+		int port = kStdCambriaServicePort;
+
+		final int colon = host.indexOf ( ':' );
+		if ( colon == 0 ) throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid."
); + if ( colon > 0 ) + { + hostPart = host.substring ( 0, colon ).trim(); + + final String portPart = host.substring ( colon + 1 ).trim(); + if ( portPart.length () > 0 ) + { + try + { + port = Integer.parseInt ( portPart ); + } + catch ( NumberFormatException x ) + { + throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid.", x ); + } + } + // else: use default port on "foo:" + } + + return new HttpHost ( hostPart, port ); + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java new file mode 100644 index 0000000..ff46ce3 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java @@ -0,0 +1,420 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metrics.publisher; + +import java.net.MalformedURLException; +import java.nio.channels.NotYetConnectedException; +import java.util.Collection; +import java.util.TreeSet; +import java.util.UUID; + +import org.onap.dmaap.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl; +import org.onap.dmaap.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher; + +/** + * A factory for Cambria clients.
+ *
+ * Use caution when choosing a consumer factory method. If the call doesn't accept
+ * a consumer group name, it creates a consumer that is not restartable: if you stop
+ * your process and start it again, the client will NOT receive any messages it
+ * missed while down. If you need to ensure receipt of missed messages, use a
+ * consumer created with a group name and ID. (When multiple consumer processes
+ * share the same group, load is split across them; be sure to give each instance
+ * a different ID.)
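+ *
+ * A minimal consumer sketch (host names, topic, group and ID below are
+ * illustrative placeholders, not values from this patch):
+ *
+ *   CambriaConsumer cc = DMaaPCambriaClientFactory.createConsumer(
+ *       DMaaPCambriaConsumerImpl.stringToList("mrhost1:3904,mrhost2:3904"),
+ *       "org.example.myTopic", "myGroup", "instance-01");
+ *   for (String msg : cc.fetch()) {
+ *       // process msg; restarting with the same group and ID delivers
+ *       // messages missed while this process was down
+ *   }
+ *
+ * A batching publisher is obtained the same way; close it with a timeout so
+ * any unsent messages can be recovered:
+ *
+ *   CambriaBatchingPublisher pub = DMaaPCambriaClientFactory.createBatchingPublisher(
+ *       "mrhost1:3904,mrhost2:3904", "org.example.myTopic", 100, 1000);
+ *   pub.send("partitionKey", "{\"event\":\"example\"}");
+ *   List unsent = pub.close(20, TimeUnit.SECONDS);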
+ *
+ * Publishers + * + * @author peter + */ +public class DMaaPCambriaClientFactory { + /** + * Create a consumer instance with the default timeout and no limit on + * messages returned. This consumer operates as an independent consumer + * (i.e., not in a group) and is NOT re-startable across sessions. + * + * @param hostList + * A comma separated list of hosts to use to connect to Cambria. + * You can include port numbers (3904 is the default). For + * example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com" + * + * @param topic + * The topic to consume + * + * @return a consumer + */ + public static CambriaConsumer createConsumer(String hostList, String topic) { + return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList), + topic); + } + + /** + * Create a consumer instance with the default timeout and no limit on + * messages returned. This consumer operates as an independent consumer + * (i.e., not in a group) and is NOT re-startable across sessions. + * + * @param hostSet + * The host used in the URL to Cambria. Entries can be + * "host:port". + * @param topic + * The topic to consume + * + * @return a consumer + */ + public static CambriaConsumer createConsumer(Collection hostSet, + String topic) { + return createConsumer(hostSet, topic, null); + } + + /** + * Create a consumer instance with server-side filtering, the default + * timeout, and no limit on messages returned. This consumer operates as an + * independent consumer (i.e., not in a group) and is NOT re-startable + * across sessions. + * + * @param hostSet + * The host used in the URL to Cambria. Entries can be + * "host:port". + * @param topic + * The topic to consume + * @param filter + * a filter to use on the server side + * + * @return a consumer + */ + public static CambriaConsumer createConsumer(Collection hostSet, + String topic, String filter) { + return createConsumer(hostSet, topic, UUID.randomUUID().toString(), + "0", -1, -1, filter, null, null); + } + + /** + * Create a consumer instance with the default timeout, and no limit on + * messages returned. This consumer can operate in a logical group and is + * re-startable across sessions when you use the same group and ID on + * restart. + * + * @param hostSet + * The host used in the URL to Cambria. Entries can be + * "host:port". + * @param topic + * The topic to consume + * @param consumerGroup + * The name of the consumer group this consumer is part of + * @param consumerId + * The unique id of this consume in its group + * + * @return a consumer + */ + public static CambriaConsumer createConsumer(Collection hostSet, + final String topic, final String consumerGroup, + final String consumerId) { + return createConsumer(hostSet, topic, consumerGroup, consumerId, -1, -1); + } + + /** + * Create a consumer instance with the default timeout, and no limit on + * messages returned. This consumer can operate in a logical group and is + * re-startable across sessions when you use the same group and ID on + * restart. + * + * @param hostSet + * The host used in the URL to Cambria. Entries can be + * "host:port". + * @param topic + * The topic to consume + * @param consumerGroup + * The name of the consumer group this consumer is part of + * @param consumerId + * The unique id of this consume in its group + * @param timeoutMs + * The amount of time in milliseconds that the server should keep + * the connection open while waiting for message traffic. Use -1 + * for default timeout. 
+ * @param limit + * A limit on the number of messages returned in a single call. + * Use -1 for no limit. + * + * @return a consumer + */ + public static CambriaConsumer createConsumer(Collection hostSet, + final String topic, final String consumerGroup, + final String consumerId, int timeoutMs, int limit) { + return createConsumer(hostSet, topic, consumerGroup, consumerId, + timeoutMs, limit, null, null, null); + } + + /** + * Create a consumer instance with the default timeout, and no limit on + * messages returned. This consumer can operate in a logical group and is + * re-startable across sessions when you use the same group and ID on + * restart. This consumer also uses server-side filtering. + * + * @param hostList + * A comma separated list of hosts to use to connect to Cambria. + * You can include port numbers (3904 is the default). For + * example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com" + * @param topic + * The topic to consume + * @param consumerGroup + * The name of the consumer group this consumer is part of + * @param consumerId + * The unique id of this consume in its group + * @param timeoutMs + * The amount of time in milliseconds that the server should keep + * the connection open while waiting for message traffic. Use -1 + * for default timeout. + * @param limit + * A limit on the number of messages returned in a single call. + * Use -1 for no limit. + * @param filter + * A Highland Park filter expression using only built-in filter + * components. Use null for "no filter". + * @param apiKey + * key associated with a user + * @param apiSecret + * of a user + * + * @return a consumer + */ + public static CambriaConsumer createConsumer(String hostList, + final String topic, final String consumerGroup, + final String consumerId, int timeoutMs, int limit, String filter, + String apiKey, String apiSecret) { + return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList), + topic, consumerGroup, consumerId, timeoutMs, limit, filter, + apiKey, apiSecret); + } + + /** + * Create a consumer instance with the default timeout, and no limit on + * messages returned. This consumer can operate in a logical group and is + * re-startable across sessions when you use the same group and ID on + * restart. This consumer also uses server-side filtering. + * + * @param hostSet + * The host used in the URL to Cambria. Entries can be + * "host:port". + * @param topic + * The topic to consume + * @param consumerGroup + * The name of the consumer group this consumer is part of + * @param consumerId + * The unique id of this consume in its group + * @param timeoutMs + * The amount of time in milliseconds that the server should keep + * the connection open while waiting for message traffic. Use -1 + * for default timeout. + * @param limit + * A limit on the number of messages returned in a single call. + * Use -1 for no limit. + * @param filter + * A Highland Park filter expression using only built-in filter + * components. Use null for "no filter". 
+ * @param apiKey + * key associated with a user + * @param apiSecret + * of a user + * @return a consumer + */ + public static CambriaConsumer createConsumer(Collection hostSet, + final String topic, final String consumerGroup, + final String consumerId, int timeoutMs, int limit, String filter, + String apiKey, String apiSecret) { + if (sfMock != null) + return sfMock; + try { + return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup, + consumerId, timeoutMs, limit, filter, apiKey, apiSecret); + } catch (MalformedURLException e) { + + NotYetConnectedException exception=new NotYetConnectedException(); + exception.setStackTrace(e.getStackTrace()); + + throw exception ; + } + } + + /*************************************************************************/ + /*************************************************************************/ + /*************************************************************************/ + + /** + * Create a publisher that sends each message (or group of messages) + * immediately. Most applications should favor higher latency for much + * higher message throughput and the "simple publisher" is not a good + * choice. + * + * @param hostlist + * The host used in the URL to Cambria. Can be "host:port", can + * be multiple comma-separated entries. + * @param topic + * The topic on which to publish messages. + * @return a publisher + */ + public static CambriaBatchingPublisher createSimplePublisher( + String hostlist, String topic) { + return createBatchingPublisher(hostlist, topic, 1, 1); + } + + /** + * Create a publisher that batches messages. Be sure to close the publisher + * to send the last batch and ensure a clean shutdown. Message payloads are + * not compressed. + * + * @param hostlist + * The host used in the URL to Cambria. Can be "host:port", can + * be multiple comma-separated entries. + * @param topic + * The topic on which to publish messages. + * @param maxBatchSize + * The largest set of messages to batch + * @param maxAgeMs + * The maximum age of a message waiting in a batch + * + * @return a publisher + */ + public static CambriaBatchingPublisher createBatchingPublisher( + String hostlist, String topic, int maxBatchSize, long maxAgeMs) { + return createBatchingPublisher(hostlist, topic, maxBatchSize, maxAgeMs, + false); + } + + /** + * Create a publisher that batches messages. Be sure to close the publisher + * to send the last batch and ensure a clean shutdown. + * + * @param hostlist + * The host used in the URL to Cambria. Can be "host:port", can + * be multiple comma-separated entries. + * @param topic + * The topic on which to publish messages. + * @param maxBatchSize + * The largest set of messages to batch + * @param maxAgeMs + * The maximum age of a message waiting in a batch + * @param compress + * use gzip compression + * + * @return a publisher + */ + public static CambriaBatchingPublisher createBatchingPublisher( + String hostlist, String topic, int maxBatchSize, long maxAgeMs, + boolean compress) { + return createBatchingPublisher( + DMaaPCambriaConsumerImpl.stringToList(hostlist), topic, + maxBatchSize, maxAgeMs, compress); + } + + /** + * Create a publisher that batches messages. Be sure to close the publisher + * to send the last batch and ensure a clean shutdown. + * + * @param hostSet + * A set of hosts to be used in the URL to Cambria. Can be + * "host:port". Use multiple entries to enable failover. + * @param topic + * The topic on which to publish messages. 
+ * @param maxBatchSize + * The largest set of messages to batch + * @param maxAgeMs + * The maximum age of a message waiting in a batch + * @param compress + * use gzip compression + * + * @return a publisher + */ + public static CambriaBatchingPublisher createBatchingPublisher( + String[] hostSet, String topic, int maxBatchSize, long maxAgeMs, + boolean compress) { + final TreeSet hosts = new TreeSet(); + for (String hp : hostSet) { + hosts.add(hp); + } + return createBatchingPublisher(hosts, topic, maxBatchSize, maxAgeMs, + compress); + } + + /** + * Create a publisher that batches messages. Be sure to close the publisher + * to send the last batch and ensure a clean shutdown. + * + * @param hostSet + * A set of hosts to be used in the URL to Cambria. Can be + * "host:port". Use multiple entries to enable failover. + * @param topic + * The topic on which to publish messages. + * @param maxBatchSize + * The largest set of messages to batch + * @param maxAgeMs + * The maximum age of a message waiting in a batch + * @param compress + * use gzip compression + * + * @return a publisher + */ + public static CambriaBatchingPublisher createBatchingPublisher( + Collection hostSet, String topic, int maxBatchSize, + long maxAgeMs, boolean compress) { + return new DMaaPCambriaSimplerBatchPublisher.Builder() + .againstUrls(hostSet).onTopic(topic) + .batchTo(maxBatchSize, maxAgeMs).compress(compress).build(); + } + + /** + * Create an identity manager client to work with API keys. + * + * @param hostSet + * A set of hosts to be used in the URL to Cambria. Can be + * "host:port". Use multiple entries to enable failover. + * @param apiKey + * Your API key + * @param apiSecret + * Your API secret + * @return an identity manager + */ + + + /** + * Create a topic manager for working with topics. + * + * @param hostSet + * A set of hosts to be used in the URL to Cambria. Can be + * "host:port". Use multiple entries to enable failover. + * @param apiKey + * Your API key + * @param apiSecret + * Your API secret + * @return a topic manager + */ + + + /** + * Inject a consumer. Used to support unit tests. + * + * @param cc + */ + public static void $testInject(CambriaConsumer cc) { + sfMock = cc; + } + + private static CambriaConsumer sfMock = null; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java new file mode 100644 index 0000000..6bedd18 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java @@ -0,0 +1,100 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metrics.publisher.impl; + +import java.net.MalformedURLException; +import java.util.Collection; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.TimeUnit; + +import org.json.JSONArray; +import org.json.JSONException; + +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; + +//import org.slf4j.LoggerFactory; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.apiClient.http.CacheUse; +import com.att.nsa.apiClient.http.HttpClient; + +/** + * + * @author anowarul.islam + * + */ +public class CambriaBaseClient extends HttpClient implements org.onap.dmaap.dmf.mr.metrics.publisher.CambriaClient +{ + protected CambriaBaseClient ( Collection hosts ) throws MalformedURLException + { + this ( hosts, null ); + } + + public CambriaBaseClient ( Collection hosts, String clientSignature ) throws MalformedURLException + { + + + + super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000); + + + fLog = EELFManager.getInstance().getLogger(this.getClass().getName()); + + } + + @Override + public void close () + { + } + + public Set jsonArrayToSet ( JSONArray a ) throws JSONException + { + if ( a == null ) return null; + + final TreeSet set = new TreeSet<>(); + for ( int i=0; i hostPart, final String topic, final String consumerGroup, + final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException { + super(hostPart, topic + "::" + consumerGroup + "::" + consumerId); + + fTopic = topic; + fGroup = consumerGroup; + fId = consumerId; + fTimeoutMs = timeoutMs; + fLimit = limit; + fFilter = filter; + + setApiCredentials(apiKey, apiSecret); + } + + /** + * method converts String to list + * + * @param str + * @return + */ + public static List stringToList(String str) { + final LinkedList set = new LinkedList(); + if (str != null) { + final String[] parts = str.trim().split(","); + for (String part : parts) { + final String trimmed = part.trim(); + if (trimmed.length() > 0) { + set.add(trimmed); + } + } + } + return set; + } + + @Override + public Iterable fetch() throws IOException { + // fetch with the timeout and limit set in constructor + return fetch(fTimeoutMs, fLimit); + } + + @Override + public Iterable fetch(int timeoutMs, int limit) throws IOException { + final LinkedList msgs = new LinkedList(); + + final String urlPath = createUrlPath(timeoutMs, limit); + + getLog().info("UEB GET " + urlPath); + try { + final JSONObject o = get(urlPath); + + if (o != null) { + final JSONArray a = o.getJSONArray("result"); + if (a != null) { + for (int i = 0; i < a.length(); i++) { + msgs.add(a.getString(i)); + } + } + } + } catch (HttpObjectNotFoundException e) { + // this can happen if the topic is not yet created. ignore. 
+ Log.error("Failed due to topic is not yet created" + e); + } catch (JSONException e) { + // unexpected response + reportProblemWithResponse(); + Log.error("Failed due to jsonException", e); + } catch (HttpException e) { + throw new IOException(e); + } + + return msgs; + } + + public String createUrlPath(int timeoutMs, int limit) { + final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId)); + final StringBuilder adds = new StringBuilder(); + if (timeoutMs > -1) { + adds.append("timeout=").append(timeoutMs); + } + + if (limit > -1) { + if (adds.length() > 0) { + adds.append("&"); + } + adds.append("limit=").append(limit); + } + if (fFilter != null && fFilter.length() > 0) { + try { + if (adds.length() > 0) { + adds.append("&"); + } + adds.append("filter=").append(URLEncoder.encode(fFilter, "UTF-8")); + } catch (UnsupportedEncodingException e) { + Log.error("Failed due to UnsupportedEncodingException" + e); + } + } + if (adds.length() > 0) { + url.append("?").append(adds.toString()); + } + return url.toString(); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java new file mode 100644 index 0000000..634ea80 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java @@ -0,0 +1,422 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.metrics.publisher.impl; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.net.MalformedURLException; +import java.nio.channels.NotYetConnectedException; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.zip.GZIPOutputStream; + +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.Entity; +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.Response; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaPublisherUtility; + +/** + * + * class DMaaPCambriaSimplerBatchPublisher used to send the publish the messages + * in batch + * + * @author anowarul.islam + * + */ +public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient + implements org.onap.dmaap.dmf.mr.metrics.publisher.CambriaBatchingPublisher { + /** + * + * static inner class initializes with urls, topic,batchSize + * + * @author anowarul.islam + * + */ + public static class Builder { + public Builder() { + } + + /** + * constructor initialize with url + * + * @param baseUrls + * @return + * + */ + public Builder againstUrls(Collection baseUrls) { + fUrls = baseUrls; + return this; + } + + /** + * constructor initializes with topics + * + * @param topic + * @return + * + */ + public Builder onTopic(String topic) { + fTopic = topic; + return this; + } + + /** + * constructor initilazes with batch size and batch time + * + * @param maxBatchSize + * @param maxBatchAgeMs + * @return + * + */ + public Builder batchTo(int maxBatchSize, long maxBatchAgeMs) { + fMaxBatchSize = maxBatchSize; + fMaxBatchAgeMs = maxBatchAgeMs; + return this; + } + + /** + * constructor initializes with compress + * + * @param compress + * @return + */ + public Builder compress(boolean compress) { + fCompress = compress; + return this; + } + + /** + * method returns DMaaPCambriaSimplerBatchPublisher object + * + * @return + */ + public DMaaPCambriaSimplerBatchPublisher build() { + + try { + return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress); + } catch (MalformedURLException e) { + + NotYetConnectedException exception=new NotYetConnectedException(); + exception.setStackTrace(e.getStackTrace()); + + throw exception ; + + } + } + + private Collection fUrls; + private String fTopic; + private int fMaxBatchSize = 100; + private long fMaxBatchAgeMs = 1000; + private boolean fCompress = false; + }; + + /** + * + * @param partition + * @param msg + */ + @Override + public int send(String partition, String msg) { + return send(new message(partition, msg)); + } + + /** + * @param msg + */ + @Override + public int send(message msg) { + final LinkedList list = new LinkedList(); + list.add(msg); + return send(list); + } + + /** + * @param msgs + */ + @Override + public synchronized int send(Collection msgs) { + if (fClosed) { + throw new IllegalStateException("The publisher was closed."); + } + + for (message userMsg : msgs) { + fPending.add(new TimestampedMessage(userMsg)); + } + return getPendingMessageCount(); + } + + /** + * getPending message count + */ + 
@Override + public synchronized int getPendingMessageCount() { + return fPending.size(); + } + + /** + * + * @exception InterruptedException + * @exception IOException + */ + @Override + public void close() { + try { + final List remains = close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); + if (remains.isEmpty()) { + getLog().warn("Closing publisher with " + remains.size() + " messages unsent. " + + "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close."); + } + } catch (InterruptedException e) { + getLog().warn("Possible message loss. " + e.getMessage(), e); + } catch (IOException e) { + getLog().warn("Possible message loss. " + e.getMessage(), e); + } + } + + /** + * @param time + * @param unit + */ + @Override + public List close(long time, TimeUnit unit) throws IOException, InterruptedException { + synchronized (this) { + fClosed = true; + + // stop the background sender + fExec.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); + fExec.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); + fExec.shutdown(); + } + + final long now = Clock.now(); + final long waitInMs = TimeUnit.MILLISECONDS.convert(time, unit); + final long timeoutAtMs = now + waitInMs; + + while (Clock.now() < timeoutAtMs && getPendingMessageCount() > 0) { + send(true); + Thread.sleep(250); + } + // synchronizing the current object + synchronized (this) { + final LinkedList result = new LinkedList(); + fPending.drainTo(result); + return result; + } + } + + /** + * Possibly send a batch to the cambria server. This is called by the + * background thread and the close() method + * + * @param force + */ + private synchronized void send(boolean force) { + if (force || shouldSendNow()) { + if (!sendBatch()) { + getLog().warn("Send failed, " + fPending.size() + " message to send."); + + // note the time for back-off + fDontSendUntilMs = sfWaitAfterError + Clock.now(); + } + } + } + + /** + * + * @return + */ + private synchronized boolean shouldSendNow() { + boolean shouldSend = false; + if (fPending.isEmpty()) { + final long nowMs = Clock.now(); + + shouldSend = (fPending.size() >= fMaxBatchSize); + if (!shouldSend) { + final long sendAtMs = fPending.peek().timestamp + fMaxBatchAgeMs; + shouldSend = sendAtMs <= nowMs; + } + + // however, wait after an error + shouldSend = shouldSend && nowMs >= fDontSendUntilMs; + } + return shouldSend; + } + + /** + * + * @return + */ + private synchronized boolean sendBatch() { + // it's possible for this call to be made with an empty list. in this + // case, just return. + if (fPending.isEmpty()) { + return true; + } + + final long nowMs = Clock.now(); + final String url = CambriaPublisherUtility.makeUrl(fTopic); + + getLog().info("sending " + fPending.size() + " msgs to " + url + ". 
Oldest: " + + (nowMs - fPending.peek().timestamp) + " ms"); + + try { + + final ByteArrayOutputStream baseStream = new ByteArrayOutputStream(); + OutputStream os = baseStream; + if (fCompress) { + os = new GZIPOutputStream(baseStream); + } + for (TimestampedMessage m : fPending) { + os.write(("" + m.fPartition.length()).getBytes()); + os.write('.'); + os.write(("" + m.fMsg.length()).getBytes()); + os.write('.'); + os.write(m.fPartition.getBytes()); + os.write(m.fMsg.getBytes()); + os.write('\n'); + } + os.close(); + + final long startMs = Clock.now(); + + // code from REST Client Starts + + + + + Client client = ClientBuilder.newClient(); + String metricTopicname = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic"); + if (null==metricTopicname) { + + metricTopicname="msgrtr.apinode.metrics.dmaap"; + } + WebTarget target = client + .target("http://localhost:" + CambriaConstants.kStdCambriaServicePort); + target = target.path("/events/" + fTopic); + getLog().info("url : " + target.getUri().toString()); + // API Key + + Entity data = Entity.entity(baseStream.toByteArray(), "application/cambria"); + + Response response = target.request().post(data); + + getLog().info("Response received :: " + response.getStatus()); + getLog().info("Response received :: " + response.toString()); + + // code from REST Client Ends + + + fPending.clear(); + return true; + } catch (IllegalArgumentException x) { + getLog().warn(x.getMessage(), x); + } + + catch (IOException x) { + getLog().warn(x.getMessage(), x); + } + return false; + } + + private final String fTopic; + private final int fMaxBatchSize; + private final long fMaxBatchAgeMs; + private final boolean fCompress; + private boolean fClosed; + + private final LinkedBlockingQueue fPending; + private long fDontSendUntilMs; + private final ScheduledThreadPoolExecutor fExec; + + private static final long sfWaitAfterError = 1000; + + /** + * + * @param hosts + * @param topic + * @param maxBatchSize + * @param maxBatchAgeMs + * @param compress + */ + private DMaaPCambriaSimplerBatchPublisher(Collection hosts, String topic, int maxBatchSize, + long maxBatchAgeMs, boolean compress) throws MalformedURLException { + + super(hosts); + + if (topic == null || topic.length() < 1) { + throw new IllegalArgumentException("A topic must be provided."); + } + + fClosed = false; + fTopic = topic; + fMaxBatchSize = maxBatchSize; + fMaxBatchAgeMs = maxBatchAgeMs; + fCompress = compress; + + fPending = new LinkedBlockingQueue(); + fDontSendUntilMs = 0; + + fExec = new ScheduledThreadPoolExecutor(1); + fExec.scheduleAtFixedRate(new Runnable() { + @Override + public void run() { + send(false); + } + }, 100, 50, TimeUnit.MILLISECONDS); + } + + /** + * + * + * @author anowarul.islam + * + */ + private static class TimestampedMessage extends message { + /** + * to store timestamp value + */ + public final long timestamp; + + /** + * constructor initialize with message + * + * @param m + * + */ + public TimestampedMessage(message m) { + super(m); + timestamp = Clock.now(); + } + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaEventSet.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaEventSet.java new file mode 100644 index 0000000..4b65628 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaEventSet.java @@ -0,0 +1,114 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * 
org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.resources; + +import java.io.IOException; +import java.io.InputStream; +import java.util.zip.GZIPInputStream; + +import javax.servlet.http.HttpServletResponse; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaJsonStreamReader; +import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaRawStreamReader; +import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaStreamReader; +import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaTextStreamReader; +import com.att.nsa.apiServer.streams.ChunkedInputStream; +import com.att.nsa.drumlin.service.standards.HttpStatusCodes; + +/** + * An inbound event set. + * + * @author peter + */ +public class CambriaEventSet { + private final reader fReader; + + /** + * constructor initialization + * + * @param mediaType + * @param originalStream + * @param chunked + * @param defPartition + * @throws CambriaApiException + */ + public CambriaEventSet(String mediaType, InputStream originalStream, + boolean chunked, String defPartition) throws CambriaApiException { + InputStream is = originalStream; + if (chunked) { + is = new ChunkedInputStream(originalStream); + } + + if (("application/json").equals(mediaType)) { + if (chunked) { + throw new CambriaApiException( + HttpServletResponse.SC_BAD_REQUEST, + "The JSON stream reader doesn't support chunking."); + } + fReader = new CambriaJsonStreamReader(is, defPartition); + } else if (("application/cambria").equals(mediaType)) { + fReader = new CambriaStreamReader(is); + } else if (("application/cambria-zip").equals(mediaType)) { + try { + is = new GZIPInputStream(is); + } catch (IOException e) { + throw new CambriaApiException(HttpStatusCodes.k400_badRequest, + "Couldn't read compressed format: " + e); + } + fReader = new CambriaStreamReader(is); + } else if (("text/plain").equals(mediaType)) { + fReader = new CambriaTextStreamReader(is, defPartition); + } else { + fReader = new CambriaRawStreamReader(is, defPartition); + } + } + + /** + * Get the next message from this event set. Returns null when the end of + * stream is reached. Will block until a message arrives (or the stream is + * closed/broken). 
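+	 *
+	 * A typical read loop over a request body looks roughly like this (the
+	 * media type and default partition are placeholders):
+	 *
+	 *   CambriaEventSet events = new CambriaEventSet("application/json",
+	 *       request.getInputStream(), false, "defaultPartition");
+	 *   Publisher.message m;
+	 *   while ((m = events.next()) != null) {
+	 *       // hand m.getKey() / m.getMessage() to the configured Publisher
+	 *   }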
+ * + * @return a message, or null + * @throws IOException + * @throws CambriaApiException + */ + public message next() throws IOException, CambriaApiException { + return fReader.next(); + } + + /** + * + * @author anowarul.islam + * + */ + public interface reader { + /** + * + * @return + * @throws IOException + * @throws CambriaApiException + */ + message next() throws IOException, CambriaApiException; + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaOutboundEventStream.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaOutboundEventStream.java new file mode 100644 index 0000000..19e8e65 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/resources/CambriaOutboundEventStream.java @@ -0,0 +1,554 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.resources; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Date; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.json.JSONException; +import org.json.JSONObject; +import org.json.JSONTokener; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder.StreamWriter; +import org.onap.dmaap.dmf.mr.utils.Utils; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/*import com.att.sa.highlandPark.config.HpConfigContext; +import com.att.sa.highlandPark.config.HpReaderException; +import com.att.sa.highlandPark.events.HpJsonEvent; +import com.att.sa.highlandPark.events.HpJsonEventFactory; +import com.att.sa.highlandPark.processor.HpAlarmFilter; +import com.att.sa.highlandPark.processor.HpEvent; +import com.att.sa.highlandPark.processor.HpProcessingEngine; +import com.att.sa.highlandPark.processor.HpProcessingEngine.EventFactory; +*/ +/** + * class used to write the consumed messages + * + * @author anowarul.islam + * + */ +public class CambriaOutboundEventStream implements StreamWriter { + private static final int kTopLimit = 1024 * 4; + + /** + * + * static innerclass it takes all the input parameter for kafka consumer + * like limit, timeout, meta, pretty + * + * @author anowarul.islam + * + */ + public static class Builder { + + // Required + private final Consumer fConsumer; + // private final rrNvReadable fSettings; // used during write to tweak + // format, decide to explicitly + // close stream or not + + // Optional + private int fLimit; + private int fTimeoutMs; + private String fTopicFilter; + private boolean fPretty; + private boolean fWithMeta; + ArrayList fKafkaConsumerList; + + + /** + * constructor it initializes all the consumer parameters + * + * @param c + * @param settings + */ + public Builder(Consumer c) { + this.fConsumer = c; + + + fLimit = CambriaConstants.kNoTimeout; + fTimeoutMs = CambriaConstants.kNoLimit; + fTopicFilter = CambriaConstants.kNoFilter; + fPretty = false; + fWithMeta = false; + + + } + + /** + * + * constructor initializes with limit + * + * @param l + * only l no of messages will be consumed + * @return + */ + public Builder limit(int l) { + this.fLimit = l; + return this; + } + + /** + * constructor initializes with timeout + * + * @param t + * if there is no message to consume, them DMaaP will wait + * for t time + * @return + */ + public Builder timeout(int t) { + this.fTimeoutMs = t; + return this; + } + + /** + * constructor initializes with filter + * + * @param f + * filter + * @return + */ + public Builder filter(String f) { + this.fTopicFilter = f; + return this; + } + + /** + * constructor initializes with boolean value pretty + * + * @param p + * messages print in new line + * @return + */ + public Builder pretty(boolean p) { + fPretty = p; + return this; + } + + /** + * constructor initializes with boolean value meta + * + * @param withMeta, + * 
along with messages offset will print + * @return + */ + public Builder withMeta(boolean withMeta) { + fWithMeta = withMeta; + return this; + } + + // public Builder atOffset ( int pos ) + + + // return this; + // } + /** + * method returs object of CambriaOutboundEventStream + * + * @return + * @throws CambriaApiException + */ + public CambriaOutboundEventStream build() throws CambriaApiException { + return new CambriaOutboundEventStream(this); + } + } + + @SuppressWarnings("unchecked") + /** + * + * @param builder + * @throws CambriaApiException + * + */ + private CambriaOutboundEventStream(Builder builder) throws CambriaApiException { + fConsumer = builder.fConsumer; + fLimit = builder.fLimit; + fTimeoutMs = builder.fTimeoutMs; + + fSent = 0; + fPretty = builder.fPretty; + fWithMeta = builder.fWithMeta; + fKafkaConsumerList = builder.fKafkaConsumerList; + /* if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) { + fHpAlarmFilter = null; + fHppe = null; + } else { + try { + final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter)); + HpConfigContext cc = new HpConfigContext(); + fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter); + final EventFactory ef = new HpJsonEventFactory(); + fHppe = new HpProcessingEngine(ef); + } catch (HpReaderException e) { + // JSON was okay, but the filter engine says it's bogus + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, + "Couldn't create filter: " + e.getMessage()); + } catch (JSONException e) { + // user sent a bogus JSON object + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, + "Couldn't parse JSON: " + e.getMessage()); + } + }*/ + } + + /** + * + * interface provides onWait and onMessage methods + * + */ + public interface operation { + /** + * Call thread.sleep + * + * @throws IOException + */ + void onWait() throws IOException; + + /** + * provides the output based in the consumer paramter + * + * @param count + * @param msg + * @throws IOException + */ + + void onMessage(int count, String msg, String transId, long offSet) throws IOException, JSONException; + } + + /** + * + * @return + */ + public int getSentCount() { + return fSent; + } + + @Override + /** + * + * @param os + * throws IOException + */ + public void write(final OutputStream os) throws IOException { + + + // final boolean transactionEnabled = istransEnable; + // synchronized(this){ + os.write('['); + fSent = forEachMessage(new operation() { + @Override + public void onMessage(int count, String msg, String transId, long offSet) + throws IOException, JSONException { + + if (count > 0) { + os.write(','); + } + if (fWithMeta) { + final JSONObject entry = new JSONObject(); + entry.put("offset", offSet); + entry.put("message", msg); + os.write(entry.toString().getBytes()); + } else { + + String jsonString = JSONObject.valueToString(msg); + os.write(jsonString.getBytes()); + } + + if (fPretty) { + os.write('\n'); + } + + String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap + .getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic"); + if (null == metricTopicname) + metricTopicname = "msgrtr.apinode.metrics.dmaap"; + if (!metricTopicname.equalsIgnoreCase(topic.getName())) { + try { + if (istransEnable && istransType) { + // final String transactionId = + + + StringBuilder consumerInfo = new StringBuilder(); + if (null != dmaapContext && null != dmaapContext.getRequest()) { + final HttpServletRequest request = dmaapContext.getRequest(); + consumerInfo.append("consumerIp= \"" + 
request.getRemoteHost() + "\","); + consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\","); + consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\","); + consumerInfo.append("consumerGroup= \"" + + getConsumerGroupFromRequest(request.getRequestURI()) + "\","); + consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\","); + } + log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transId + + "\",messageLength= \"" + msg.length() + "\",topic= \"" + topic.getName() + "\"]"); + } + } catch (Exception e) { + } + } + + } + + @Override + /** + * + * It makes thread to wait + * + * @throws IOException + */ + public void onWait() throws IOException { + os.flush(); // likely totally unnecessary for a network socket + try { + // FIXME: would be good to wait/signal + Thread.sleep(100); + } catch (InterruptedException e) { + // ignore + } + } + }); + + + if (null != dmaapContext && istransEnable && istransType) { + + dmaapContext.getResponse().setHeader("transactionId", + Utils.getResponseTransactionId(responseTransactionId)); + } + + os.write(']'); + os.flush(); + + boolean close_out_stream = true; + String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "close.output.stream"); + if (null != strclose_out_stream) + close_out_stream = Boolean.parseBoolean(strclose_out_stream); + + + if (close_out_stream) { + os.close(); + + } + } + + /** + * + * @param requestURI + * @return + */ + private String getConsumerGroupFromRequest(String requestURI) { + if (null != requestURI && !requestURI.isEmpty()) { + + String consumerDetails = requestURI.substring(requestURI.indexOf("events/") + 7); + + int startIndex = consumerDetails.indexOf("/") + 1; + int endIndex = consumerDetails.lastIndexOf("/"); + return consumerDetails.substring(startIndex, endIndex); + } + return null; + } + + /** + * + * @param op + * @return + * @throws IOException + * @throws JSONException + */ + public int forEachMessage(operation op) throws IOException, JSONException { + final int effectiveLimit = (fLimit == 0 ? 
kTopLimit : fLimit); + + int count = 0; + boolean firstPing = true; + // boolean isTransType=false; + final long startMs = System.currentTimeMillis(); + final long timeoutMs = fTimeoutMs + startMs -500; //500 ms used in poll + + while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) { + if (!firstPing) { + op.onWait(); + } + firstPing = false; + + + Consumer.Message msgRecord = null; + while (count < effectiveLimit && (msgRecord = + fConsumer.nextMessage()) != null) { + + String message = ""; + String transactionid = ""; + try { + // String msgRecord = msg; + JSONObject jsonMessage = new JSONObject(msgRecord); + String[] keys = JSONObject.getNames(jsonMessage); + boolean wrapheader1 = false; + boolean wrapheader2 = false; + boolean found_attr3 = false; + String wrapElement1 = "message"; + String wrapElement2 = "msgWrapMR"; + String transIdElement = "transactionId"; + if (null != keys) { + for (String key : keys) { + if (key.equals(wrapElement1)) { + wrapheader1 = true; + } else if (key.equals(wrapElement2)) { + wrapheader2 = true; + } else if (key.equals(transIdElement)) { + found_attr3 = true; + transactionid = jsonMessage.getString(key); + } + } + } + + // returns contents of attribute 1 if both attributes + // present, otherwise + // the whole msg + if (wrapheader2 && found_attr3) { + message = jsonMessage.getString(wrapElement2); + } else if (wrapheader1 && found_attr3) { + message = jsonMessage.getString(wrapElement1); + } else { + message = msgRecord.getMessage(); + } + // jsonMessage = extractMessage(jsonMessage , + // "message","msgWrapMR","transactionId"); + istransType = true; + } catch (JSONException e) { // This check is required for the + // message sent by MR AAF flow but + // consumed by UEB ACL flow which + // wont expect transaction id in + // cambria client api + // Ignore + log.info("JSON Exception logged when the message is non JSON Format"); + } catch (Exception exp) { + log.info("****Some Exception occured for writing messages in topic" + topic.getName() + + " Exception" + exp); + } + if (message == null || message.equals("")) { + istransType = false; + message = msgRecord.getMessage(); + } + + // If filters are enabled/set, message should be in JSON format + // for filters to work for + // otherwise filter will automatically ignore message in + // non-json format. 
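+				// For reference: the unwrapping above emits
+				//   {"transactionId":"...","message":"<payload>"}   as "<payload>"
+				//   {"transactionId":"...","msgWrapMR":"<payload>"} as "<payload>"
+				// and passes any other record (including non-JSON text) through unchanged.
+				// With the Highland Park filter code commented out in this version,
+				// filterMatches() below always returns true.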
+ if (filterMatches(message)) { + op.onMessage(count, message, transactionid, msgRecord.getOffset()); + count++; + + } + + } + } + return count; + } + + + + /** + * + * Checks whether filter is initialized + */ + + + + + /** + * + * @param msg + * @return + */ + private boolean filterMatches(String msg) { + boolean result = true; + + + + + + + + + + + + + + return result; + } + + public DMaaPContext getDmaapContext() { + return dmaapContext; + } + + public void setDmaapContext(DMaaPContext dmaapContext) { + this.dmaapContext = dmaapContext; + } + + public Topic getTopic() { + return topic; + } + + public void setTopic(Topic topic) { + this.topic = topic; + } + + public void setTopicStyle(boolean aaftopic) { + this.isAAFTopic = aaftopic; + } + + public void setTransEnabled(boolean transEnable) { + this.istransEnable = transEnable; + } + + + private final Consumer fConsumer; + private final int fLimit; + private final int fTimeoutMs; + + private final boolean fPretty; + private final boolean fWithMeta; + private int fSent; + + //private final HpProcessingEngine fHppe; + private DMaaPContext dmaapContext; + private String responseTransactionId; + private Topic topic; + private boolean isAAFTopic = false; + private boolean istransEnable = false; + private ArrayList fKafkaConsumerList; + private boolean istransType = true; + // private static final Logger log = + + + private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class); +} \ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java new file mode 100644 index 0000000..fe2fc85 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java @@ -0,0 +1,169 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.resources.streamReaders; + +import java.io.InputStream; + +import javax.servlet.http.HttpServletResponse; + +import org.json.JSONException; +import org.json.JSONObject; +import org.json.JSONTokener; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.beans.LogDetails; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet.reader; + +/** + * + * @author anowarul.islam + * + */ +public class CambriaJsonStreamReader implements reader { + private final JSONTokener fTokens; + private final boolean fIsList; + private long fCount; + private final String fDefPart; + public static final String kKeyField = "cambria.partition"; + + /** + * + * @param is + * @param defPart + * @throws CambriaApiException + */ + public CambriaJsonStreamReader(InputStream is, String defPart) throws CambriaApiException { + try { + fTokens = new JSONTokener(is); + fCount = 0; + fDefPart = defPart; + + final int c = fTokens.next(); + if (c == '[') { + fIsList = true; + } else if (c == '{') { + fTokens.back(); + fIsList = false; + } else { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object."); + } + } catch (JSONException e) { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); + } + } + + @Override + public message next() throws CambriaApiException { + try { + if (!fTokens.more()) { + return null; + } + + final int c = fTokens.next(); + + + if (fIsList) { + if (c == ']' || (fCount > 0 && c == 10)) + return null; + + + if (fCount > 0 && c != ',' && c!= 10) { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, + "Expected ',' or closing ']' after last object."); + } + + if (fCount == 0 && c != '{' && c!= 10 && c!=32) { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected { to start an object."); + } + } else if (fCount != 0 || c != '{') { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected '{' to start an object."); + } + + if (c == '{') { + fTokens.back(); + } + final JSONObject o = new JSONObject(fTokens); + fCount++; + return new msg(o); + } catch (JSONException e) { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); + + } + } + + private class msg implements message { + private final String fKey; + private String fMsg; + private LogDetails logDetails; + private boolean transactionEnabled; + + /** + * constructor + * + * @param o + */ + + + + public msg(JSONObject o) { + String key = o.optString(kKeyField, fDefPart); + if (key == null) { + key = "" + System.currentTimeMillis(); + } + fKey = key; + + fMsg = o.toString().trim(); + + } + + @Override + public String getKey() { + return fKey; + } + + @Override + public String getMessage() { + return fMsg; + } + + @Override + public boolean isTransactionEnabled() { + return transactionEnabled; + } + + @Override + public void setTransactionEnabled(boolean transactionEnabled) { + this.transactionEnabled = transactionEnabled; + } + + @Override + public void setLogDetails(LogDetails logDetails) { + this.logDetails = logDetails; + } + + @Override + public LogDetails getLogDetails() { + return logDetails; + } + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java 
b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java new file mode 100644 index 0000000..93baee0 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java @@ -0,0 +1,141 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.resources.streamReaders; + +import java.io.IOException; +import java.io.InputStream; + +import javax.servlet.http.HttpServletResponse; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.beans.LogDetails; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet.reader; +import com.att.nsa.util.StreamTools; + +/** + * + * This stream reader reads raw bytes creating a single message. + * @author peter + * + */ +public class CambriaRawStreamReader implements reader +{ + /** + * This is the constructor of CambriaRawStreamReader, it will basically the read from Input stream + * @param is + * @param defPart + * @throws CambriaApiException + */ + public CambriaRawStreamReader ( InputStream is, String defPart ) throws CambriaApiException + { + fStream = is; + fDefPart = defPart; + fClosed = false; + } + + @Override + /** + * + * next() method reads the bytes and + * iterates through the messages + * @throws CambriaApiException + * + */ + public message next () throws CambriaApiException + { + if ( fClosed ) return null; + + try + { + final byte[] rawBytes = StreamTools.readBytes ( fStream ); + fClosed = true; + return new message () + { + private LogDetails logDetails; + private boolean transactionEnabled; + + /** + * returns boolean value which + * indicates whether transaction is enabled + */ + public boolean isTransactionEnabled() { + return transactionEnabled; + } + + /** + * sets boolean value which + * indicates whether transaction is enabled + */ + public void setTransactionEnabled(boolean transactionEnabled) { + this.transactionEnabled = transactionEnabled; + } + + @Override + /** + * @returns key + * It ch4ecks whether fDefPart value is Null. + * If yes, it will return ystem.currentTimeMillis () else + * it will return fDefPart variable value + */ + public String getKey () + { + return fDefPart == null ? 
"" + System.currentTimeMillis () : fDefPart; + } + + @Override + /** + * returns the message in String type object + */ + public String getMessage () + { + return new String ( rawBytes ); + } + + /** + * set log details in logDetails variable + */ + @Override + public void setLogDetails(LogDetails logDetails) { + this.logDetails = logDetails; + } + + @Override + /** + * get the log details + */ + public LogDetails getLogDetails() { + return this.logDetails; + } + }; + } + catch ( IOException e ) + { + throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () ); + } + } + + private final InputStream fStream; + private final String fDefPart; + private boolean fClosed; + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaStreamReader.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaStreamReader.java new file mode 100644 index 0000000..6df1dd4 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaStreamReader.java @@ -0,0 +1,229 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.resources.streamReaders; + +import java.io.IOException; +import java.io.InputStream; + +import javax.servlet.http.HttpServletResponse; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.beans.LogDetails; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet.reader; + +/** + * Read an optionally chunked stream in the Cambria app format. This format + * allows for speedier server-side message parsing than pure JSON. It's looks + * like:
+ *
+ * <keyLength>.<msgLength>.<key><message>
+ *
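Editor's illustrative sketch (not part of this change): framing a single key/message pair for the layout parsed by this reader. The helper name is hypothetical, and character counts equal byte counts only for single-byte (ASCII) content, which the sketch assumes.

    // Illustrative helper, not part of this patch: builds one
    // <keyLength>.<msgLength>.<key><message> frame for CambriaStreamReader.
    // Assumes ASCII content so that character length equals byte length.
    static String frameCambriaMessage(String key, String msg) {
        return key.length() + "." + msg.length() + "." + key + msg;
    }
    // frameCambriaMessage("k1", "hello") yields "2.5.k1hello"; several frames
    // may be concatenated in one request body, optionally separated by whitespace.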
+ * Whitespace before/after each entry is ignored, so messages can be delivered + * with newlines between them, or not. + * + * @author peter + * + */ +public class CambriaStreamReader implements reader { + /** + * constructor initializing InputStream with fStream + * + * @param senderStream + * @throws CambriaApiException + */ + public CambriaStreamReader(InputStream senderStream) throws CambriaApiException { + fStream = senderStream; + } + + @Override + /** + * next method iterates through msg length + * throws IOException + * throws CambriaApiException + * + */ + public message next() throws IOException, CambriaApiException { + final int keyLen = readLength(); + if (keyLen == -1) + return null; + + final int msgLen = readLength(); + final String keyPart = readString(keyLen); + final String msgPart = readString(msgLen); + + return new msg(keyPart, msgPart); + } + + private static class msg implements message { + /** + * constructor initialization + * + * @param key + * @param msg + */ + public msg(String key, String msg) { + // if no key, use the current time. This allows the message to be + // delivered + // in any order without forcing it into a single partition as empty + // string would. + if (key.length() < 1) { + key = "" + System.currentTimeMillis(); + } + + fKey = key; + fMsg = msg; + } + + @Override + /** + * @returns fkey + */ + public String getKey() { + return fKey; + } + + @Override + /** + * returns the message in String type object + */ + public String getMessage() { + return fMsg; + } + + private final String fKey; + private final String fMsg; + private LogDetails logDetails; + private boolean transactionEnabled; + + /** + * returns boolean value which + * indicates whether transaction is enabled + */ + public boolean isTransactionEnabled() { + return transactionEnabled; + } + + /** + * sets boolean value which + * indicates whether transaction is enabled + */ + public void setTransactionEnabled(boolean transactionEnabled) { + this.transactionEnabled = transactionEnabled; + } + + @Override + /** + * set log details in logDetails variable + */ + public void setLogDetails(LogDetails logDetails) { + this.logDetails = logDetails; + } + + @Override + /** + * get the log details + */ + public LogDetails getLogDetails() { + return this.logDetails; + } + + } + + private final InputStream fStream; + + /** + * max cambria length indicates message length + + // This limit is here to prevent the server from spinning on a long string of numbers + // that is delivered with 'application/cambria' as the format. The limit needs to be + // large enough to support the max message length (currently 1MB, the default Kafka + // limit) + * */ + + private static final int kMaxCambriaLength = 4*1000*1024; + + + /** + * + * @return + * @throws IOException + * @throws CambriaApiException + */ + private int readLength() throws IOException, CambriaApiException { + // always ignore leading whitespace + int c = fStream.read(); + while (Character.isWhitespace(c)) { + c = fStream.read(); + } + + if (c == -1) { + return -1; + } + + int result = 0; + while (Character.isDigit(c)) { + result = (result * 10) + (c - '0'); + if (result > kMaxCambriaLength) { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length."); + } + c = fStream.read(); + } + + if (c != '.') { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . 
after length."); + } + + return result; + } + + /** + * + * @param len + * @return + * @throws IOException + * @throws CambriaApiException + */ + private String readString(int len) throws IOException, CambriaApiException { + final byte[] buffer = new byte[len]; + + final long startMs = System.currentTimeMillis(); + final long timeoutMs = startMs + 30000; // FIXME configurable + + int readTotal = 0; + while (readTotal < len) { + final int read = fStream.read(buffer, readTotal, len - readTotal); + if (read == -1 || System.currentTimeMillis() > timeoutMs) { + // EOF + break; + } + readTotal += read; + } + + if (readTotal < len) { + throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, + "End of stream while reading " + len + " bytes"); + } + + return new String(buffer); + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java new file mode 100644 index 0000000..67a31bb --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java @@ -0,0 +1,140 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.resources.streamReaders; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; + +import javax.servlet.http.HttpServletResponse; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.beans.LogDetails; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet.reader; + +/** + * This stream reader just pulls single lines. It uses the default partition if provided. If + * not, the key is the current time, which does not guarantee ordering. 
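Editor's rough usage sketch (the request body and variable names below are hypothetical, not part of this patch): each line of the stream becomes its own message, and with no default partition the key falls back to the current time.

    // Requires java.io.ByteArrayInputStream and java.nio.charset.StandardCharsets;
    // next() declares CambriaApiException.
    InputStream body = new ByteArrayInputStream(
            "first event\nsecond event\n".getBytes(StandardCharsets.UTF_8));
    CambriaTextStreamReader textReader = new CambriaTextStreamReader(body, null);
    Publisher.message m1 = textReader.next();  // getMessage() == "first event", key defaults to current time
    Publisher.message m2 = textReader.next();  // getMessage() == "second event"
    Publisher.message end = textReader.next(); // null: end of stream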
+ * + * @author peter + * + */ +public class CambriaTextStreamReader implements reader +{ + /** + * This is the constructor for Cambria Text Reader format + * @param is + * @param defPart + * @throws CambriaApiException + */ + public CambriaTextStreamReader ( InputStream is, String defPart ) throws CambriaApiException + { + fReader = new BufferedReader ( new InputStreamReader ( is ) ); + fDefPart = defPart; + } + + @Override + /** + * next() method iterates through msg length + * throws IOException + * throws CambriaApiException + * + */ + public message next () throws CambriaApiException + { + try + { + final String line = fReader.readLine (); + if ( line == null ) return null; + + return new message () + { + private LogDetails logDetails; + private boolean transactionEnabled; + + /** + * returns boolean value which + * indicates whether transaction is enabled + * @return + */ + public boolean isTransactionEnabled() { + return transactionEnabled; + } + + /** + * sets boolean value which + * indicates whether transaction is enabled + */ + public void setTransactionEnabled(boolean transactionEnabled) { + this.transactionEnabled = transactionEnabled; + } + + @Override + /** + * @returns key + * It ch4ecks whether fDefPart value is Null. + * If yes, it will return ystem.currentTimeMillis () else + * it will return fDefPart variable value + */ + public String getKey () + { + return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart; + } + + @Override + /** + * returns the message in String type object + * @return + */ + public String getMessage () + { + return line; + } + + @Override + /** + * set log details in logDetails variable + */ + public void setLogDetails(LogDetails logDetails) { + this.logDetails = logDetails; + } + + @Override + /** + * get the log details + */ + public LogDetails getLogDetails() { + return this.logDetails; + } + }; + } + catch ( IOException e ) + { + throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () ); + } + } + + private final BufferedReader fReader; + private final String fDefPart; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticator.java b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticator.java new file mode 100644 index 0000000..a7f2376 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticator.java @@ -0,0 +1,39 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.security; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.dmaap.dmf.mr.CambriaApiException; + + + + +/** + * + * @author sneha.d.desai + * + */ +public interface DMaaPAAFAuthenticator { + boolean aafAuthentication( HttpServletRequest req , String role); + String aafPermissionString(String permission, String action) throws CambriaApiException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java new file mode 100644 index 0000000..25644a7 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java @@ -0,0 +1,80 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.security; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; + + +/** + * + * @author sneha.d.desai + * + */ +public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator { + + /** + * @param req + * @param role + */ + @Override + public boolean aafAuthentication(HttpServletRequest req, String role) { + boolean auth = false; + if(req.isUserInRole(role)) + { + + auth = true; + } + + return auth; + } + + @Override + public String aafPermissionString(String topicName, String action) throws CambriaApiException { + + + String permission = ""; + String nameSpace =""; + if(topicName.contains(".") && topicName.contains("org.onap")) { + + nameSpace = topicName.substring(0,topicName.lastIndexOf(".")); + } + else { + nameSpace = null; + nameSpace= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"defaultNSforUEB"); + + if(null==nameSpace)nameSpace="org.onap.dmaap.mr"; + + + + } + + permission = nameSpace+".topic|:topic."+topicName+"|"+action; + return permission; + + } + + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticator.java b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticator.java new file mode 100644 index 0000000..e6ff30f --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticator.java @@ -0,0 +1,61 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.security; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import com.att.nsa.security.NsaApiKey; + + +/** + * An interface for authenticating an inbound request. + * @author nilanjana.maity + * + * @param NsaApiKey + */ +public interface DMaaPAuthenticator { + + /** + * Qualify a request as possibly using the authentication method that this class implements. + * @param req + * @return true if the request might be authenticated by this class + */ + boolean qualify ( HttpServletRequest req ); + + /** + * Check for a request being authentic. If it is, return the API key. If not, return null. 
+ * @param req An inbound web request + * @return the API key for an authentic request, or null + */ + K isAuthentic ( HttpServletRequest req ); + /** + * Check for a ctx being authenticate. If it is, return the API key. If not, return null. + * @param ctx + * @return the API key for an authentication request, or null + */ + K authenticate ( DMaaPContext ctx ); + + + void addAuthenticator(DMaaPAuthenticator a); + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticatorImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticatorImpl.java new file mode 100644 index 0000000..f0d0d4d --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/security/DMaaPAuthenticatorImpl.java @@ -0,0 +1,133 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.security; + +import java.util.LinkedList; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +/** + * + * @author anowarul.islam + * + * @param + */ +public class DMaaPAuthenticatorImpl implements DMaaPAuthenticator { + + private final LinkedList> fAuthenticators; + + + + // Setting timeout to a large value for testing purpose. + + // 10 minutes + private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10 * 10 * 10 * 10 * 10; + + /** + * Construct the security manager against an API key database + * + * @param db + * the API key db + */ + public DMaaPAuthenticatorImpl(NsaApiDb db) { + this(db, kDefaultRequestTimeWindow); + } + + + + + /** + * Construct the security manager against an API key database with a + * specific request time window size + * + * @param db + * the API key db + * @param authTimeWindowMs + * the size of the time window for request authentication + */ + public DMaaPAuthenticatorImpl(NsaApiDb db, long authTimeWindowMs) { + fAuthenticators = new LinkedList<>(); + + fAuthenticators.add(new DMaaPOriginalUebAuthenticator(db, authTimeWindowMs)); + } + + /** + * Authenticate a user's request. This method returns the API key if the + * user is authentic, null otherwise. 
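Editor's wiring sketch for the authenticator chain built above and iterated below. The apiKeyDb and dmaapContext variables are assumed to exist, and the usual <K extends NsaApiKey> type parameter is assumed; none of this is part of the patch itself.

    // Sketch only: the default chain holds DMaaPOriginalUebAuthenticator; further
    // authenticators are appended and consulted in order, each first via
    // qualify(req), then via isAuthentic(req).
    DMaaPAuthenticatorImpl<NsaSimpleApiKey> auth = new DMaaPAuthenticatorImpl<>(apiKeyDb);
    auth.addAuthenticator(new DMaaPMechIdAuthenticator<NsaSimpleApiKey>());
    NsaSimpleApiKey user = auth.authenticate(dmaapContext); // null means "not authenticated"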
+ * + * @param ctx + * @return an api key record, or null + */ + public K authenticate(DMaaPContext ctx) { + final HttpServletRequest req = ctx.getRequest(); + for (DMaaPAuthenticator a : fAuthenticators) { + if (a.qualify(req)) { + final K k = a.isAuthentic(req); + if (k != null) + return k; + } + // else: this request doesn't look right to the authenticator + } + return null; + } + + /** + * Get the user associated with the incoming request, or null if the user is + * not authenticated. + * + * @param ctx + * @return + */ + public static NsaSimpleApiKey getAuthenticatedUser(DMaaPContext ctx) { + final DMaaPAuthenticator m = ctx.getConfigReader().getfSecurityManager(); + return m.authenticate(ctx); + } + + /** + * method by default returning false + * @param req + * @return false + */ + public boolean qualify(HttpServletRequest req) { + return false; + } +/** + * method by default returning null + * @param req + * @return null + */ + public K isAuthentic(HttpServletRequest req) { + return null; + } + + public void addAuthenticator ( DMaaPAuthenticator a ) + { + this.fAuthenticators.add(a); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java b/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java new file mode 100644 index 0000000..6f2ebac --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java @@ -0,0 +1,87 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.security.impl; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.authenticators.MechIdAuthenticator; + +/** + * An authenticator for AT&T MechIds. + * + * @author peter + * + * @param + */ +public class DMaaPMechIdAuthenticator implements DMaaPAuthenticator { + +/** + * This is not yet implemented. 
by default it returns false
+ * @param req HttpServletRequest
+ * @return false
+ */
+ public boolean qualify (HttpServletRequest req) {
+ // we haven't implemented anything here yet, so there's no qualifying request
+ return false;
+ }
+/**
+ * This method authenticates the mech id
+ * @param req
+ * @return APIkey or null
+ */
+ public K isAuthentic (HttpServletRequest req) {
+ final String remoteAddr = req.getRemoteAddr();
+ authLog ( "MechId auth is not yet implemented.", remoteAddr );
+ return null;
+ }
+
+ private static void authLog ( String msg, String remoteAddr )
+ {
+ log.info ( "AUTH-LOG(" + remoteAddr + "): " + msg );
+ }
+
+
+ //private static final Logger log = Logger.getLogger( MechIdAuthenticator.class.toString());
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(MechIdAuthenticator.class);
+/**
+ * Currently it is not yet implemented and returns null
+ * @param ctx DMaaP context
+ * @return APIkey or null
+ */
+ @Override
+ public K authenticate(DMaaPContext ctx) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+@Override
+public void addAuthenticator(DMaaPAuthenticator<K> a) {
+ // TODO Auto-generated method stub
+
+}
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java b/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java
new file mode 100644
index 0000000..f8cb066
--- /dev/null
+++ b/src/main/java/org/onap/dmaap/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java
@@ -0,0 +1,293 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package org.onap.dmaap.dmf.mr.security.impl;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.till.data.sha1HmacSigner;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.db.NsaApiDb;
+
+/**
+ * This authenticator handles an AWS-like authentication, originally used by the
+ * Cambria server (the API server for UEB).
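Editor's sketch of the client side of the scheme handled below; it assumes sha1HmacSigner produces a Base64-encoded HMAC-SHA1 over the signed content, which this patch does not show, so the encoding is an assumption.

    // Illustrative client-side signing, not part of this patch.
    import javax.crypto.Mac;
    import javax.crypto.spec.SecretKeySpec;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    final class CambriaAuthSketch {
        // Returns the value to send as X-CambriaAuth; xDate must also be sent as X-CambriaDate.
        static String xCambriaAuth(String apiKey, String apiSecret, String xDate) throws Exception {
            Mac mac = Mac.getInstance("HmacSHA1");
            mac.init(new SecretKeySpec(apiSecret.getBytes(StandardCharsets.UTF_8), "HmacSHA1"));
            // When an X-Nonce header is used, sign xDate + ":" + nonce instead of xDate alone.
            byte[] sig = mac.doFinal(xDate.getBytes(StandardCharsets.UTF_8));
            return apiKey + ":" + Base64.getEncoder().encodeToString(sig);
        }
    }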
+ * + * @author peter + * + * @param + */ +public class DMaaPOriginalUebAuthenticator implements DMaaPAuthenticator { + /** + * constructor initialization + * + * @param db + * @param requestTimeWindowMs + */ + public DMaaPOriginalUebAuthenticator(NsaApiDb db, long requestTimeWindowMs) { + fDb = db; + fRequestTimeWindowMs = requestTimeWindowMs; + + + + + } + + @Override + public boolean qualify(HttpServletRequest req) { + // accept anything that comes in with X-(Cambria)Auth in the header + final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" }); + return xAuth != null; + } + + /** + * method for authentication + * + * @param req + * @return + */ + public K isAuthentic(HttpServletRequest req) { + final String remoteAddr = req.getRemoteAddr(); + // Cambria originally used "Cambria..." headers, but as the API key + // system is now more + // general, we take either form. + final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" }); + final String xDate = getFirstHeader(req, new String[] { "X-CambriaDate", "X-Date" }); + + final String httpDate = req.getHeader("Date"); + + final String xNonce = getFirstHeader(req, new String[] { "X-Nonce" }); + return authenticate(remoteAddr, xAuth, xDate, httpDate, xNonce); + } + + /** + * Authenticate a user's request. This method returns the API key if the + * user is authentic, null otherwise. + * + * @param remoteAddr + * @param xAuth + * @param xDate + * @param httpDate + * @param nonce + * @return an api key record, or null + */ + public K authenticate(String remoteAddr, String xAuth, String xDate, String httpDate, String nonce) { + if (xAuth == null) { + authLog("No X-Auth header on request", remoteAddr); + return null; + } + + final String[] xAuthParts = xAuth.split(":"); + if (xAuthParts.length != 2) { + authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr); + return null; + } + + + // get the api key and signature + final String clientApiKey = xAuthParts[0]; + final String clientApiHash = xAuthParts[1]; + if (clientApiKey.length() == 0 || clientApiHash.length() == 0) { + authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr); + return null; + } + // if the user provided X-Date, use that. Otherwise, go for Date + final String dateString = xDate != null ? xDate : httpDate; + final Date clientDate = getClientDate(dateString); + if (clientDate == null) { + authLog("Couldn't parse client date '" + dateString + "'. Preferring X-Date over Date.", remoteAddr); + return null; + } + // check the time range + final long nowMs = System.currentTimeMillis(); + final long diffMs = Math.abs(nowMs - clientDate.getTime()); + if (diffMs > fRequestTimeWindowMs) { + authLog("Client date is not in acceptable range of server date. 
Client:" + clientDate.getTime() + + ", Server: " + nowMs + ", Threshold: " + fRequestTimeWindowMs + ".", remoteAddr); + return null; + } + K apiRecord; + try { + apiRecord = fDb.loadApiKey(clientApiKey); + if (apiRecord == null) { + authLog("No such API key " + clientApiKey, remoteAddr); + return null; + } + } catch (ConfigDbException e) { + authLog("Couldn't load API key " + clientApiKey + ": " + e.getMessage(), remoteAddr); + return null; + } + // make the signed content + final StringBuilder sb = new StringBuilder(); + sb.append(dateString); + if (nonce != null) { + sb.append(":"); + sb.append(nonce); + } + final String signedContent = sb.toString(); + // now check the signed date string + final String serverCalculatedSignature = sha1HmacSigner.sign(signedContent, apiRecord.getSecret()); + if (serverCalculatedSignature == null || !serverCalculatedSignature.equals(clientApiHash)) { + authLog("Signatures don't match. Rec'd " + clientApiHash + ", expect " + serverCalculatedSignature + ".", + remoteAddr); + return null; + } + authLog("authenticated " + apiRecord.getKey(), remoteAddr); + return apiRecord; + } + + /** + * Get the first value of the first existing header from the headers list + * + * @param req + * @param headers + * @return a header value, or null if none exist + */ + private static String getFirstHeader(HttpServletRequest req, String[] headers) { + for (String header : headers) { + final String result = req.getHeader(header); + if (result != null) + return result; + } + return null; + } + + /** + * Parse the date string into a Date using one of the supported date + * formats. + * + * @param dateHeader + * @return a date, or null + */ + private static Date getClientDate(String dateString) { + if (dateString == null) { + return null; + } + + // parse the date + Date result = null; + for (String dateFormat : kDateFormats) { + final SimpleDateFormat parser = new SimpleDateFormat(dateFormat, java.util.Locale.US); + if (!dateFormat.contains("z") && !dateFormat.contains("Z")) { + parser.setTimeZone(TIMEZONE_GMT); + } + + try { + result = parser.parse(dateString); + break; + } catch (ParseException e) { + // presumably wrong format + } + } + return result; + } + + private static void authLog(String msg, String remoteAddr) { + log.info("AUTH-LOG(" + remoteAddr + "): " + msg); + } + + private final NsaApiDb fDb; + private final long fRequestTimeWindowMs; + + private static final java.util.TimeZone TIMEZONE_GMT = java.util.TimeZone.getTimeZone("GMT"); + + private static final String kDateFormats[] = + { + // W3C date format (RFC 3339). + "yyyy-MM-dd'T'HH:mm:ssz", + "yyyy-MM-dd'T'HH:mm:ssXXX", // as of Java 7, reqd to handle colon in TZ offset + + // Preferred HTTP date format (RFC 1123). + "EEE, dd MMM yyyy HH:mm:ss zzz", + + // simple unix command line 'date' format + "EEE MMM dd HH:mm:ss z yyyy", + + // Common date format (RFC 822). + "EEE, dd MMM yy HH:mm:ss z", + "EEE, dd MMM yy HH:mm z", + "dd MMM yy HH:mm:ss z", + "dd MMM yy HH:mm z", + + // Obsoleted HTTP date format (ANSI C asctime() format). + "EEE MMM dd HH:mm:ss yyyy", + + // Obsoleted HTTP date format (RFC 1036). 
+ "EEEE, dd-MMM-yy HH:mm:ss zzz", + }; + + + + + + + + + + + + + + + + + + + + // logger declaration + + private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPOriginalUebAuthenticator.class); + @Override + + // TODO Auto-generated method stub + + //} + + public K authenticate(DMaaPContext ctx) { + + + + + + + + + + + return null; + } + + + public void addAuthenticator ( DMaaPAuthenticator a ) + { + + } + +} \ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/AdminService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/AdminService.java new file mode 100644 index 0000000..ef89d06 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/AdminService.java @@ -0,0 +1,83 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service; + +import java.io.IOException; + +import org.json.JSONException; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +/** + * @author muzainulhaque.qazi + * + */ +public interface AdminService { + /** + * method provide consumerCache + * + * @param dMaaPContext + * @throws IOException + */ + void showConsumerCache(DMaaPContext dMaaPContext) throws IOException,AccessDeniedException; + + /** + * method drops consumer cache + * + * @param dMaaPContext + * @throws JSONException + * @throws IOException + */ + void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException,AccessDeniedException; + + + /** + * Get list of blacklisted ips + * @param dMaaPContext context + * @throws IOException ex + * @throws AccessDeniedException ex + */ + void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException; + + /** + * Add ip to blacklist + * @param dMaaPContext context + * @param ip ip + * @throws IOException ex + * @throws ConfigDbException ex + * @throws AccessDeniedException ex + */ + void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException; + + /** + * Remove ip from blacklist + * @param dMaaPContext context + * @param ip ip + * @throws IOException ex + * @throws ConfigDbException ex + * @throws AccessDeniedException ex + */ + void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException; + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/ApiKeysService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/ApiKeysService.java new file mode 100644 index 0000000..9526ca1 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/ApiKeysService.java @@ -0,0 +1,105 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service; + +import java.io.IOException; + +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.security.db.NsaApiDb.KeyExistsException; + +/** + * Declaring all the method in interface that is mainly used for authentication + * purpose. + * + * + */ + +public interface ApiKeysService { + /** + * This method declaration for getting all ApiKey that has generated on + * server. + * + * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + + public void getAllApiKeys(DMaaPContext dmaapContext) + throws ConfigDbException, IOException; + + /** + * Getting information about specific ApiKey + * + * @param dmaapContext + * @param apikey + * @throws ConfigDbException + * @throws IOException + */ + + public void getApiKey(DMaaPContext dmaapContext, String apikey) + throws ConfigDbException, IOException; + + /** + * Thid method is used for create a particular ApiKey + * + * @param dmaapContext + * @param nsaApiKey + * @throws KeyExistsException + * @throws ConfigDbException + * @throws IOException + */ + + public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey) + throws KeyExistsException, ConfigDbException, IOException; + + /** + * This method is used for update ApiKey that is already generated on + * server. + * + * @param dmaapContext + * @param apikey + * @param nsaApiKey + * @throws ConfigDbException + * @throws IOException + * @throws AccessDeniedException + * @throws com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException + */ + public void updateApiKey(DMaaPContext dmaapContext, String apikey, + ApiKeyBean nsaApiKey) throws ConfigDbException, IOException,AccessDeniedException + ; + + /** + * This method is used for delete specific ApiKey + * + * @param dmaapContext + * @param apikey + * @throws ConfigDbException + * @throws IOException + * @throws AccessDeniedException + */ + + public void deleteApiKey(DMaaPContext dmaapContext, String apikey) + throws ConfigDbException, IOException,AccessDeniedException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/EventsService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/EventsService.java new file mode 100644 index 0000000..526b7b8 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/EventsService.java @@ -0,0 +1,75 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service; + +import java.io.IOException; +import java.io.InputStream; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +/** + * + * @author anowarul.islam + * + */ +public interface EventsService { + /** + * + * @param ctx + * @param topic + * @param consumerGroup + * @param clientId + * @throws ConfigDbException + * @throws TopicExistsException + * @throws AccessDeniedException + * @throws UnavailableException + * @throws CambriaApiException + * @throws IOException + */ + public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId) + throws ConfigDbException, TopicExistsException,UnavailableException, + CambriaApiException, IOException,AccessDeniedException; + + /** + * + * @param ctx + * @param topic + * @param msg + * @param defaultPartition + * @param requestTime + * @throws ConfigDbException + * @throws AccessDeniedException + * @throws TopicExistsException + * @throws CambriaApiException + * @throws IOException + */ + public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, + final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, + CambriaApiException, IOException,missingReqdSetting; + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/MMService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/MMService.java new file mode 100644 index 0000000..1bd28e7 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/MMService.java @@ -0,0 +1,66 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service; + +import java.io.IOException; +import java.io.InputStream; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +/** + * Contains the logic for executing calls to the Mirror Maker agent tool. + * + * @author Kawsar Jahan + * + * @since May 25, 2016 + */ + +public interface MMService { + + /* + * this method calls the add white list method of a Mirror Maker agent API + */ + public void addWhiteList(); + + /* + * this method calls the remove white list method of a Mirror Maker agent API + */ + public void removeWhiteList(); + + /* + * This method calls the list white list method of a Mirror Maker agent API + */ + public void listWhiteList(); + + public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) throws ConfigDbException, TopicExistsException, + AccessDeniedException, UnavailableException, CambriaApiException, IOException; + + public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, + final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, + CambriaApiException, IOException, missingReqdSetting; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/MetricsService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/MetricsService.java new file mode 100644 index 0000000..fd24d6c --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/MetricsService.java @@ -0,0 +1,54 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service; + +/** + * @author amol.ramesh.dalne + * + */ +import java.io.IOException; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; + +/** + * + * @author anowarul.islam + * + */ +public interface MetricsService { + /** + * + * @param ctx + * @throws IOException + */ + public void get(DMaaPContext ctx) throws IOException; + + /** + * + * @param ctx + * @param name + * @throws IOException + * @throws CambriaApiException + */ + public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/TopicService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/TopicService.java new file mode 100644 index 0000000..ae257c1 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/TopicService.java @@ -0,0 +1,176 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service; + +import java.io.IOException; + +import org.json.JSONException; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.TopicBean; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +/** + * interface provide all the topic related operations + * + * @author anowarul.islam + * + */ +public interface TopicService { + /** + * method fetch details of all the topics + * + * @param dmaapContext + * @throws JSONException + * @throws ConfigDbException + * @throws IOException + */ + void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException; + void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException; + + /** + * method fetch details of specific topic + * + * @param dmaapContext + * @param topicName + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + void getTopic(DMaaPContext dmaapContext, String topicName) + throws ConfigDbException, IOException, TopicExistsException; + + /** + * method used to create the topic + * + * @param dmaapContext + * @param topicBean + * @throws CambriaApiException + * @throws TopicExistsException + * @throws IOException + * @throws AccessDeniedException + * @throws JSONException + */ + + void createTopic(DMaaPContext dmaapContext, TopicBean topicBean) + throws CambriaApiException, TopicExistsException, IOException, AccessDeniedException; + + /** + * method used to delete to topic + * + * @param dmaapContext + * @param topicName + * @throws IOException + * @throws AccessDeniedException + * @throws ConfigDbException + * @throws CambriaApiException + * @throws TopicExistsException + */ + + void deleteTopic(DMaaPContext dmaapContext, String topicName) + throws IOException, AccessDeniedException, ConfigDbException, CambriaApiException, TopicExistsException; + + /** + * method provides list of all the publishers associated with a topic + * + * @param dmaapContext + * @param topicName + * @throws IOException + * @throws ConfigDbException + * @throws TopicExistsException + */ + void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName) + throws IOException, ConfigDbException, TopicExistsException; + + /** + * method provides details of all the consumer associated with a specific + * topic + * + * @param dmaapContext + * @param topicName + * @throws IOException + * @throws ConfigDbException + * @throws TopicExistsException + */ + void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName) + throws IOException, ConfigDbException, TopicExistsException; + + /** + * method provides publishing right to a specific topic + * + * @param dmaapContext + * @param topicName + * @param producerId + * @throws AccessDeniedException + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException; + + /** + * method denies any specific publisher from a topic + * + * @param dmaapContext + * @param topicName + * @param producerId + * @throws AccessDeniedException + * @throws 
ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException; + + /** + * method provide consuming right to a specific user on a topic + * + * @param dmaapContext + * @param topicName + * @param consumerId + * @throws AccessDeniedException + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException; + + /** + * method denies a particular user's consuming right on a topic + * + * @param dmaapContext + * @param topicName + * @param consumerId + * @throws AccessDeniedException + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException; + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/TransactionService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/TransactionService.java new file mode 100644 index 0000000..b1593c7 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/TransactionService.java @@ -0,0 +1,61 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service; + +import java.io.IOException; + +import com.att.aft.dme2.internal.jettison.json.JSONException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import com.att.nsa.configs.ConfigDbException; + +/** + * + * @author anowarul.islam + * + */ +public interface TransactionService { + /** + * + * @param trnObj + */ + void checkTransaction(TransactionObj trnObj); + + /** + * + * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + void getAllTransactionObjs(DMaaPContext dmaapContext) throws ConfigDbException, IOException; + + /** + * + * @param dmaapContext + * @param transactionId + * @throws ConfigDbException + * @throws JSONException + * @throws IOException + */ + void getTransactionObj(DMaaPContext dmaapContext, String transactionId) + throws ConfigDbException, JSONException, IOException; +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/UIService.java b/src/main/java/org/onap/dmaap/dmf/mr/service/UIService.java new file mode 100644 index 0000000..777abf6 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/UIService.java @@ -0,0 +1,92 @@ +/** + * + */ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service; + +import java.io.IOException; + +import org.apache.kafka.common.errors.TopicExistsException; +import org.json.JSONException; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import com.att.nsa.configs.ConfigDbException; +/** + * @author muzainulhaque.qazi + * + */ +public interface UIService { + /** + * Returning template of hello page. 
+ * + * @param dmaapContext + * @throws IOException + */ + void hello(DMaaPContext dmaapContext) throws IOException; + + /** + * Fetching list of all api keys and returning in a templated form for + * display + * + * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, + IOException; + + /** + * Fetching details of an API key in a templated form for display + * + * @param dmaapContext + * @param apiKey + * @throws Exception + */ + void getApiKey(DMaaPContext dmaapContext, final String apiKey) + throws CambriaApiException, ConfigDbException, JSONException, IOException; + + /** + * Fetching list of all the topics and returning in a templated form for + * display + * + * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, + IOException; + + /** + * Fetching details of a topic in a templated form for display + * + * @param dmaapContext + * @param topic + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + void getTopic(DMaaPContext dmaapContext, final String topic) + throws ConfigDbException, IOException, TopicExistsException; + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/AdminServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/AdminServiceImpl.java new file mode 100644 index 0000000..29a431e --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/AdminServiceImpl.java @@ -0,0 +1,190 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
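As an aside, a minimal sketch of how a front-end resource might dispatch to the UIService methods declared above; the uiService field and all parameters are illustrative assumptions, not part of this change:

    // Routes a display request to the matching templated view; uiService is an injected UIService.
    void renderDetails(DMaaPContext ctx, String apiKey, String topic) throws Exception {
        if (apiKey != null) {
            uiService.getApiKey(ctx, apiKey);   // templated view of one API key
        } else if (topic != null) {
            uiService.getTopic(ctx, topic);     // templated view of one topic
        } else {
            uiService.hello(ctx);               // default landing page
        }
    }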
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.io.IOException; +import java.util.Collection; +import java.util.Set; + +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.springframework.stereotype.Component; + +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.AdminService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.limits.Blacklist; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + + +/** + * @author muzainulhaque.qazi + * + */ +@Component +public class AdminServiceImpl implements AdminService { + + //private Logger log = Logger.getLogger(AdminServiceImpl.class.toString()); + private static final EELFLogger log = EELFManager.getInstance().getLogger(AdminServiceImpl.class); + /** + * getConsumerCache returns consumer cache + * @param dMaaPContext context + * @throws IOException ex + * @throws AccessDeniedException + */ + @Override + public void showConsumerCache(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException { + adminAuthenticate(dMaaPContext); + + JSONObject consumers = new JSONObject(); + JSONArray jsonConsumersList = new JSONArray(); + + for (Consumer consumer : getConsumerFactory(dMaaPContext).getConsumers()) { + JSONObject consumerObject = new JSONObject(); + consumerObject.put("name", consumer.getName()); + consumerObject.put("created", consumer.getCreateTimeMs()); + consumerObject.put("accessed", consumer.getLastAccessMs()); + jsonConsumersList.put(consumerObject); + } + + consumers.put("consumers", jsonConsumersList); + log.info("========== AdminServiceImpl: getConsumerCache: " + jsonConsumersList.toString() + "==========="); + DMaaPResponseBuilder.respondOk(dMaaPContext, consumers); + } + + /** + * + * dropConsumerCache() method clears consumer cache + * @param dMaaPContext context + * @throws JSONException ex + * @throws IOException ex + * @throws AccessDeniedException + * + */ + @Override + public void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException, AccessDeniedException { + adminAuthenticate(dMaaPContext); + getConsumerFactory(dMaaPContext).dropCache(); + DMaaPResponseBuilder.respondOkWithHtml(dMaaPContext, "Consumer cache cleared successfully"); + // log.info("========== AdminServiceImpl: dropConsumerCache: Consumer + // Cache successfully dropped.==========="); + } + + /** + * getfConsumerFactory returns CosnumerFactory details + * @param dMaaPContext contxt + * @return ConsumerFactory obj + * + */ + private ConsumerFactory getConsumerFactory(DMaaPContext dMaaPContext) { + return dMaaPContext.getConfigReader().getfConsumerFactory(); + } + + /** + * return ipblacklist + * @param dMaaPContext context + * @return blacklist obj + */ + private static Blacklist getIpBlacklist(DMaaPContext dMaaPContext) { + return dMaaPContext.getConfigReader().getfIpBlackList(); + } + + + /** + * Get list of blacklisted ips + */ + @Override + public void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException + { + 
adminAuthenticate ( dMaaPContext ); + + DMaaPResponseBuilder.respondOk ( dMaaPContext, + new JSONObject().put ( "blacklist", + setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) ); + } + + public static JSONArray setToJsonArray ( Set fields ) + { + return collectionToJsonArray ( fields ); + } + + public static JSONArray collectionToJsonArray ( Collection fields ) + { + final JSONArray a = new JSONArray (); + if ( fields != null ) + { + for ( Object o : fields ) + { + a.put ( o ); + } + } + return a; + } + + /** + * Add ip to blacklist + */ + @Override + public void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException + { + adminAuthenticate ( dMaaPContext ); + + getIpBlacklist (dMaaPContext).add ( ip ); + DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext ); + } + + /** + * Remove ip from blacklist + */ + @Override + public void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException + { + adminAuthenticate ( dMaaPContext ); + + getIpBlacklist (dMaaPContext).remove ( ip ); + DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext ); + } + + /** + * Authenticate if user is admin + * @param dMaaPContext context + * @throws AccessDeniedException ex + */ + private static void adminAuthenticate ( DMaaPContext dMaaPContext ) throws AccessDeniedException + { + + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext); + if ( user == null || !user.getKey ().equals ( "admin" ) ) + { + throw new AccessDeniedException (); + } + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/ApiKeysServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/ApiKeysServiceImpl.java new file mode 100644 index 0000000..47da903 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/ApiKeysServiceImpl.java @@ -0,0 +1,320 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
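As an aside, the getBlacklist() response above is simply the static helper's output wrapped in one JSON object; a small standalone illustration with placeholder addresses (assumes this module and org.json are on the classpath):

    import java.util.TreeSet;
    import org.json.JSONObject;

    // Reproduces the {"blacklist":[...]} shape returned by getBlacklist(), reusing the
    // setToJsonArray helper defined above.
    public class BlacklistShapeDemo {
        public static void main(String[] args) {
            TreeSet<String> ips = new TreeSet<>();
            ips.add("192.0.2.10");
            ips.add("192.0.2.11");
            JSONObject body = new JSONObject().put("blacklist", AdminServiceImpl.setToJsonArray(ips));
            System.out.println(body); // {"blacklist":["192.0.2.10","192.0.2.11"]}
        }
    }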
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.io.IOException; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.springframework.stereotype.Service; + +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.ApiKeysService; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.Emailer; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.service.standards.HttpStatusCodes; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.NsaApiDb.KeyExistsException; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +/** + * Implementation of the ApiKeysService, this will provide the below operations, + * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey + * + * @author nilanjana.maity + */ +@Service +public class ApiKeysServiceImpl implements ApiKeysService { + + + private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysServiceImpl.class.toString()); + /** + * This method will provide all the ApiKeys present in kafka server. + * + * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + public void getAllApiKeys(DMaaPContext dmaapContext) + throws ConfigDbException, IOException { + + ConfigurationReader configReader = dmaapContext.getConfigReader(); + + log.info("configReader : " + configReader.toString()); + + final JSONObject result = new JSONObject(); + final JSONArray keys = new JSONArray(); + result.put("apiKeys", keys); + + NsaApiDb apiDb = configReader.getfApiKeyDb(); + + for (String key : apiDb.loadAllKeys()) { + keys.put(key); + } + log.info("========== ApiKeysServiceImpl: getAllApiKeys: Api Keys are : " + + keys.toString() + "==========="); + DMaaPResponseBuilder.respondOk(dmaapContext, result); + } + + /** + * @param dmaapContext + * @param apikey + * @throws ConfigDbException + * @throws IOException + */ + @Override + public void getApiKey(DMaaPContext dmaapContext, String apikey) + throws ConfigDbException, IOException { + + String errorMsg = "Api key name is not mentioned."; + int errorCode = HttpStatusCodes.k400_badRequest; + + if (null != apikey) { + NsaSimpleApiKey simpleApiKey = getApiKeyDb(dmaapContext) + .loadApiKey(apikey); + + + if (null != simpleApiKey) { + JSONObject result = simpleApiKey.asJsonObject(); + DMaaPResponseBuilder.respondOk(dmaapContext, result); + log.info("========== ApiKeysServiceImpl: getApiKey : " + + result.toString() + "==========="); + return; + } else { + errorMsg = "Api key [" + apikey + "] does not exist."; + errorCode = HttpStatusCodes.k404_notFound; + log.info("========== ApiKeysServiceImpl: getApiKey: Error : API Key does not exist. 
" + + "==========="); + DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode, + errorMsg); + throw new IOException(); + } + } + + } + + /** + * @param dmaapContext + * @param nsaApiKey + * @throws KeyExistsException + * @throws ConfigDbException + * @throws IOException + */ + @Override + public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey) + throws KeyExistsException, ConfigDbException, IOException { + + log.debug("TopicService: : createApiKey...."); + + String contactEmail = nsaApiKey.getEmail(); + final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ; + String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous"); + if(null==kSetting_AllowAnonymousKeys) kSetting_AllowAnonymousKeys ="false"; + + + if ( kSetting_AllowAnonymousKeys.equalsIgnoreCase("true") && !emailProvided ) + { + DMaaPResponseBuilder.respondWithErrorInJson(dmaapContext, 400, "You must provide an email address."); + return; + } + + + final NsaApiDb apiKeyDb = getApiKeyDb(dmaapContext); + String apiKey = nsaApiKey.getKey(); + String sharedSecret = nsaApiKey.getSharedSecret(); + final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey, + sharedSecret); + if (null != key) { + + if (null != nsaApiKey.getEmail()) { + key.setContactEmail(nsaApiKey.getEmail()); + } + + if (null != nsaApiKey.getDescription()) { + key.setDescription(nsaApiKey.getDescription()); + } + + log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : " + + key.toString() + "====="); + apiKeyDb.saveApiKey(key); + + // email out the secret to validate the email address + if ( emailProvided ) + { + String body = "\n" + "Your email address was provided as the creator of new API key \"" + + apiKey + "\".\n" + "\n" + "If you did not make this request, please let us know." + + " See http://sa2020.it.att.com:8888 for contact information, " + "but don't worry -" + + " the API key is useless without the information below, which has been provided " + + "only to you.\n" + "\n\n" + "For API key \"" + apiKey + "\", use API key secret:\n\n\t" + + sharedSecret + "\n\n" + "Note that it's normal to share the API key" + + " (" + apiKey + "). " + + "This is how you are granted access to resources " + "like a UEB topic or Flatiron scope. " + + "However, you should NOT share the API key's secret. " + "The API key is associated with your" + + " email alone. ALL access to data made with this " + "key will be your responsibility. If you " + + "share the secret, someone else can use the API key " + "to access proprietary data with your " + + "identity.\n" + "\n" + "Enjoy!\n" + "\n" + "The GFP/SA-2020 Team"; + + Emailer em = dmaapContext.getConfigReader().getSystemEmailer(); + em.send(contactEmail, "New API Key", body); + } + log.debug("TopicService: : sending response."); + + JSONObject o = key.asJsonObject(); + + o.put ( NsaSimpleApiKey.kApiSecretField, + emailProvided ? + "Emailed to " + contactEmail + "." 
: + key.getSecret () + ); + DMaaPResponseBuilder.respondOk(dmaapContext, + o); + + return; + } else { + log.debug("=======ApiKeysServiceImpl: createApiKey : Error in creating API Key.====="); + DMaaPResponseBuilder.respondWithError(dmaapContext, + HttpStatusCodes.k500_internalServerError, + "Failed to create api key."); + throw new KeyExistsException(apiKey); + } + } + + /** + * @param dmaapContext + * @param apikey + * @param nsaApiKey + * @throws ConfigDbException + * @throws IOException + * @throws AccessDeniedException + */ + @Override + public void updateApiKey(DMaaPContext dmaapContext, String apikey, + ApiKeyBean nsaApiKey) throws ConfigDbException, IOException, AccessDeniedException { + + String errorMsg = "Api key name is not mentioned."; + int errorCode = HttpStatusCodes.k400_badRequest; + + if (null != apikey) { + final NsaApiDb apiKeyDb = getApiKeyDb(dmaapContext); + final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey); + boolean shouldUpdate = false; + + if (null != key) { + final NsaApiKey user = DMaaPAuthenticatorImpl + .getAuthenticatedUser(dmaapContext); + + if (user == null || !user.getKey().equals(key.getKey())) { + throw new AccessDeniedException("You must authenticate with the key you'd like to update."); + } + + if (null != nsaApiKey.getEmail()) { + key.setContactEmail(nsaApiKey.getEmail()); + shouldUpdate = true; + } + + if (null != nsaApiKey.getDescription()) { + key.setDescription(nsaApiKey.getDescription()); + shouldUpdate = true; + } + + if (shouldUpdate) { + apiKeyDb.saveApiKey(key); + } + + log.info("======ApiKeysServiceImpl : updateApiKey : Key Updated Successfully :" + + key.toString() + "========="); + DMaaPResponseBuilder.respondOk(dmaapContext, + key.asJsonObject()); + return; + } + } else { + errorMsg = "Api key [" + apikey + "] does not exist."; + errorCode = HttpStatusCodes.k404_notFound; + DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode, + errorMsg); + log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============"); + throw new IOException(); + } + } + + /** + * @param dmaapContext + * @param apikey + * @throws ConfigDbException + * @throws IOException + * @throws AccessDeniedException + */ + @Override + public void deleteApiKey(DMaaPContext dmaapContext, String apikey) + throws ConfigDbException, IOException, AccessDeniedException { + + String errorMsg = "Api key name is not mentioned."; + int errorCode = HttpStatusCodes.k400_badRequest; + + if (null != apikey) { + final NsaApiDb apiKeyDb = getApiKeyDb(dmaapContext); + final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey); + + if (null != key) { + + final NsaApiKey user = DMaaPAuthenticatorImpl + .getAuthenticatedUser(dmaapContext); + if (user == null || !user.getKey().equals(key.getKey())) { + throw new AccessDeniedException("You don't own the API key."); + } + + apiKeyDb.deleteApiKey(key); + log.info("======ApiKeysServiceImpl : deleteApiKey : Deleted Key successfully.============"); + DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, + "Api key [" + apikey + "] deleted successfully."); + return; + } + } else { + errorMsg = "Api key [" + apikey + "] does not exist."; + errorCode = HttpStatusCodes.k404_notFound; + DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode, + errorMsg); + log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============"); + throw new IOException(); + } + } + + /** + * + * @param dmaapContext + * @return + */ + private NsaApiDb getApiKeyDb(DMaaPContext dmaapContext) { + ConfigurationReader 
configReader = dmaapContext.getConfigReader(); + return configReader.getfApiKeyDb(); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/BaseTransactionDbImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/BaseTransactionDbImpl.java new file mode 100644 index 0000000..1e248bf --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/BaseTransactionDbImpl.java @@ -0,0 +1,153 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.util.Set; +import java.util.TreeSet; + +import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionFactory; +import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObj; +import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObjDB; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import com.att.nsa.configs.ConfigDb; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.configs.ConfigPath; + +/** + * Persistent storage for Transaction objects built over an abstract config db. + * + * @author anowarul.islam + * + * @param + */ +public class BaseTransactionDbImpl implements DMaaPTransactionObjDB { + + private final ConfigDb fDb; + private final ConfigPath fBasePath; + private final DMaaPTransactionFactory fKeyFactory; + + private static final String kStdRootPath = "/transaction"; + + private ConfigPath makePath(String transactionId) { + return fBasePath.getChild(transactionId); + } + + /** + * Construct an Transaction db over the given config db at the standard + * location + * + * @param db + * @param keyFactory + * @throws ConfigDbException + */ + public BaseTransactionDbImpl(ConfigDb db, DMaaPTransactionFactory keyFactory) throws ConfigDbException { + this(db, kStdRootPath, keyFactory); + } + + /** + * Construct an Transaction db over the given config db using the given root + * location + * + * @param db + * @param rootPath + * @param keyFactory + * @throws ConfigDbException + */ + public BaseTransactionDbImpl(ConfigDb db, String rootPath, DMaaPTransactionFactory keyFactory) + throws ConfigDbException { + fDb = db; + fBasePath = db.parse(rootPath); + fKeyFactory = keyFactory; + + if (!db.exists(fBasePath)) { + db.store(fBasePath, ""); + } + } + + /** + * Create a new Transaction Obj. 
If one exists, + * + * @param id + * @return the new Transaction record + * @throws ConfigDbException + */ + public synchronized K createTransactionObj(String id) throws KeyExistsException, ConfigDbException { + final ConfigPath path = makePath(id); + if (fDb.exists(path)) { + throw new KeyExistsException(id); + } + + // make one, store it, return it + final K newKey = fKeyFactory.makeNewTransactionId(id); + fDb.store(path, newKey.serialize()); + return newKey; + } + + /** + * Save an Transaction record. This must be used after changing auxiliary + * data on the record. Note that the transaction object must exist (via + * createTransactionObj). + * + * @param transaction + * object + * @throws ConfigDbException + */ + @Override + public synchronized void saveTransactionObj(K trnObj) throws ConfigDbException { + final ConfigPath path = makePath(trnObj.getId()); + if (!fDb.exists(path) || !(trnObj instanceof TransactionObj)) { + throw new IllegalStateException(trnObj.getId() + " is not known to this database"); + } + fDb.store(path, ((TransactionObj) trnObj).serialize()); + } + + /** + * Load an Transaction record based on the Transaction Id value + * + * @param transactionId + * @return an Transaction Object record or null + * @throws ConfigDbException + */ + @Override + public synchronized K loadTransactionObj(String transactionId) throws ConfigDbException { + final String data = fDb.load(makePath(transactionId)); + if (data != null) { + return fKeyFactory.makeNewTransactionObj(data); + } + return null; + } + + /** + * Load all transactions known to this database. (This could be expensive.) + * + * @return a set of all Transaction objects + * @throws ConfigDbException + */ + public synchronized Set loadAllTransactionObjs() throws ConfigDbException { + final TreeSet result = new TreeSet<>(); + for (ConfigPath cp : fDb.loadChildrenNames(fBasePath)) { + result.add(cp.getName()); + } + return result; + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImpl.java new file mode 100644 index 0000000..387b667 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/EventsServiceImpl.java @@ -0,0 +1,867 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
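As an aside, a hedged usage sketch for the transaction store above; configDb and txFactory are assumed to be supplied by the surrounding wiring, and the TransactionObj type parameter is an assumption where the original generics were elided:

    // Create, persist and reload one transaction record under the standard /transaction root.
    BaseTransactionDbImpl<TransactionObj> txDb = new BaseTransactionDbImpl<>(configDb, txFactory);
    TransactionObj created = txDb.createTransactionObj("tx-0001"); // KeyExistsException if the id is already stored
    txDb.saveTransactionObj(created);                              // re-stores created.serialize() at /transaction/tx-0001
    TransactionObj loaded = txDb.loadTransactionObj("tx-0001");    // returns null for an unknown id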
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.io.IOException; +import java.io.InputStream; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.Properties; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.MediaType; + +import org.apache.http.HttpStatus; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.errors.TopicExistsException; +import org.json.JSONObject; +import org.json.JSONTokener; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Service; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.backends.Publisher; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaLiveLockAvoider2; +import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.LogDetails; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; + +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet; +import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticator; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.EventsService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.Utils; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.service.standards.MimeTypes; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.util.rrConvertor; + +/** + * This class provides the functinality to publish and subscribe message to + * kafka + * + * @author Ramkumar Sembaiyam + * + */ +@Service +public class EventsServiceImpl implements EventsService { + // private static final Logger LOG = + + private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class); + + private static final String BATCH_LENGTH = "event.batch.length"; + private static final String TRANSFER_ENCODING = "Transfer-Encoding"; + @Autowired + private 
DMaaPErrorMessages errorMessages; + + //@Autowired + + + // @Value("${metrics.send.cambria.topic}") + + + public DMaaPErrorMessages getErrorMessages() { + return errorMessages; + } + + public void setErrorMessages(DMaaPErrorMessages errorMessages) { + this.errorMessages = errorMessages; + } + + /** + * @param ctx + * @param topic + * @param consumerGroup + * @param clientId + * @throws ConfigDbException, + * TopicExistsException, AccessDeniedException, + * UnavailableException, CambriaApiException, IOException + * + * + */ + @Override + public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId) + throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, + CambriaApiException, IOException, DMaaPAccessDeniedException { + final long startTime = System.currentTimeMillis(); + final HttpServletRequest req = ctx.getRequest(); + + boolean isAAFTopic = false; + // was this host blacklisted? + final String remoteAddr = Utils.getRemoteAddress(ctx); + if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.", + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + + int limit = CambriaConstants.kNoLimit; + if (req.getParameter("limit") != null) { + limit = Integer.parseInt(req.getParameter("limit")); + } + + int timeoutMs = CambriaConstants.kNoTimeout; + String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout"); + if (strtimeoutMS != null) + timeoutMs = Integer.parseInt(strtimeoutMS); + // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", + + if (req.getParameter("timeout") != null) { + timeoutMs = Integer.parseInt(req.getParameter("timeout")); + } + + // By default no filter is applied if filter is not passed as a + // parameter in the request URI + String topicFilter = CambriaConstants.kNoFilter; + if (null != req.getParameter("filter")) { + topicFilter = req.getParameter("filter"); + } + // pretty to print the messaages in new line + String prettyval = "0"; + String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty"); + if (null != strPretty) + prettyval = strPretty; + + String metaval = "0"; + String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta"); + if (null != strmeta) + metaval = strmeta; + + final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval); + // withMeta to print offset along with message + final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval); + + final LogWrap logger = new LogWrap(topic, consumerGroup, clientId); + logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter + " from Remote host "+ctx.getRequest().getRemoteHost()); + + // is this user allowed to read this topic? + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); + final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); + + if (metatopic == null) { + // no such topic. 
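As an aside, the limit, timeout and filter values parsed above arrive as query parameters on the consume request; a minimal client-side sketch, assuming the conventional /events/{topic}/{group}/{client} path and placeholder host, topic, group and client values:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    // Illustrative subscriber poll; the query parameters mirror the ones read by getEvents().
    public class ConsumeSketch {
        public static void main(String[] args) throws Exception {
            HttpClient http = HttpClient.newHttpClient();
            HttpRequest consume = HttpRequest.newBuilder()
                    .uri(URI.create("http://mr-host:3904/events/example.topic/group-1/client-1"
                            + "?limit=100&timeout=15000&filter="))
                    .GET()
                    .build();
            HttpResponse<String> resp = http.send(consume, HttpResponse.BodyHandlers.ofString());
            System.out.println(resp.statusCode() + " " + resp.body());
        }
    }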
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND, + DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), + errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()), + topic, null, null, consumerGroup + "/" + clientId, ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "metrics.send.cambria.topic"); + if (null == metricTopicname) + metricTopicname = "msgrtr.apinode.metrics.dmaap"; + + if (null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) { + if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))) { + // check permissions + metatopic.checkUserRead(user); + } + } + // if headers are not provided then user will be null + if (user == null && null != ctx.getRequest().getHeader("Authorization")) { + // the topic name will be sent by the client + + DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); + String permission = aaf.aafPermissionString(topic, "sub"); + if (!aaf.aafAuthentication(ctx.getRequest(), permission)) { + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + errorMessages.getNotPermitted1() + " read " + errorMessages.getNotPermitted2() + topic + " on " + + permission, + null, Utils.getFormattedDate(new Date()), topic, null, null, consumerGroup + "/" + clientId, + ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new DMaaPAccessDeniedException(errRes); + + } + isAAFTopic = true; + } + final long elapsedMs1 = System.currentTimeMillis() - startTime; + logger.info("Time taken in getEvents Authorization " + elapsedMs1 + " ms for " + topic + " " + consumerGroup + + " " + clientId); + Consumer c = null; + + String lhostId = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "clusterhostid"); + if (null == lhostId) { + try { + lhostId = InetAddress.getLocalHost().getCanonicalHostName(); + } catch (UnknownHostException e) { + LOG.info("Unknown Host Exception error occured while getting getting hostid"); + } + + } + CambriaOutboundEventStream coes = null; + try { + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter(); + rl.onCall(topic, consumerGroup, clientId, ctx.getRequest().getRemoteHost()); + c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs, + ctx.getRequest().getRemoteHost()); + coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs) + .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build(); + coes.setDmaapContext(ctx); + coes.setTopic(metatopic); + if (isTransEnabled() || isAAFTopic) { + coes.setTransEnabled(true); + } else { + coes.setTransEnabled(false); + } + coes.setTopicStyle(isAAFTopic); + final long elapsedMs2 = System.currentTimeMillis() - startTime; + logger.info("Time taken in getEvents getConsumerFor " + elapsedMs2 + " ms for " + topic + " " + + consumerGroup + " " + clientId); + + DMaaPResponseBuilder.setNoCacheHeadings(ctx); + + DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes); + // No IOException thrown during respondOkWithStream, so commit the + // new offsets to all the brokers + c.commitOffsets(); + final int sent = 
coes.getSentCount(); + + metricsSet.consumeTick(sent); + rl.onSend(topic, consumerGroup, clientId, sent); + final long elapsedMs = System.currentTimeMillis() - startTime; + logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset() + " for " + + topic + " " + consumerGroup + " " + clientId + " on to the server " + + ctx.getRequest().getRemoteHost()); + + } catch (UnavailableException excp) { + logger.warn(excp.getMessage(), excp); + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, + DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), + errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic, + null, null, consumerGroup + "-" + clientId, ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } catch (java.util.ConcurrentModificationException excp1) { + LOG.info(excp1.getMessage() + "on " + topic + " " + consumerGroup + " ****** " + clientId + " from Remote"+ctx.getRequest().getRemoteHost()); + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT, + DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(), + "Couldn't respond to client, possible of consumer requests from more than one server. Please contact MR team if you see this issue occurs continously", null, + Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost()); + logger.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } catch (CambriaApiException excp) { + LOG.info(excp.getMessage() + "on " + topic + " " + consumerGroup + " ****** " + clientId); + + throw excp; + } + catch (Exception excp) { + // System.out.println(excp + "------------------ " + topic+" + // "+consumerGroup+" "+clientId); + + logger.info("Couldn't respond to client, closing cambria consumer " + " " + topic + " " + consumerGroup + + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + " ****** " + excp); + + ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId); + + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, + DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), + "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null, + Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost()); + logger.info(errRes.toString()); + throw new CambriaApiException(errRes); + } finally { + coes = null; + // If no cache, close the consumer now that we're done with it. 
+ boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled; + String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + ConsumerFactory.kSetting_EnableCache); + if (null != strkSetting_EnableCache) + kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache); + // if + // (!ctx.getConfigReader().getSettings().getBoolean(ConsumerFactory.kSetting_EnableCache, + // ConsumerFactory.kDefault_IsCacheEnabled) && (c != null)) { + if (!kSetting_EnableCache && (c != null)) { + try { + c.close(); + } catch (Exception e) { + logger.info("***Exception occured in getEvents finaly block while closing the consumer " + " " + + topic + " " + consumerGroup + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + + " " + e); + } + } + } + } + + /** + * @throws missingReqdSetting + * + */ + @Override + public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, + final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, + CambriaApiException, IOException, missingReqdSetting, DMaaPAccessDeniedException { + + // is this user allowed to write to this topic? + final long startMs = System.currentTimeMillis(); + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); + final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); + boolean isAAFTopic = false; + + // was this host blacklisted? + final String remoteAddr = Utils.getRemoteAddress(ctx); + + if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Source address [" + remoteAddr + "] is blacklisted. 
Please contact the cluster management team.", + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + + String topicNameStd = null; + + // topicNameStd= + + topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, + "enforced.topic.name.AAF"); + String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "metrics.send.cambria.topic"); + if (null == metricTopicname) + metricTopicname = "msgrtr.apinode.metrics.dmaap"; + boolean topicNameEnforced = false; + if (null != topicNameStd && topic.startsWith(topicNameStd)) { + topicNameEnforced = true; + } + + // Here check if the user has rights to publish on the topic + // ( This will be called when no auth is added or when UEB API Key + // Authentication is used) + // checkUserWrite(user) method will throw an error when there is no Auth + // header added or when the + // user has no publish rights + + if (null != metatopic && null != metatopic.getOwner() && !("".equals(metatopic.getOwner())) + && null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) { + metatopic.checkUserWrite(user); + } + + // if headers are not provided then user will be null + if (topicNameEnforced || (user == null && null != ctx.getRequest().getHeader("Authorization") + && !topic.equalsIgnoreCase(metricTopicname))) { + // the topic name will be sent by the client + + DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); + String permission = aaf.aafPermissionString(topic, "pub"); + if (!aaf.aafAuthentication(ctx.getRequest(), permission)) { + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + errorMessages.getNotPermitted1() + " publish " + errorMessages.getNotPermitted2() + topic + + " on " + permission, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new DMaaPAccessDeniedException(errRes); + } + isAAFTopic = true; + } + + final HttpServletRequest req = ctx.getRequest(); + + // check for chunked input + boolean chunked = false; + if (null != req.getHeader(TRANSFER_ENCODING)) { + chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked"); + } + // get the media type, or set it to a generic value if it wasn't + // provided + String mediaType = req.getContentType(); + if (mediaType == null || mediaType.length() == 0) { + mediaType = MimeTypes.kAppGenericBinary; + } + + if (mediaType.contains("charset=UTF-8")) { + mediaType = mediaType.replace("; charset=UTF-8", "").trim(); + } + + String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "transidUEBtopicreqd"); + boolean istransidreqd = false; + if (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) { + istransidreqd = true; + } + + if (isAAFTopic || istransidreqd) { + pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType); + } else { + pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType); + } + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + + LOG.info("Overall Response time - Published " + " msgs in " + totalMs + " ms for topic " + topic); + + } + + /** + * + * 
@param ctx + * @param topic + * @param msg + * @param defaultPartition + * @param chunked + * @param mediaType + * @throws ConfigDbException + * @throws AccessDeniedException + * @throws TopicExistsException + * @throws CambriaApiException + * @throws IOException + */ + private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked, + String mediaType) + throws ConfigDbException, AccessDeniedException, TopicExistsException, CambriaApiException, IOException { + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + // setup the event set + final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition); + + // start processing, building a batch to push to the backend + final long startMs = System.currentTimeMillis(); + long count = 0; + long maxEventBatch = 1024L* 16; + String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH); + if (null != batchlen) + maxEventBatch = Long.parseLong(batchlen); + // long maxEventBatch = + + final LinkedList batch = new LinkedList<>(); + // final ArrayList> kms = new + + final ArrayList> pms = new ArrayList<>(); + try { + // for each message... + Publisher.message m = null; + while ((m = events.next()) != null) { + // add the message to the batch + batch.add(m); + // final KeyedMessage data = new + // KeyedMessage(topic, m.getKey(), + + // kms.add(data); + final ProducerRecord data = new ProducerRecord(topic, m.getKey(), + m.getMessage()); + + pms.add(data); + // check if the batch is full + final int sizeNow = batch.size(); + if (sizeNow > maxEventBatch) { + // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, + + // kms.clear(); + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + } + } + + // send the pending batch + final int sizeNow = batch.size(); + if (sizeNow > 0) { + // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, + + // kms.clear(); + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + } + + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + + LOG.info("Published " + count + " msgs in " + totalMs + " ms for topic " + topic + " from server " + + ctx.getRequest().getRemoteHost()); + + // build a responseP + final JSONObject response = new JSONObject(); + response.put("count", count); + response.put("serverTimeMs", totalMs); + DMaaPResponseBuilder.respondOk(ctx, response); + + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp instanceof CambriaApiException) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + + } + ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count + + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null, + null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } + } + + /** + * + * @param ctx + * @param inputStream + * @param topic + * @param partitionKey + * @param requestTime + * @param chunked + * @param mediaType + * @throws ConfigDbException + * @throws AccessDeniedException + * @throws TopicExistsException + * @throws IOException + * @throws CambriaApiException + */ + private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic, + final String partitionKey, final String requestTime, final boolean chunked, final String mediaType) + throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException, CambriaApiException { + + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + + // setup the event set + final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey); + + // start processing, building a batch to push to the backend + final long startMs = System.currentTimeMillis(); + long count = 0; + long maxEventBatch = 1024L * 16; + String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH); + if (null != evenlen) + maxEventBatch = Long.parseLong(evenlen); + // final long maxEventBatch = + + final LinkedList batch = new LinkedList(); + // final ArrayList> kms = new + + final ArrayList> pms = new ArrayList>(); + Publisher.message m = null; + int messageSequence = 1; + Long batchId = 1L; + final boolean transactionEnabled = true; + int publishBatchCount = 0; + SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS"); + + // LOG.warn("Batch Start Id: " + + + try { + // for each message... 
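As an aside, both publish paths use the same accumulate-and-flush loop; a standalone illustration of just that pattern, with a stand-in for the Kafka publisher call and the 1024 * 16 fallback used above as the illustrative threshold:

    import java.util.ArrayList;
    import java.util.List;

    // Accumulate messages and flush once the batch passes the configured length
    // (event.batch.length). flush() stands in for sendBatchMessageNew(topic, pms).
    public class BatchFlushSketch {
        static final int MAX_EVENT_BATCH = 1024 * 16;

        public static void publish(List<String> messages) {
            List<String> batch = new ArrayList<>();
            long count = 0;
            for (String m : messages) {
                batch.add(m);
                if (batch.size() >= MAX_EVENT_BATCH) { // the transactional path flushes at >=, the plain path at >
                    count += flush(batch);
                }
            }
            if (!batch.isEmpty()) {                    // send whatever is left over
                count += flush(batch);
            }
            System.out.println("published " + count + " messages");
        }

        private static int flush(List<String> batch) {
            int size = batch.size();
            batch.clear();
            return size;
        }
    }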
+ batchId = DMaaPContext.getBatchID(); + + String responseTransactionId = null; + + while ((m = events.next()) != null) { + + // LOG.warn("Batch Start Id: " + + + + addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId, + transactionEnabled); + messageSequence++; + + + batch.add(m); + + responseTransactionId = m.getLogDetails().getTransactionId(); + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("msgWrapMR", m.getMessage()); + jsonObject.put("transactionId", responseTransactionId); + // final KeyedMessage data = new + // KeyedMessage(topic, m.getKey(), + + // kms.add(data); + final ProducerRecord data = new ProducerRecord(topic, m.getKey(), + m.getMessage()); + + pms.add(data); + // check if the batch is full + final int sizeNow = batch.size(); + if (sizeNow >= maxEventBatch) { + String startTime = sdf.format(new Date()); + LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" + + batchId + "]"); + try { + // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, + // kms); + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + // transactionLogs(batch); + for (message msg : batch) { + LogDetails logDetails = msg.getLogDetails(); + LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); + } + } catch (Exception excp) { + + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp instanceof CambriaApiException) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + ErrorResponse errRes = new ErrorResponse(status, + DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + publishBatchCount = sizeNow; + count += sizeNow; + // batchId++; + String endTime = sdf.format(new Date()); + LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + + ",Batch End Time=" + endTime + "]"); + batchId = DMaaPContext.getBatchID(); + } + } + + // send the pending batch + final int sizeNow = batch.size(); + if (sizeNow > 0) { + String startTime = sdf.format(new Date()); + LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" + + batchId + "]"); + try { + // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, + // kms); + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + // transactionLogs(batch); + for (message msg : batch) { + LogDetails logDetails = msg.getLogDetails(); + LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); + } + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp instanceof CambriaApiException) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + + ErrorResponse errRes = new ErrorResponse(status, + DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + pms.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + // batchId++; + String endTime = sdf.format(new Date()); + publishBatchCount = sizeNow; + LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId + + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time=" + + endTime + "]"); + } + + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + + LOG.info("Published " + count + " msgs(with transaction id) in " + totalMs + " ms for topic " + topic); + + if (null != responseTransactionId) { + ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId)); + } + + // build a response + final JSONObject response = new JSONObject(); + response.put("count", count); + response.put("serverTimeMs", totalMs); + DMaaPResponseBuilder.respondOk(ctx, response); + + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp instanceof CambriaApiException) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + + ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." + errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + } + + /** + * + * @param msg + * @param topic + * @param request + * @param messageCreationTime + * @param messageSequence + * @param batchId + * @param transactionEnabled + */ + private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request, + final String messageCreationTime, final int messageSequence, final Long batchId, + final boolean transactionEnabled) { + LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId, + transactionEnabled); + logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage())); + msg.setTransactionEnabled(transactionEnabled); + msg.setLogDetails(logDetails); + } + + /** + * + * @author anowarul.islam + * + */ + private static class LogWrap { + private final String fId; + + /** + * constructor initialization + * + * @param topic + * @param cgroup + * @param cid + */ + public LogWrap(String topic, String cgroup, String cid) { + fId = "[" + topic + "/" + cgroup + "/" + cid + "] "; + } + + /** + * + * @param msg + */ + public void info(String msg) { + LOG.info(fId + msg); + } + + /** + * + * @param msg + * @param t + */ + public void warn(String msg, Exception t) { + LOG.warn(fId + msg, t); + } + + } + + public boolean isTransEnabled() { + String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "transidUEBtopicreqd"); + boolean istransidreqd = false; + if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true"))) { + 
istransidreqd = true; + } + + return istransidreqd; + + } + + private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request, + final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) { + LogDetails logDetails = new LogDetails(); + logDetails.setTopicId(topicName); + logDetails.setMessageTimestamp(messageTimestamp); + logDetails.setPublisherId(Utils.getUserApiKey(request)); + logDetails.setPublisherIp(request.getRemoteHost()); + logDetails.setMessageBatchId(batchId); + logDetails.setMessageSequence(String.valueOf(messageSequence)); + logDetails.setTransactionEnabled(transactionEnabled); + logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date())); + logDetails.setServerIp(request.getLocalAddr()); + return logDetails; + } + + /* + * public String getMetricsTopic() { return metricsTopic; } + * + * public void setMetricsTopic(String metricsTopic) { this.metricsTopic = + * metricsTopic; } + */ + + + +} \ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java new file mode 100644 index 0000000..d862677 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java @@ -0,0 +1,600 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.LinkedList; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.Context; + +import org.apache.http.HttpStatus; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.json.JSONObject; +import org.json.JSONTokener; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Service; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.backends.Publisher; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.LogDetails; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet; +import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream; +import org.onap.dmaap.dmf.mr.service.MMService; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.Utils; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.service.standards.MimeTypes; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.util.rrConvertor; + + + +@Service +public class MMServiceImpl implements MMService { + private static final String BATCH_LENGTH = "event.batch.length"; + private static final String TRANSFER_ENCODING = "Transfer-Encoding"; + //private static final Logger LOG = Logger.getLogger(MMServiceImpl.class); + private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MMServiceImpl.class); + @Autowired + private DMaaPErrorMessages errorMessages; + + @Autowired + @Qualifier("configurationReader") + private ConfigurationReader configReader; + + // HttpServletRequest object + @Context + private HttpServletRequest request; + + // HttpServletResponse object + @Context + private HttpServletResponse response; + + @Override + public void addWhiteList() { + + } + + @Override + public void removeWhiteList() { + + } + + @Override + public void listWhiteList() { + + } + + @Override + public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) + throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, + CambriaApiException, IOException { + + + final HttpServletRequest req 
= ctx.getRequest(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + + // was this host blacklisted? + final String remoteAddr = Utils.getRemoteAddress(ctx); + + if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.", + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + + int limit = CambriaConstants.kNoLimit; + + if (req.getParameter("limit") != null) { + limit = Integer.parseInt(req.getParameter("limit")); + } + limit = 1; + + int timeoutMs = CambriaConstants.kNoTimeout; + String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout"); + if (strtimeoutMS != null) + timeoutMs = Integer.parseInt(strtimeoutMS); + // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", + + if (req.getParameter("timeout") != null) { + timeoutMs = Integer.parseInt(req.getParameter("timeout")); + } + + // By default no filter is applied if filter is not passed as a + // parameter in the request URI + String topicFilter = CambriaConstants.kNoFilter; + if (null != req.getParameter("filter")) { + topicFilter = req.getParameter("filter"); + } + // pretty to print the messaages in new line + String prettyval = "0"; + String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty"); + if (null != strPretty) + prettyval = strPretty; + + String metaval = "0"; + String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta"); + if (null != strmeta) + metaval = strmeta; + + final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval); + // withMeta to print offset along with message + final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval); + + // is this user allowed to read this topic? + //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); + final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); + + if (metatopic == null) { + // no such topic. 
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND, + DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), + errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()), + topic, null, null, clientId, ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + //String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic"); + /* + * if (null==metricTopicname) + * metricTopicname="msgrtr.apinode.metrics.dmaap"; //else if(user!=null) + * if(null==ctx.getRequest().getHeader("Authorization")&& + * !topic.equalsIgnoreCase(metricTopicname)) { if (null != + * metatopic.getOwner() && !("".equals(metatopic.getOwner()))){ // check + * permissions metatopic.checkUserRead(user); } } + */ + + Consumer c = null; + try { + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + + c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,ctx.getRequest().getRemoteHost()); + + final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs) + .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build(); + coes.setDmaapContext(ctx); + coes.setTopic(metatopic); + + DMaaPResponseBuilder.setNoCacheHeadings(ctx); + + try { + coes.write(baos); + } catch (Exception ex) { + + } + + c.commitOffsets(); + final int sent = coes.getSentCount(); + + metricsSet.consumeTick(sent); + + } catch (UnavailableException excp) { + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, + DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), + errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic, + null, null, clientId, ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } catch (CambriaApiException excp) { + + throw excp; + } catch (Exception excp) { + + ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId); + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, + DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), + "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null, + Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } finally { + + boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled; + String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + ConsumerFactory.kSetting_EnableCache); + if (null != strkSetting_EnableCache) + kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache); + + if (!kSetting_EnableCache && (c != null)) { + c.close(); + + } + } + return baos.toString(); + } + + @Override + public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, + final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, + CambriaApiException, IOException, missingReqdSetting { + + //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); + //final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); + + final String remoteAddr = Utils.getRemoteAddress(ctx); + + if 
(ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.", + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + + String topicNameStd = null; + + topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, + "enforced.topic.name.AAF"); + String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "metrics.send.cambria.topic"); + if (null == metricTopicname) + metricTopicname = "msgrtr.apinode.metrics.dmaap"; + boolean topicNameEnforced = false; + if (null != topicNameStd && topic.startsWith(topicNameStd)) { + topicNameEnforced = true; + } + + final HttpServletRequest req = ctx.getRequest(); + + boolean chunked = false; + if (null != req.getHeader(TRANSFER_ENCODING)) { + chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked"); + } + + String mediaType = req.getContentType(); + if (mediaType == null || mediaType.length() == 0) { + mediaType = MimeTypes.kAppGenericBinary; + } + + if (mediaType.contains("charset=UTF-8")) { + mediaType = mediaType.replace("; charset=UTF-8", "").trim(); + } + + if (!topic.equalsIgnoreCase(metricTopicname)) { + pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType); + } else { + pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType); + } + } + + private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request, + final String messageCreationTime, final int messageSequence, final Long batchId, + final boolean transactionEnabled) { + LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId, + transactionEnabled); + logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage())); + msg.setTransactionEnabled(transactionEnabled); + msg.setLogDetails(logDetails); + } + + private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request, + final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) { + LogDetails logDetails = new LogDetails(); + logDetails.setTopicId(topicName); + logDetails.setMessageTimestamp(messageTimestamp); + logDetails.setPublisherId(Utils.getUserApiKey(request)); + logDetails.setPublisherIp(request.getRemoteHost()); + logDetails.setMessageBatchId(batchId); + logDetails.setMessageSequence(String.valueOf(messageSequence)); + logDetails.setTransactionEnabled(transactionEnabled); + logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date())); + logDetails.setServerIp(request.getLocalAddr()); + return logDetails; + } + + private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked, + String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException, + CambriaApiException, IOException { + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + + // setup the event set + final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition); + + // start processing, building a batch to push to the backend + final long startMs = 
System.currentTimeMillis();
+ long count = 0;
+
+ long maxEventBatch = 1024 * 16;
+ String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+ if (null != batchlen)
+ maxEventBatch = Long.parseLong(batchlen);
+
+ // long maxEventBatch =
+ // ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
+ final LinkedList<Publisher.message> batch = new LinkedList<>();
+ final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<>();
+ //final ArrayList> kms = new ArrayList>();
+
+ try {
+ // for each message...
+ Publisher.message m = null;
+ while ((m = events.next()) != null) {
+ // add the message to the batch
+ batch.add(m);
+ final ProducerRecord<String, String> data = new ProducerRecord<>(topic, m.getKey(),
+ m.getMessage());
+ // check if the batch is full
+ final int sizeNow = batch.size();
+ if (sizeNow > maxEventBatch) {
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ pms.clear();
+ batch.clear();
+ metricsSet.publishTick(sizeNow);
+ count += sizeNow;
+ }
+ }
+
+ // send the pending batch
+ final int sizeNow = batch.size();
+ if (sizeNow > 0) {
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ pms.clear();
+ batch.clear();
+ metricsSet.publishTick(sizeNow);
+ count += sizeNow;
+ }
+
+ final long endMs = System.currentTimeMillis();
+ final long totalMs = endMs - startMs;
+
+ LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
+
+ // build a response
+ final JSONObject response = new JSONObject();
+ response.put("count", count);
+ response.put("serverTimeMs", totalMs);
+ // DMaaPResponseBuilder.respondOk(ctx, response);
+
+ } catch (Exception excp) {
+
+ int status = HttpStatus.SC_NOT_FOUND;
+ String errorMsg = null;
+ if (excp.getClass().toString().contains("CambriaApiException")) {
+ status = ((CambriaApiException) excp).getStatus();
+ JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+ JSONObject errObject = new JSONObject(jsonTokener);
+ errorMsg = (String) errObject.get("message");
+
+ }
+ ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+ errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
+ + "."
+ errorMsg,
+ null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
+ null);
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ }
+
+ private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+ final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+ throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
+ CambriaApiException {
+
+ final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+ // setup the event set
+ final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+ // start processing, building a batch to push to the backend
+ final long startMs = System.currentTimeMillis();
+ long count = 0;
+ long maxEventBatch = 1024 * 16;
+ String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+ if (null != evenlen)
+ maxEventBatch = Long.parseLong(evenlen);
+
+ final LinkedList<Publisher.message> batch = new LinkedList<>();
+ final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<>();
+
+ Publisher.message m = null;
+ int messageSequence = 1;
+ Long batchId = 1L;
+ final boolean transactionEnabled = true;
+ int publishBatchCount = 0;
+ SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
+
+ // LOG.warn("Batch Start Id: " +
+ // Utils.getFromattedBatchSequenceId(batchId));
+ try {
+ // for each message...
+ batchId = DMaaPContext.getBatchID();
+
+ String responseTransactionId = null;
+
+ while ((m = events.next()) != null) {
+
+ // LOG.warn("Batch Start Id: " +
+ // Utils.getFromattedBatchSequenceId(batchId));
+
+ addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+ transactionEnabled);
+ messageSequence++;
+
+ // add the message to the batch
+ batch.add(m);
+
+ responseTransactionId = m.getLogDetails().getTransactionId();
+
+ JSONObject jsonObject = new JSONObject();
+ jsonObject.put("message", m.getMessage());
+ jsonObject.put("transactionId", responseTransactionId);
+ final ProducerRecord<String, String> data = new ProducerRecord<>(topic, m.getKey(),
+ m.getMessage());
+ pms.add(data);
+
+ // check if the batch is full
+ final int sizeNow = batch.size();
+ if (sizeNow >= maxEventBatch) {
+ String startTime = sdf.format(new Date());
+ LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+ + batchId + "]");
+ try {
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ // transactionLogs(batch);
+ for (message msg : batch) {
+ LogDetails logDetails = msg.getLogDetails();
+ LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+ }
+ } catch (Exception excp) {
+
+ int status = HttpStatus.SC_NOT_FOUND;
+ String errorMsg = null;
+ if (excp.getClass().toString().contains("CambriaApiException")) {
+ status = ((CambriaApiException) excp).getStatus();
+ JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+ JSONObject errObject = new JSONObject(jsonTokener);
+ errorMsg = (String) errObject.get("message");
+ }
+ ErrorResponse errRes = new ErrorResponse(status,
+ DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+ "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+ + errorMessages.getPublishMsgCount() + count + "."
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + publishBatchCount = sizeNow; + count += sizeNow; + // batchId++; + String endTime = sdf.format(new Date()); + LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + + ",Batch End Time=" + endTime + "]"); + batchId = DMaaPContext.getBatchID(); + } + } + + // send the pending batch + final int sizeNow = batch.size(); + if (sizeNow > 0) { + String startTime = sdf.format(new Date()); + LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" + + batchId + "]"); + try { + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + // transactionLogs(batch); + for (message msg : batch) { + LogDetails logDetails = msg.getLogDetails(); + LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); + } + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp.getClass().toString().contains("CambriaApiException")) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + + ErrorResponse errRes = new ErrorResponse(status, + DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." + errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + pms.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + // batchId++; + String endTime = sdf.format(new Date()); + publishBatchCount = sizeNow; + LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId + + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time=" + + endTime + "]"); + } + + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + + LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic); + + // build a response + final JSONObject response = new JSONObject(); + response.put("count", count); + response.put("serverTimeMs", totalMs); + + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp.getClass().toString().contains("CambriaApiException")) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + + ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java new file mode 100644 index 0000000..2041e54 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java @@ -0,0 +1,115 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.io.IOException; + +import org.json.JSONObject; +import org.springframework.stereotype.Component; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.service.MetricsService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.metrics.CdmMeasuredItem; + +/** + * + * + * This will provide all the generated metrics details also it can provide the + * get metrics details + * + * + * @author nilanjana.maity + * + * + */ +@Component +public class MetricsServiceImpl implements MetricsService { + + + private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MetricsService.class); + /** + * + * + * @param ctx + * @throws IOException + * + * + * get Metric details + * + */ + @Override + + public void get(DMaaPContext ctx) throws IOException { + LOG.info("Inside : MetricsServiceImpl : get()"); + final MetricsSet metrics = ctx.getConfigReader().getfMetrics(); + DMaaPResponseBuilder.setNoCacheHeadings(ctx); + final JSONObject result = metrics.toJson(); + DMaaPResponseBuilder.respondOk(ctx, result); + LOG.info("============ Metrics generated : " + result.toString() + "================="); + + } + + + @Override + /** + * + * get Metric by name + * + * + * @param ctx + * @param name + * @throws IOException + * @throws CambriaApiException + * + * + */ + public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException { + LOG.info("Inside : MetricsServiceImpl : getMetricByName()"); + final MetricsSet 
metrics = ctx.getConfigReader().getfMetrics(); + + final CdmMeasuredItem item = metrics.getItem(name); + /** + * check if item is null + */ + if (item == null) { + throw new CambriaApiException(404, "No metric named [" + name + "]."); + } + + final JSONObject entry = new JSONObject(); + entry.put("summary", item.summarize()); + entry.put("raw", item.getRawValueString()); + + DMaaPResponseBuilder.setNoCacheHeadings(ctx); + + final JSONObject result = new JSONObject(); + result.put(name, entry); + + DMaaPResponseBuilder.respondOk(ctx, result); + LOG.info("============ Metrics generated : " + entry.toString() + "================="); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java new file mode 100644 index 0000000..f2ba222 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java @@ -0,0 +1,694 @@ +/** + * + */ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.io.IOException; + +import org.apache.http.HttpStatus; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.beans.TopicBean; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.metabroker.Broker1; + +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticator; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.TopicService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.Utils; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.NsaAcl; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +/** + * @author muzainulhaque.qazi + * + */ +@Service +public class TopicServiceImpl implements TopicService { + + // private static final Logger LOGGER = + + private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class); + @Autowired + private DMaaPErrorMessages errorMessages; + + // @Value("${msgRtr.topicfactory.aaf}") + + + public DMaaPErrorMessages getErrorMessages() { + return errorMessages; + } + + public void setErrorMessages(DMaaPErrorMessages errorMessages) { + this.errorMessages = errorMessages; + } + + /** + * @param dmaapContext + * @throws JSONException + * @throws ConfigDbException + * @throws IOException + * + */ + @Override + public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException { + LOGGER.info("Fetching list of all the topics."); + JSONObject json = new JSONObject(); + + JSONArray topicsList = new JSONArray(); + + for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) { + topicsList.put(topic.getName()); + } + + json.put("topics", topicsList); + + LOGGER.info("Returning list of all the topics."); + DMaaPResponseBuilder.respondOk(dmaapContext, json); + + } + + /** + * @param dmaapContext + * @throws JSONException + * @throws ConfigDbException + * @throws IOException + * + */ + public void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException { + + LOGGER.info("Fetching list of all the topics."); + JSONObject json = new JSONObject(); + + JSONArray topicsList = new JSONArray(); + + for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) { + JSONObject obj = new JSONObject(); + obj.put("topicName", topic.getName()); + + obj.put("owner", topic.getOwner()); + obj.put("txenabled", 
topic.isTransactionEnabled()); + topicsList.put(obj); + } + + json.put("topics", topicsList); + + LOGGER.info("Returning list of all the topics."); + DMaaPResponseBuilder.respondOk(dmaapContext, json); + + } + + /** + * @param dmaapContext + * @param topicName + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + @Override + public void getTopic(DMaaPContext dmaapContext, String topicName) + throws ConfigDbException, IOException, TopicExistsException { + + LOGGER.info("Fetching details of topic " + topicName); + Topic t = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == t) { + LOGGER.error("Topic [" + topicName + "] does not exist."); + throw new TopicExistsException("Topic [" + topicName + "] does not exist."); + } + + JSONObject o = new JSONObject(); + o.put("name", t.getName()); + o.put("description", t.getDescription()); + + if (null != t.getOwners()) + o.put("owner", t.getOwners().iterator().next()); + if (null != t.getReaderAcl()) + o.put("readerAcl", aclToJson(t.getReaderAcl())); + if (null != t.getWriterAcl()) + o.put("writerAcl", aclToJson(t.getWriterAcl())); + + LOGGER.info("Returning details of topic " + topicName); + DMaaPResponseBuilder.respondOk(dmaapContext, o); + + } + + /** + * @param dmaapContext + * @param topicBean + * @throws CambriaApiException + * @throws AccessDeniedException + * @throws IOException + * @throws TopicExistsException + * @throws JSONException + * + * + * + */ + @Override + public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean) + throws CambriaApiException, DMaaPAccessDeniedException, IOException, TopicExistsException { + LOGGER.info("Creating topic " + topicBean.getTopicName()); + + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); + String key = null; + String appName = dmaapContext.getRequest().getHeader("AppName"); + String enfTopicName = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, + "enforced.topic.name.AAF"); + + if (user != null) { + key = user.getKey(); + + if (enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >= 0) { + + LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed."); + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Failed to create topic: Access Denied.User does not have permission to perform create topic"); + + LOGGER.info(errRes.toString()); + // throw new DMaaPAccessDeniedException(errRes); + + } + } + // else if (user==null && + // (null==dmaapContext.getRequest().getHeader("Authorization") && null + // == dmaapContext.getRequest().getHeader("cookie")) ) { + else if (Utils.isCadiEnabled()&&user == null && null == dmaapContext.getRequest().getHeader("Authorization") + && (null == appName && null == dmaapContext.getRequest().getHeader("cookie"))) { + LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed."); + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Failed to create topic: Access Denied.User does not have permission to perform create topic"); + + LOGGER.info(errRes.toString()); + // throw new DMaaPAccessDeniedException(errRes); + } + + if (user == null && (null != dmaapContext.getRequest().getHeader("Authorization") + )) { + // if (user == null && + // (null!=dmaapContext.getRequest().getHeader("Authorization") || + // 
null != dmaapContext.getRequest().getHeader("cookie"))) { + // ACL authentication is not provided so we will use the aaf + // authentication + LOGGER.info("Authorization the topic"); + + String permission = ""; + String nameSpace = ""; + if (topicBean.getTopicName().indexOf(".") > 1) + nameSpace = topicBean.getTopicName().substring(0, topicBean.getTopicName().lastIndexOf(".")); + + String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "msgRtr.topicfactory.aaf"); + + // AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper); + + permission = mrFactoryVal + nameSpace + "|create"; + DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); + + if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) { + + LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed."); + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Failed to create topic: Access Denied.User does not have permission to create topic with perm " + + permission); + + LOGGER.info(errRes.toString()); + throw new DMaaPAccessDeniedException(errRes); + + } else { + // if user is null and aaf authentication is ok then key should + // be "" + // key = ""; + /** + * Added as part of AAF user it should return username + */ + + key = dmaapContext.getRequest().getUserPrincipal().getName().toString(); + LOGGER.info("key ==================== " + key); + + } + } + + try { + final String topicName = topicBean.getTopicName(); + final String desc = topicBean.getTopicDescription(); + int partition = topicBean.getPartitionCount(); + // int replica = topicBean.getReplicationCount(); + if (partition == 0) { + partition = 1; + } + final int partitions = partition; + + int replica = topicBean.getReplicationCount(); + if (replica == 0) { + replica = 1; + } + final int replicas = replica; + boolean transactionEnabled = topicBean.isTransactionEnabled(); + + final Broker1 metabroker = getMetaBroker(dmaapContext); + final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas, transactionEnabled); + + LOGGER.info("Topic created successfully. Sending response"); + DMaaPResponseBuilder.respondOk(dmaapContext, topicToJson(t)); + } catch (JSONException excp) { + + LOGGER.error("Failed to create topic. Couldn't parse JSON data.", excp); + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST, + DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson()); + LOGGER.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } catch (ConfigDbException excp1) { + + LOGGER.error("Failed to create topic. 
Config DB Exception", excp1);
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
+ DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ } catch (org.onap.dmaap.dmf.mr.metabroker.Broker1.TopicExistsException e) {
+ // TODO Auto-generated catch block
+ LOGGER.error( e.getMessage());
+ }
+ }
+
+ /**
+ * @param dmaapContext
+ * @param topicName
+ * @throws ConfigDbException
+ * @throws IOException
+ * @throws TopicExistsException
+ * @throws CambriaApiException
+ * @throws AccessDeniedException
+ */
+ @Override
+ public void deleteTopic(DMaaPContext dmaapContext, String topicName) throws IOException, ConfigDbException,
+ CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
+
+
+ LOGGER.info(" Deleting topic " + topicName);
+ /*if (true) { // {
+ LOGGER.error("Failed to delete topic " + topicName + ". Authentication failed.");
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), errorMessages.getCreateTopicFail() + " " +
+ errorMessages.getNotPermitted1() + " delete " + errorMessages.getNotPermitted2());
+ LOGGER.info(errRes.toString());
+ throw new DMaaPAccessDeniedException(errRes);
+ }*/
+
+ final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
+
+ if (user == null && null != dmaapContext.getRequest().getHeader("Authorization")) {
+ LOGGER.info("Authenticating the user, as ACL authentication is not provided");
+ // String permission =
+
+ String permission = "";
+ String nameSpace = topicName.substring(0, topicName.lastIndexOf("."));
+ String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "msgRtr.topicfactory.aaf");
+
+ permission = mrFactoryVal + nameSpace + "|destroy";
+ DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+ if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
+ LOGGER.error("Failed to delete topic " + topicName + ". Authentication failed.");
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ errorMessages.getCreateTopicFail() + " " + errorMessages.getNotPermitted1() + " delete "
+ + errorMessages.getNotPermitted2());
+ LOGGER.info(errRes.toString());
+ throw new DMaaPAccessDeniedException(errRes);
+ }
+
+ }
+
+ final Broker1 metabroker = getMetaBroker(dmaapContext);
+ final Topic topic = metabroker.getTopic(topicName);
+
+ if (topic == null) {
+ LOGGER.error("Failed to delete topic. Topic [" + topicName + "] does not exist.");
+ throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist.");
+ }
+
+ // metabroker.deleteTopic(topicName);
+
+ LOGGER.info("Topic [" + topicName + "] deleted successfully.
Sending response."); + DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Topic [" + topicName + "] deleted successfully"); + } + + /** + * + * @param dmaapContext + * @return + */ + private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) { + return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker(); + } + + /** + * @param dmaapContext + * @param topicName + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + * + */ + @Override + public void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName) + throws ConfigDbException, IOException, TopicExistsException { + LOGGER.info("Retrieving list of all the publishers for topic " + topicName); + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (topic == null) { + LOGGER.error("Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist."); + throw new TopicExistsException( + "Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist."); + } + + final NsaAcl acl = topic.getWriterAcl(); + + LOGGER.info("Returning list of all the publishers for topic " + topicName + ". Sending response."); + DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl)); + + } + + /** + * + * @param acl + * @return + */ + private static JSONObject aclToJson(NsaAcl acl) { + final JSONObject o = new JSONObject(); + if (acl == null) { + o.put("enabled", false); + o.put("users", new JSONArray()); + } else { + o.put("enabled", acl.isActive()); + + final JSONArray a = new JSONArray(); + for (String user : acl.getUsers()) { + a.put(user); + } + o.put("users", a); + } + return o; + } + + /** + * @param dmaapContext + * @param topicName + */ + @Override + public void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName) + throws IOException, ConfigDbException, TopicExistsException { + LOGGER.info("Retrieving list of all the consumers for topic " + topicName); + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (topic == null) { + LOGGER.error("Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist."); + throw new TopicExistsException( + "Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist."); + } + + final NsaAcl acl = topic.getReaderAcl(); + + LOGGER.info("Returning list of all the consumers for topic " + topicName + ". 
Sending response."); + DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl)); + + } + + /** + * + * @param t + * @return + */ + private static JSONObject topicToJson(Topic t) { + final JSONObject o = new JSONObject(); + + o.put("name", t.getName()); + o.put("description", t.getDescription()); + o.put("owner", t.getOwner()); + o.put("readerAcl", aclToJson(t.getReaderAcl())); + o.put("writerAcl", aclToJson(t.getWriterAcl())); + + return o; + } + + /** + * @param dmaapContext + * @param topicName @param producerId @throws + * ConfigDbException @throws IOException @throws + * TopicExistsException @throws AccessDeniedException @throws + * + */ + @Override + public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, CambriaApiException { + + LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName); + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); + + + // + // LOGGER.info("Authenticating the user, as ACL authentication is not + + //// String permission = + + // + + + + // { + // LOGGER.error("Failed to permit write access to producer [" + + // producerId + "] for topic " + topicName + + // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + // errorMessages.getNotPermitted1()+" + + + + // } + // } + + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic. Topic [" + topicName + + "] does not exist."); + throw new TopicExistsException("Failed to permit write access to producer [" + producerId + + "] for topic. Topic [" + topicName + "] does not exist."); + } + + topic.permitWritesFromUser(producerId, user); + + LOGGER.info("Write access has been granted to producer [" + producerId + "] for topic [" + topicName + + "]. 
Sending response."); + DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been granted to publisher."); + + } + + /** + * @param dmaapContext + * @param topicName + * @param producerId + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + * @throws AccessDeniedException + * @throws DMaaPAccessDeniedException + * + */ + @Override + public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, + DMaaPAccessDeniedException { + + LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName); + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); + + // + //// String permission = + + // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); + // String permission = aaf.aafPermissionString(topicName, "manage"); + // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) + // { + // LOGGER.error("Failed to revoke write access to producer [" + + // producerId + "] for topic " + topicName + + // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + // errorMessages.getNotPermitted1()+" + + + // throw new DMaaPAccessDeniedException(errRes); + // + + // } + + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic. Topic [" + topicName + + "] does not exist."); + throw new TopicExistsException("Failed to revoke write access to producer [" + producerId + + "] for topic. Topic [" + topicName + "] does not exist."); + } + + topic.denyWritesFromUser(producerId, user); + + LOGGER.info("Write access has been revoked to producer [" + producerId + "] for topic [" + topicName + + "]. Sending response."); + DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been revoked for publisher."); + + } + + /** + * @param dmaapContext + * @param topicName + * @param consumerId + * @throws DMaaPAccessDeniedException + */ + @Override + public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, + DMaaPAccessDeniedException { + + LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName); + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); + + // + //// String permission = + + + // String permission = aaf.aafPermissionString(topicName, "manage"); + // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) + // { + // LOGGER.error("Failed to permit read access to consumer [" + + // consumerId + "] for topic " + topicName + + // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + // errorMessages.getNotPermitted1()+" + + + + // } + // } + + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic. Topic [" + topicName + + "] does not exist."); + throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId + + "] for topic. 
Topic [" + topicName + "] does not exist."); + } + + topic.permitReadsByUser(consumerId, user); + + LOGGER.info("Read access has been granted to consumer [" + consumerId + "] for topic [" + topicName + + "]. Sending response."); + DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, + "Read access has been granted for consumer [" + consumerId + "] for topic [" + topicName + "]."); + } + + /** + * @param dmaapContext + * @param topicName + * @param consumerId + * @throws DMaaPAccessDeniedException + */ + @Override + public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, + DMaaPAccessDeniedException { + + LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName); + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); + + //// String permission = + + + // String permission = aaf.aafPermissionString(topicName, "manage"); + // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) + // { + // LOGGER.error("Failed to revoke read access to consumer [" + + // consumerId + "] for topic " + topicName + + // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + // errorMessages.getNotPermitted1()+" + + + // throw new DMaaPAccessDeniedException(errRes); + // } + // + // + + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic. Topic [" + topicName + + "] does not exist."); + throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId + + "] for topic. Topic [" + topicName + "] does not exist."); + } + + topic.denyReadsByUser(consumerId, user); + + LOGGER.info("Read access has been revoked to consumer [" + consumerId + "] for topic [" + topicName + + "]. Sending response."); + DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, + "Read access has been revoked for consumer [" + consumerId + "] for topic [" + topicName + "]."); + + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java new file mode 100644 index 0000000..cfe2948 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java @@ -0,0 +1,100 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.io.IOException; + +import org.springframework.stereotype.Service; + +import com.att.aft.dme2.internal.jettison.json.JSONException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.service.TransactionService; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import com.att.nsa.configs.ConfigDbException; + +/** + * Once the transaction rest gateway will be using that time it will provide all + * the transaction details like fetching all the transactional objects or get + * any particular transaction object details + * + * @author nilanjana.maity + * + */ +@Service +public class TransactionServiceImpl implements TransactionService { + + @Override + public void checkTransaction(TransactionObj trnObj) { + /* Need to implement the method */ + } + + @Override + public void getAllTransactionObjs(DMaaPContext dmaapContext) + throws ConfigDbException, IOException { + + /* + + * + * LOG.info("configReader : "+configReader.toString()); + * + * final JSONObject result = new JSONObject (); final JSONArray + * transactionIds = new JSONArray (); result.put ( "transactionIds", + * transactionIds ); + * + * DMaaPTransactionObjDB transDb = + * configReader.getfTranDb(); + * + * for (String transactionId : transDb.loadAllTransactionObjs()) { + * transactionIds.put (transactionId); } LOG.info( + * "========== TransactionServiceImpl: getAllTransactionObjs: Transaction objects are : " + * + transactionIds.toString()+"==========="); + * DMaaPResponseBuilder.respondOk(dmaapContext, result); + */ + } + + @Override + public void getTransactionObj(DMaaPContext dmaapContext, + String transactionId) throws ConfigDbException, JSONException, + IOException { + + /* + + * + * ConfigurationReader configReader = dmaapContext.getConfigReader(); + * + * DMaaPTransactionObj trnObj; + * + * trnObj = configReader.getfTranDb().loadTransactionObj(transactionId); + * + * + * if (null != trnObj) { trnObj.serialize(); JSONObject result = + * trnObj.asJsonObject(); DMaaPResponseBuilder.respondOk(dmaapContext, + * result); + * LOG.info("========== TransactionServiceImpl: getTransactionObj : "+ + * result.toString()+"==========="); return; } + * + * } LOG.info( + * "========== TransactionServiceImpl: getTransactionObj: Error : Transaction object does not exist. " + * +"==========="); + */ + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java new file mode 100644 index 0000000..b60fb44 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java @@ -0,0 +1,210 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import java.io.IOException; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + + +import org.apache.kafka.common.errors.TopicExistsException; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.springframework.stereotype.Service; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.service.UIService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; +/** + * @author muzainulhaque.qazi + * + */ +@Service +public class UIServiceImpl implements UIService { + + + private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIServiceImpl.class); + /** + * Returning template of hello page + * @param dmaapContext + * @throws IOException + */ + @Override + public void hello(DMaaPContext dmaapContext) throws IOException { + LOGGER.info("Returning template of hello page."); + DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "templates/hello.html"); + } + + /** + * Fetching list of all api keys and returning in a templated form for display. 
+ * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + @Override + public void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException { + // TODO - We need to work on the templates and how data will be set in + // the template + LOGGER.info("Fetching list of all api keys and returning in a templated form for display."); + Map keyMap = getApiKeyDb(dmaapContext).loadAllKeyRecords(); + + LinkedList keyList = new LinkedList<>(); + + JSONObject jsonList = new JSONObject(); + + for (Entry e : keyMap.entrySet()) { + final NsaSimpleApiKey key = e.getValue(); + final JSONObject jsonObject = new JSONObject(); + jsonObject.put("key", key.getKey()); + jsonObject.put("email", key.getContactEmail()); + jsonObject.put("description", key.getDescription()); + keyList.add(jsonObject); + } + + jsonList.put("apiKeys", keyList); + + LOGGER.info("Returning list of all the api keys in JSON format for the template."); + // "templates/apiKeyList.html" + DMaaPResponseBuilder.respondOk(dmaapContext, jsonList); + + } + + /** + * @param dmaapContext + * @param apiKey + * @throws ConfigDbException + * @throws IOException + * @throws JSONException + * @throws Exception + */ + @Override + public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws CambriaApiException, ConfigDbException, JSONException, IOException { + // TODO - We need to work on the templates and how data will be set in + // the template + LOGGER.info("Fetching detials of apikey: " + apiKey); + final NsaSimpleApiKey key = getApiKeyDb(dmaapContext).loadApiKey(apiKey); + + if (null != key) { + LOGGER.info("Details of apikey [" + apiKey + "] found. Returning response"); + DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject()); + } else { + LOGGER.info("Details of apikey [" + apiKey + "] not found. 
Returning response"); + throw new CambriaApiException(400,"Key [" + apiKey + "] not found."); + } + + } + + /** + * Fetching list of all the topics + * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + @Override + public void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException { + // TODO - We need to work on the templates and how data will be set in + // the template + LOGGER.info("Fetching list of all the topics and returning in a templated form for display"); + List topicsList = getMetaBroker(dmaapContext).getAllTopics(); + + JSONObject jsonObject = new JSONObject(); + + JSONArray topicsArray = new JSONArray(); + + List topicList = getMetaBroker(dmaapContext).getAllTopics(); + + for (Topic topic : topicList) { + JSONObject obj = new JSONObject(); + obj.put("topicName", topic.getName()); + obj.put("description", topic.getDescription()); + obj.put("owner", topic.getOwner()); + topicsArray.put(obj); + } + + jsonObject.put("topics", topicsList); + + LOGGER.info("Returning the list of topics in templated format for display."); + DMaaPResponseBuilder.respondOk(dmaapContext, jsonObject); + + } + + /** + * @param dmaapContext + * @param topicName + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + @Override + public void getTopic(DMaaPContext dmaapContext, String topicName) + throws ConfigDbException, IOException, TopicExistsException { + // TODO - We need to work on the templates and how data will be set in + // the template + LOGGER.info("Fetching detials of apikey: " + topicName); + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Topic [" + topicName + "] does not exist."); + throw new TopicExistsException("Topic [" + topicName + "] does not exist."); + } + + JSONObject json = new JSONObject(); + json.put("topicName", topic.getName()); + json.put("description", topic.getDescription()); + json.put("owner", topic.getOwner()); + + LOGGER.info("Returning details of topic [" + topicName + "]. Sending response."); + DMaaPResponseBuilder.respondOk(dmaapContext, json); + + } + + /** + * + * @param dmaapContext + * @return + */ + private NsaApiDb getApiKeyDb(DMaaPContext dmaapContext) { + return dmaapContext.getConfigReader().getfApiKeyDb(); + + } + + /** + * + * @param dmaapContext + * @return + */ + private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) { + return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker(); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionFactory.java new file mode 100644 index 0000000..3a02252 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionFactory.java @@ -0,0 +1,44 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
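In getTopicsTable above, the handler builds a topicsArray of per-topic JSON objects but then puts the raw topicsList into the response and calls getAllTopics() twice. A minimal sketch of the assembly as it appears to be intended, reusing only types introduced elsewhere in this patch; putting the built array rather than the raw list is an assumption about intent:

```java
import java.util.List;

import org.json.JSONArray;
import org.json.JSONObject;
import org.onap.dmaap.dmf.mr.metabroker.Topic;

public class TopicsTableSketch {

    /** Builds the {"topics":[{"topicName":...,"description":...,"owner":...}, ...]} payload. */
    public JSONObject buildTopicsPayload(List<Topic> topicList) {
        final JSONArray topicsArray = new JSONArray();
        for (final Topic topic : topicList) {
            final JSONObject obj = new JSONObject();
            obj.put("topicName", topic.getName());
            obj.put("description", topic.getDescription());
            obj.put("owner", topic.getOwner());
            topicsArray.put(obj);
        }

        final JSONObject jsonObject = new JSONObject();
        // The implementation above puts the raw topic list here; using the
        // constructed array is the assumed intent of the builder loop.
        jsonObject.put("topics", topicsArray);
        return jsonObject;
    }
}
```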
+ * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.transaction; +/** + * + * @author anowarul.islam + * + * @param + */ +public interface DMaaPTransactionFactory { + + /** + * + * @param data + * @return + */ + K makeNewTransactionObj ( String data ); + /** + * + * @param id + * @return + */ + K makeNewTransactionId ( String id ); + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObj.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObj.java new file mode 100644 index 0000000..5ef986a --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObj.java @@ -0,0 +1,83 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.transaction; + +import org.json.JSONObject; +/** + * This is an interface for DMaaP transactional logging object class. 
+ * @author nilanjana.maity + * + */ +public interface DMaaPTransactionObj { + /** + * This will get the transaction id + * @return id transactionId + */ + String getId(); + /** + * This will set the transaction id + * @param id transactionId + */ + void setId(String id); + /** + * This will sync the transaction object mapping + * @return String or null + */ + String serialize(); + /** + * get the total message count once the publisher published + * @return long totalMessageCount + */ + long getTotalMessageCount(); + /** + * set the total message count once the publisher published + * @param totalMessageCount + */ + void setTotalMessageCount(long totalMessageCount); + /** + * get the total Success Message Count once the publisher published + * @return getSuccessMessageCount + */ + long getSuccessMessageCount(); + /** + * set the total Success Message Count once the publisher published + * @param successMessageCount + */ + void setSuccessMessageCount(long successMessageCount); + /** + * get the failure Message Count once the publisher published + * @return failureMessageCount + */ + long getFailureMessageCount(); + /** + * set the failure Message Count once the publisher published + * @param failureMessageCount + */ + void setFailureMessageCount(long failureMessageCount); + + /** + * wrapping the data into json object + * @return JSONObject + */ + JSONObject asJsonObject(); + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObjDB.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObjDB.java new file mode 100644 index 0000000..d2d297c --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/transaction/DMaaPTransactionObjDB.java @@ -0,0 +1,86 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.transaction; + +import java.util.Set; + +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.NsaSecurityManagerException; + + +/** + * Persistent storage for Transaction Object and secrets built over an abstract config db. Instances + * of this DB must support concurrent access. + * @author nilanjana.maity + * + * @param DMaaPTransactionObj + */ +public interface DMaaPTransactionObjDB { + + + /** + * Create a new Transaction Object. 
If one exists, + * @param id + * @return the new Transaction record + * @throws ConfigDbException + */ + K createTransactionObj (String id) throws KeyExistsException, ConfigDbException; + + + /** + * An exception to signal a Transaction object already exists + * @author nilanjana.maity + * + */ + public static class KeyExistsException extends NsaSecurityManagerException + { + /** + * If the key exists + * @param key + */ + public KeyExistsException ( String key ) { super ( "Transaction Object " + key + " exists" ); } + private static final long serialVersionUID = 1L; + } + + /** + * Save a Transaction Object record. This must be used after changing auxiliary data on the record. + * Note that the transaction must exist (via createTransactionObj). + * @param transactionObj + * @throws ConfigDbException + */ + void saveTransactionObj ( K transactionObj ) throws ConfigDbException; + + /** + * Load an Transaction Object record based on the Transaction ID value + * @param transactionId + * @return a transaction record or null + * @throws ConfigDbException + */ + K loadTransactionObj ( String transactionId ) throws ConfigDbException; + + /** + * Load all Transaction objects. + * @return + * @throws ConfigDbException + */ + Set loadAllTransactionObjs () throws ConfigDbException; +} \ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/TransactionObj.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/TransactionObj.java new file mode 100644 index 0000000..660acec --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/transaction/TransactionObj.java @@ -0,0 +1,202 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
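DMaaPTransactionObjDB defines the persistence contract for transaction records: createTransactionObj must run first, saveTransactionObj persists later changes to an existing record, and loadTransactionObj / loadAllTransactionObjs read records back. A hedged usage sketch against some implementation of the interface; the db instance and the concrete type argument are assumptions, since this patch ships only the interface:

```java
import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObjDB;
import org.onap.dmaap.dmf.mr.transaction.TransactionObj;
import com.att.nsa.configs.ConfigDbException;

public class TransactionDbSketch {

    /** Create-or-update flow for one transaction record. */
    public TransactionObj record(DMaaPTransactionObjDB<TransactionObj> db, String id,
                                 long total, long ok, long failed) throws ConfigDbException {
        TransactionObj trn;
        try {
            trn = db.createTransactionObj(id);                    // record must exist before save
        } catch (DMaaPTransactionObjDB.KeyExistsException e) {
            trn = db.loadTransactionObj(id);                      // already created earlier
        }
        trn.setTotalMessageCount(total);
        trn.setSuccessMessageCount(ok);
        trn.setFailureMessageCount(failed);
        db.saveTransactionObj(trn);                               // persist the updated counters
        return trn;
    }
}
```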
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.transaction; + +import org.json.JSONObject; + +/** + * This is the class which will have the transaction enabled logging object + * details + * + * @author nilanjana.maity + * + */ +public class TransactionObj implements DMaaPTransactionObj { + + private String id; + private String createTime; + private long totalMessageCount; + private long successMessageCount; + private long failureMessageCount; + private JSONObject fData = new JSONObject(); + private TrnRequest trnRequest; + private static final String kAuxData = "transaction"; + + /** + * Initializing constructor + * put the json data for transaction enabled logging + * + * @param data + */ + public TransactionObj(JSONObject data) { + fData = data; + + // check for required fields (these throw if not present) + getId(); + getTotalMessageCount(); + getSuccessMessageCount(); + getFailureMessageCount(); + + // make sure we've got an aux data object + final JSONObject aux = fData.optJSONObject(kAuxData); + if (aux == null) { + fData.put(kAuxData, new JSONObject()); + } + } + + /** + * this constructor will have the details of transaction id, + * totalMessageCount successMessageCount, failureMessageCount to get the + * transaction object + * + * @param id + * @param totalMessageCount + * @param successMessageCount + * @param failureMessageCount + */ + public TransactionObj(String id, long totalMessageCount, long successMessageCount, long failureMessageCount) { + this.id = id; + this.totalMessageCount = totalMessageCount; + this.successMessageCount = successMessageCount; + this.failureMessageCount = failureMessageCount; + + } + + /** + * The constructor passing only transaction id + * + * @param id + */ + public TransactionObj(String id) { + this.id = id; + } + + /** + * Wrapping the data into json object + * + * @return JSONObject + */ + public JSONObject asJsonObject() { + final JSONObject full = new JSONObject(fData, JSONObject.getNames(fData)); + return full; + } + + /** + * To get the transaction id + */ + public String getId() { + return id; + } + + /** + * To set the transaction id + */ + public void setId(String id) { + this.id = id; + } + + /** + * + * @return + */ + public String getCreateTime() { + return createTime; + } + + /** + * + * @param createTime + */ + public void setCreateTime(String createTime) { + this.createTime = createTime; + } + + @Override + public String serialize() { + fData.put("transactionId", id); + fData.put("totalMessageCount", totalMessageCount); + fData.put("successMessageCount", successMessageCount); + fData.put("failureMessageCount", failureMessageCount); + return fData.toString(); + } + + public long getTotalMessageCount() { + return totalMessageCount; + } + + public void setTotalMessageCount(long totalMessageCount) { + this.totalMessageCount = totalMessageCount; + } + + public long getSuccessMessageCount() { + return successMessageCount; + } + + public void setSuccessMessageCount(long successMessageCount) { + this.successMessageCount = successMessageCount; + } + + public long getFailureMessageCount() { + return failureMessageCount; + } + + /** + * @param failureMessageCount + */ + public void setFailureMessageCount(long failureMessageCount) { + this.failureMessageCount = failureMessageCount; + } + + /** + * + * @return JSOnObject fData + */ + public JSONObject getfData() { + return fData; + } + + /** + * set the json object into data + * + * @param fData + */ + public void 
setfData(JSONObject fData) { + this.fData = fData; + } + + /** + * + * @return + */ + public TrnRequest getTrnRequest() { + return trnRequest; + } + + /** + * + * @param trnRequest + */ + public void setTrnRequest(TrnRequest trnRequest) { + this.trnRequest = trnRequest; + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/TrnRequest.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/TrnRequest.java new file mode 100644 index 0000000..138ebea --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/transaction/TrnRequest.java @@ -0,0 +1,183 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.transaction; + +/** + * Created for transaction enable logging details, this is nothing but a bean + * class. 
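TransactionObj above keeps its state both in plain fields and in the backing fData JSONObject; serialize() copies the id and counters into fData and returns the JSON text, while asJsonObject() returns a copy of fData. A small round-trip sketch in plain Java, with no DMaaP wiring assumed and an illustrative transaction id:

```java
import org.json.JSONObject;
import org.onap.dmaap.dmf.mr.transaction.TransactionObj;

public class TransactionObjSketch {

    public static void main(String[] args) {
        // Counter-based constructor: id, total, success, failure.
        final TransactionObj trn = new TransactionObj("txn-001", 10L, 9L, 1L);

        // serialize() writes transactionId/totalMessageCount/successMessageCount/
        // failureMessageCount into the backing JSON and returns it as text.
        final String json = trn.serialize();
        System.out.println(json);

        // asJsonObject() exposes a copy of that same backing JSON.
        final JSONObject copy = trn.asJsonObject();
        System.out.println(copy.getLong("totalMessageCount"));   // 10
    }
}
```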
+ * + * @author nilanjana.maity + * + */ +public class TrnRequest { + + private String id; + private String requestCreate; + private String requestHost; + private String serverHost; + private String messageProceed; + private String totalMessage; + private String clientType; + private String url; + + /** + * + * + * + * @return id + * + */ + public String getId() { + return id; + } + + /** + * + * + * @param id + */ + public void setId(String id) { + this.id = id; + } + + /** + * + * + * @return requestCreate + */ + public String getRequestCreate() { + return requestCreate; + } + + /** + * + * @param requestCreate + */ + public void setRequestCreate(String requestCreate) { + this.requestCreate = requestCreate; + } + + /** + * + * @return + */ + public String getRequestHost() { + return requestHost; + } + + /** + * + * @param requestHost + */ + public void setRequestHost(String requestHost) { + this.requestHost = requestHost; + } + + /** + * + * + * + * @return + */ + public String getServerHost() { + return serverHost; + } + + /** + * + * @param serverHost + */ + public void setServerHost(String serverHost) { + this.serverHost = serverHost; + } + + /** + * + * + * + * @return + */ + public String getMessageProceed() { + return messageProceed; + } + + /** + * + * @param messageProceed + */ + public void setMessageProceed(String messageProceed) { + this.messageProceed = messageProceed; + } + + /** + * + * @return + */ + public String getTotalMessage() { + return totalMessage; + } + + /** + * + * @param totalMessage + * + * + */ + public void setTotalMessage(String totalMessage) { + this.totalMessage = totalMessage; + } + + /** + * + * @return + */ + public String getClientType() { + return clientType; + } + + /** + * + * @param clientType + * + */ + public void setClientType(String clientType) { + this.clientType = clientType; + } + + /** + * + * @return + */ + public String getUrl() { + return url; + } + + /** + * + * @param url + * + */ + public void setUrl(String url) { + this.url = url; + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java new file mode 100644 index 0000000..31bc6ca --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java @@ -0,0 +1,62 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.transaction.impl; + +import org.json.JSONObject; + +import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionFactory; +import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObj; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; + +/** + * A factory for the simple Transaction implementation + * + * + * @author nilanjana.maity + * + */ +public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory { + /** + * + * @param data + * @return DMaaPTransactionObj + */ + @Override + public DMaaPTransactionObj makeNewTransactionObj(String data) { + JSONObject jsonObject = new JSONObject(data); + return new TransactionObj(jsonObject.getString("transactionId"), jsonObject.getLong("totalMessageCount"), + jsonObject.getLong("successMessageCount"), jsonObject.getLong("failureMessageCount")); + } + + /** + * + * @param id + * @return TransactionObj + * + * + */ + @Override + public DMaaPTransactionObj makeNewTransactionId(String id) { + return new TransactionObj(id); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/ConfigurationReader.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/ConfigurationReader.java new file mode 100644 index 0000000..3462567 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/utils/ConfigurationReader.java @@ -0,0 +1,492 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
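DMaaPSimpleTransactionFactory rebuilds a TransactionObj from the same four JSON fields that TransactionObj.serialize() emits, and makeNewTransactionId wraps a bare id in a fresh object. A short rehydration sketch; the JSON literal and ids are illustrative only:

```java
import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObj;
import org.onap.dmaap.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory;

public class TransactionFactorySketch {

    public static void main(String[] args) {
        final DMaaPSimpleTransactionFactory factory = new DMaaPSimpleTransactionFactory();

        // Rehydrate from serialized form: all four counters must be present,
        // otherwise org.json throws on the missing key.
        final String json = "{\"transactionId\":\"txn-001\",\"totalMessageCount\":10,"
                + "\"successMessageCount\":9,\"failureMessageCount\":1}";
        final DMaaPTransactionObj fromJson = factory.makeNewTransactionObj(json);
        System.out.println(fromJson.getTotalMessageCount());     // 10

        // Mint a fresh object that so far carries only an id.
        final DMaaPTransactionObj fresh = factory.makeNewTransactionId("txn-002");
        System.out.println(fresh.getId());                       // txn-002
    }
}
```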
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.utils; + +import javax.servlet.ServletException; + +import org.I0Itec.zkclient.ZkClient; +import org.apache.curator.framework.CuratorFramework; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.stereotype.Component; + +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.backends.Publisher; +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException; +import org.onap.dmaap.dmf.mr.backends.memory.MemoryConsumerFactory; +import org.onap.dmaap.dmf.mr.backends.memory.MemoryMetaBroker; +import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueue; +import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueuePublisher; +import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.beans.DMaaPZkConfigDb; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.metabroker.Broker; + +import org.onap.dmaap.dmf.mr.metabroker.Broker1; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator; +import org.onap.dmaap.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.configs.confimpl.MemConfigDb; +import com.att.nsa.drumlin.till.nv.rrNvReadable; +import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.limits.Blacklist; +import com.att.nsa.security.NsaAuthenticatorService; + +import com.att.nsa.security.db.BaseNsaApiDbImpl; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.NsaApiDb.KeyExistsException; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; +import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory; + +/** + * Class is created for all the configuration for rest and service layer + * integration. 
+ * + */ +@Component +public class ConfigurationReader { + + + private Broker1 fMetaBroker; + private ConsumerFactory fConsumerFactory; + private Publisher fPublisher; + private MetricsSet fMetrics; + @Autowired + private DMaaPCambriaLimiter fRateLimiter; + private NsaApiDb fApiKeyDb; + + private DMaaPAuthenticator fSecurityManager; + private NsaAuthenticatorService nsaSecurityManager; + private static CuratorFramework curator; + private ZkClient zk; + private DMaaPZkConfigDb fConfigDb; + private MemoryQueue q; + private MemoryMetaBroker mmb; + private Blacklist fIpBlackList; + private Emailer fEmailer; + + private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class); + + + /** + * constructor to initialize all the values + * + * @param settings + * @param fMetrics + * @param zk + * @param fConfigDb + * @param fPublisher + * @param curator + * @param fConsumerFactory + * @param fMetaBroker + * @param q + * @param mmb + * @param fApiKeyDb + * @param fSecurityManager + * @throws missingReqdSetting + * @throws invalidSettingValue + * @throws ServletException + * @throws KafkaConsumerCacheException + * @throws ConfigDbException + * @throws KeyExistsException + */ + @Autowired + public ConfigurationReader(@Qualifier("propertyReader") rrNvReadable settings, + @Qualifier("dMaaPMetricsSet") MetricsSet fMetrics, @Qualifier("dMaaPZkClient") ZkClient zk, + @Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher, + @Qualifier("curator") CuratorFramework curator, + @Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory, + @Qualifier("dMaaPKafkaMetaBroker") Broker1 fMetaBroker, + @Qualifier("q") MemoryQueue q, + @Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb fApiKeyDb, + /* + * @Qualifier("dMaaPTranDb") + * DMaaPTransactionObjDB fTranDb, + */ + @Qualifier("dMaaPAuthenticatorImpl") DMaaPAuthenticator fSecurityManager + ) + throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException, KeyExistsException { + + this.fMetrics = fMetrics; + this.zk = zk; + this.fConfigDb = fConfigDb; + this.fPublisher = fPublisher; + ConfigurationReader.curator = curator; + this.fConsumerFactory = fConsumerFactory; + this.fMetaBroker = fMetaBroker; + + this.q = q; + this.mmb = mmb; + this.fApiKeyDb = fApiKeyDb; + + this.fSecurityManager = fSecurityManager; + + long allowedtimeSkewMs=600000L; + String strallowedTimeSkewM= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"authentication.allowedTimeSkewMs"); + if(null!=strallowedTimeSkewM)allowedtimeSkewMs= Long.parseLong(strallowedTimeSkewM); + + + //String strrequireSecureChannel= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"aauthentication.requireSecureChannel"); + //if(strrequireSecureChannel!=null)requireSecureChannel=Boolean.parseBoolean(strrequireSecureChannel); + //this.nsaSecurityManager = new NsaAuthenticatorService(this.fApiKeyDb, settings.getLong("authentication.allowedTimeSkewMs", 600000L), settings.getBoolean("authentication.requireSecureChannel", true)); + //this.nsaSecurityManager = new NsaAuthenticatorService(this.fApiKeyDb, allowedtimeSkewMs, requireSecureChannel); + + servletSetup(); + } + + protected void servletSetup() + throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, ConfigDbException, KeyExistsException { + try { + + fMetrics.toJson(); + 
fMetrics.setupCambriaSender(); + // add the admin authenticator + + final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret); + + if ( adminSecret != null && adminSecret.length () > 0 ) + { + + final NsaApiDb adminDb = new BaseNsaApiDbImpl ( new MemConfigDb(), new NsaSimpleApiKeyFactory() ); + adminDb.createApiKey ( "admin", adminSecret ); + + fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator ( adminDb, 10*60*1000 ) ); + + } + + // setup a backend + + String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType); + if (type==null) type = CambriaConstants.kBrokerType_Kafka; + if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) { + log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka); + } else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) { + log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory); + fPublisher = new MemoryQueuePublisher(q, mmb); + //Ramkumar remove below + // fMetaBroker = mmb; + fConsumerFactory = new MemoryConsumerFactory(q); + } else { + throw new IllegalArgumentException( + "Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + "."); + } + fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) ); + this.fEmailer = new Emailer(); + log.info("Broker Type is:" + type); + + } catch (SecurityException e) { + throw new ServletException(e); + } + } + + /** + * method returns metaBroker + * + * @return + */ + public Broker1 getfMetaBroker() { + return fMetaBroker; + } + + /** + * method to set the metaBroker + * + * @param fMetaBroker + */ + public void setfMetaBroker(Broker1 fMetaBroker) { + this.fMetaBroker = fMetaBroker; + } + + /** + * method to get ConsumerFactory Object + * + * @return + */ + public ConsumerFactory getfConsumerFactory() { + return fConsumerFactory; + } + + /** + * method to set the consumerfactory object + * + * @param fConsumerFactory + */ + public void setfConsumerFactory(ConsumerFactory fConsumerFactory) { + this.fConsumerFactory = fConsumerFactory; + } + + /** + * method to get Publisher object + * + * @return + */ + public Publisher getfPublisher() { + return fPublisher; + } + + /** + * method to set Publisher object + * + * @param fPublisher + */ + public void setfPublisher(Publisher fPublisher) { + this.fPublisher = fPublisher; + } + + /** + * method to get MetricsSet Object + * + * @return + */ + public MetricsSet getfMetrics() { + return fMetrics; + } + + /** + * method to set MetricsSet Object + * + * @param fMetrics + */ + public void setfMetrics(MetricsSet fMetrics) { + this.fMetrics = fMetrics; + } + + /** + * method to get DMaaPCambriaLimiter object + * + * @return + */ + public DMaaPCambriaLimiter getfRateLimiter() { + return fRateLimiter; + } + + /** + * method to set DMaaPCambriaLimiter object + * + * @param fRateLimiter + */ + public void setfRateLimiter(DMaaPCambriaLimiter fRateLimiter) { + this.fRateLimiter = fRateLimiter; + } + + /** + * Method to get DMaaPAuthenticator object + * + * @return + */ + public DMaaPAuthenticator getfSecurityManager() { + return fSecurityManager; + } + + /** + * method to set DMaaPAuthenticator object + * + * @param fSecurityManager + */ + public void setfSecurityManager(DMaaPAuthenticator fSecurityManager) { + this.fSecurityManager = fSecurityManager; + } + + /** + * method to get rrNvReadable object + * + * @return + */ + /*public 
rrNvReadable getSettings() { + return settings; + }*/ + + /** + * method to set rrNvReadable object + * + * @param settings + */ + /*public void setSettings(rrNvReadable settings) { + this.settings = settings; + }*/ + + /** + * method to get CuratorFramework object + * + * @return + */ + public static CuratorFramework getCurator() { + return curator; + } + + /** + * method to set CuratorFramework object + * + * @param curator + */ + public static void setCurator(CuratorFramework curator) { + ConfigurationReader.curator = curator; + } + + /** + * method to get ZkClient object + * + * @return + */ + public ZkClient getZk() { + return zk; + } + + /** + * method to set ZkClient object + * + * @param zk + */ + public void setZk(ZkClient zk) { + this.zk = zk; + } + + /** + * method to get DMaaPZkConfigDb object + * + * @return + */ + public DMaaPZkConfigDb getfConfigDb() { + return fConfigDb; + } + + /** + * method to set DMaaPZkConfigDb object + * + * @param fConfigDb + */ + public void setfConfigDb(DMaaPZkConfigDb fConfigDb) { + this.fConfigDb = fConfigDb; + } + + /** + * method to get MemoryQueue object + * + * @return + */ + public MemoryQueue getQ() { + return q; + } + + /** + * method to set MemoryQueue object + * + * @param q + */ + public void setQ(MemoryQueue q) { + this.q = q; + } + + /** + * method to get MemoryMetaBroker object + * + * @return + */ + public MemoryMetaBroker getMmb() { + return mmb; + } + + /** + * method to set MemoryMetaBroker object + * + * @param mmb + */ + public void setMmb(MemoryMetaBroker mmb) { + this.mmb = mmb; + } + + /** + * method to get NsaApiDb object + * + * @return + */ + public NsaApiDb getfApiKeyDb() { + return fApiKeyDb; + } + + /** + * method to set NsaApiDb object + * + * @param fApiKeyDb + */ + public void setfApiKeyDb(NsaApiDb fApiKeyDb) { + this.fApiKeyDb = fApiKeyDb; + } + + /* + * public DMaaPTransactionObjDB getfTranDb() { return + * fTranDb; } + * + * public void setfTranDb(DMaaPTransactionObjDB + * fTranDb) { this.fTranDb = fTranDb; } + */ + /** + * method to get the zookeeper connection String + * + * @param settings + * @return + */ + public static String getMainZookeeperConnectionString() { + //return settings.getString(CambriaConstants.kSetting_ZkConfigDbServers, CambriaConstants.kDefault_ZkConfigDbServers); + + String zkServername = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers); + if (zkServername==null) zkServername=CambriaConstants.kDefault_ZkConfigDbServers; + return zkServername; + } + + public static String getMainZookeeperConnectionSRoot(){ + String strVal=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot); + + if (null==strVal) + strVal=CambriaConstants.kDefault_ZkConfigDbRoot; + + return strVal; + } + + public Blacklist getfIpBlackList() { + return fIpBlackList; + } + + public void setfIpBlackList(Blacklist fIpBlackList) { + this.fIpBlackList = fIpBlackList; + } + + public NsaAuthenticatorService getNsaSecurityManager() { + return nsaSecurityManager; + } + + public void setNsaSecurityManager(NsaAuthenticatorService nsaSecurityManager) { + this.nsaSecurityManager = nsaSecurityManager; + } + + public Emailer getSystemEmailer() + { + return this.fEmailer; + } + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPCuratorFactory.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPCuratorFactory.java new file mode 100644 index 0000000..a5afa75 --- /dev/null +++ 
b/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPCuratorFactory.java @@ -0,0 +1,69 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.utils; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.CuratorFrameworkFactory; +import org.apache.curator.retry.ExponentialBackoffRetry; + +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import com.att.nsa.drumlin.till.nv.rrNvReadable; + +/** + * + * + * @author anowarul.islam + * + * + */ +public class DMaaPCuratorFactory { + /** + * + * method provide CuratorFramework object + * + * @param settings + * @return + * + * + * + */ + public static CuratorFramework getCurator(rrNvReadable settings) { + String Setting_ZkConfigDbServers =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkConfigDbServers); + + if(null==Setting_ZkConfigDbServers) + Setting_ZkConfigDbServers =CambriaConstants.kDefault_ZkConfigDbServers; + + String strSetting_ZkSessionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs); + if (strSetting_ZkSessionTimeoutMs==null) strSetting_ZkSessionTimeoutMs = CambriaConstants.kDefault_ZkSessionTimeoutMs+""; + int Setting_ZkSessionTimeoutMs = Integer.parseInt(strSetting_ZkSessionTimeoutMs); + + String str_ZkConnectionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs); + if (str_ZkConnectionTimeoutMs==null) str_ZkConnectionTimeoutMs = CambriaConstants.kDefault_ZkConnectionTimeoutMs+""; + int setting_ZkConnectionTimeoutMs = Integer.parseInt(str_ZkConnectionTimeoutMs); + + + CuratorFramework curator = CuratorFrameworkFactory.newClient( + Setting_ZkConfigDbServers,Setting_ZkSessionTimeoutMs,setting_ZkConnectionTimeoutMs + ,new ExponentialBackoffRetry(1000, 5)); + return curator; + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPResponseBuilder.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPResponseBuilder.java new file mode 100644 index 0000000..3630086 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/utils/DMaaPResponseBuilder.java @@ -0,0 +1,370 @@ +/******************************************************************************* + * 
============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.utils; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.io.Writer; + +import javax.servlet.http.HttpServletResponse; + +import org.json.JSONException; +import org.json.JSONObject; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/** + * class is used to create response object which is given to user + * + * @author nilanjana.maity + * + */ + +public class DMaaPResponseBuilder { + + + private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPResponseBuilder.class); + protected static final int kBufferLength = 4096; + + public static void setNoCacheHeadings(DMaaPContext ctx) { + HttpServletResponse response = ctx.getResponse(); + response.addHeader("Cache-Control", "no-store, no-cache, must-revalidate"); + response.addHeader("Pragma", "no-cache"); + response.addHeader("Expires", "0"); + } + + /** + * static method is used to create response object associated with + * JSONObject + * + * @param ctx + * @param result + * @throws JSONException + * @throws IOException + */ + public static void respondOk(DMaaPContext ctx, JSONObject result) throws JSONException, IOException { + + respondOkWithStream(ctx, "application/json", new ByteArrayInputStream(result.toString(4).getBytes())); + + } + + /** + * method used to set staus to 204 + * + * @param ctx + */ + public static void respondOkNoContent(DMaaPContext ctx) { + try { + ctx.getResponse().setStatus(204); + } catch (Exception excp) { + log.error(excp.getMessage(), excp); + } + } + + /** + * static method is used to create response object associated with html + * + * @param ctx + * @param html + */ + public static void respondOkWithHtml(DMaaPContext ctx, String html) { + try { + respondOkWithStream(ctx, "text/html", new ByteArrayInputStream(html.toString().getBytes())); + } catch (Exception excp) { + log.error(excp.getMessage(), excp); + } + } + + /** + * method used to create response object associated with InputStream + * + * @param ctx + * @param mediaType + * @param is + * @throws IOException + */ + public static void respondOkWithStream(DMaaPContext ctx, String mediaType, final InputStream is) + throws IOException { + /* + * creates response object 
associated with streamwriter + */ + respondOkWithStream(ctx, mediaType, new StreamWriter() { + + public void write(OutputStream os) throws IOException { + copyStream(is, os); + } + }); + + } + + /** + * + * @param ctx + * @param mediaType + * @param writer + * @throws IOException + */ + public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException { + ctx.getResponse().setStatus(200); + try(OutputStream os = getStreamForBinaryResponse(ctx, mediaType)) { + writer.write(os); + } + + + } + + /** + * static method to create error objects + * + * @param ctx + * @param errCode + * @param msg + */ + public static void respondWithError(DMaaPContext ctx, int errCode, String msg) { + try { + ctx.getResponse().sendError(errCode, msg); + } catch (IOException excp) { + log.error(excp.getMessage(), excp); + } + } + + /** + * method to create error objects + * + * @param ctx + * @param errCode + * @param body + */ + public static void respondWithError(DMaaPContext ctx, int errCode, JSONObject body) { + try { + sendErrorAndBody(ctx, errCode, body.toString(4), "application/json"); + } catch (Exception excp) { + log.error(excp.getMessage(), excp); + } + } + + /** + * static method creates error object in JSON + * + * @param ctx + * @param errCode + * @param msg + */ + public static void respondWithErrorInJson(DMaaPContext ctx, int errCode, String msg) { + try { + JSONObject o = new JSONObject(); + o.put("status", errCode); + o.put("message", msg); + respondWithError(ctx, errCode, o); + + } catch (Exception excp) { + log.error(excp.getMessage(), excp); + } + } + + /** + * static method used to copy the stream with the help of another method + * copystream + * + * @param in + * @param out + * @throws IOException + */ + public static void copyStream(InputStream in, OutputStream out) throws IOException { + copyStream(in, out, 4096); + } + + /** + * static method to copy the streams + * + * @param in + * @param out + * @param bufferSize + * @throws IOException + */ + public static void copyStream(InputStream in, OutputStream out, int bufferSize) throws IOException { + byte[] buffer = new byte[bufferSize]; + int len; + while ((len = in.read(buffer)) != -1) { + out.write(buffer, 0, len); + } + out.close(); + } + + /** + * interface used to define write method for outputStream + */ + public abstract static interface StreamWriter { + /** + * abstract method used to write the response + * + * @param paramOutputStream + * @throws IOException + */ + public abstract void write(OutputStream paramOutputStream) throws IOException; + } + + /** + * static method returns stream for binary response + * + * @param ctx + * @return + * @throws IOException + */ + public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx) throws IOException { + return getStreamForBinaryResponse(ctx, "application/octet-stream"); + } + + /** + * static method returns stream for binaryResponses + * + * @param ctx + * @param contentType + * @return + * @throws IOException + */ + public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException { + ctx.getResponse().setContentType(contentType); + + + boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD"))); + + if (fResponseEntityAllowed) { + try(OutputStream os = ctx.getResponse().getOutputStream()){ + return os; + }catch (Exception e){ + log.error("Exception in getStreamForBinaryResponse",e); + throw new IOException(); + } + } else { + try(OutputStream os = 
new NullStream()){ + return os; + }catch (Exception e){ + log.error("Exception in getStreamForBinaryResponse",e); + throw new IOException(); + } + } + } + + /** + * + * @author anowarul.islam + * + */ + private static class NullStream extends OutputStream { + /** + * @param b + * integer + */ + public void write(int b) { + } + } + + private static class NullWriter extends Writer { + /** + * write method + * @param cbuf + * @param off + * @param len + */ + public void write(char[] cbuf, int off, int len) { + } + + /** + * flush method + */ + public void flush() { + } + + /** + * close method + */ + public void close() { + } + } + + /** + * sttaic method fetch stream for text + * + * @param ctx + * @param err + * @param content + * @param mimeType + */ + public static void sendErrorAndBody(DMaaPContext ctx, int err, String content, String mimeType) { + try { + setStatus(ctx, err); + getStreamForTextResponse(ctx, mimeType).println(content); + } catch (IOException e) { + log.error(new StringBuilder().append("Error sending error response: ").append(e.getMessage()).toString(), + e); + } + } + + /** + * method to set the code + * + * @param ctx + * @param code + */ + public static void setStatus(DMaaPContext ctx, int code) { + ctx.getResponse().setStatus(code); + } + + /** + * static method returns stream for text response + * + * @param ctx + * @return + * @throws IOException + */ + public static PrintWriter getStreamForTextResponse(DMaaPContext ctx) throws IOException { + return getStreamForTextResponse(ctx, "text/html"); + } + + /** + * static method returns stream for text response + * + * @param ctx + * @param contentType + * @return + * @throws IOException + */ + public static PrintWriter getStreamForTextResponse(DMaaPContext ctx, String contentType) throws IOException { + ctx.getResponse().setContentType(contentType); + + PrintWriter pw = null; + boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD"))); + + if (fResponseEntityAllowed) { + pw = ctx.getResponse().getWriter(); + } else { + pw = new PrintWriter(new NullWriter()); + } + return pw; + } +} \ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/Emailer.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/Emailer.java new file mode 100644 index 0000000..cb4fcdc --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/utils/Emailer.java @@ -0,0 +1,211 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
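DMaaPResponseBuilder above is the single point where the service layer turns results into HTTP responses: respondOk streams a JSONObject as application/json, respondOkWithHtml writes text/html, and respondWithErrorInJson wraps a status/message pair in a small JSON error body. A hedged handler-style sketch, assuming the DMaaPContext already wraps live request and response objects:

```java
import java.io.IOException;

import org.json.JSONException;
import org.json.JSONObject;
import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;

public class ResponseSketch {

    /** Sends either a JSON payload or a JSON-formatted error through the builder. */
    public void respond(DMaaPContext ctx, JSONObject payload, boolean ok) throws IOException, JSONException {
        DMaaPResponseBuilder.setNoCacheHeadings(ctx);   // adds Cache-Control/Pragma/Expires headers
        if (ok) {
            DMaaPResponseBuilder.respondOk(ctx, payload);                          // 200, application/json
        } else {
            DMaaPResponseBuilder.respondWithErrorInJson(ctx, 400, "bad request");  // {"status":400,"message":...}
        }
    }
}
```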
+ * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.utils; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import javax.mail.BodyPart; +import javax.mail.Message; +import javax.mail.Multipart; +import javax.mail.PasswordAuthentication; +import javax.mail.Session; +import javax.mail.Transport; +import javax.mail.internet.InternetAddress; +import javax.mail.internet.MimeBodyPart; +import javax.mail.internet.MimeMessage; +import javax.mail.internet.MimeMultipart; + + + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/** + * Send an email from a message. + * + * @author peter + */ +public class Emailer +{ + public static final String kField_To = "to"; + public static final String kField_Subject = "subject"; + public static final String kField_Message = "message"; + + public Emailer() + { + fExec = Executors.newCachedThreadPool (); + + } + + public void send ( String to, String subj, String body ) throws IOException + { + final String[] addrs = to.split ( "," ); + + if ( to.length () > 0 ) + { + final MailTask mt = new MailTask ( addrs, subj, body ); + fExec.submit ( mt ); + } + else + { + log.warn ( "At least one address is required." ); + } + } + + public void close () + { + fExec.shutdown (); + } + + private final ExecutorService fExec; + + + + + private static final EELFLogger log = EELFManager.getInstance().getLogger(Emailer.class); + + public static final String kSetting_MailAuthUser = "mailLogin"; + public static final String kSetting_MailFromEmail = "mailFromEmail"; + public static final String kSetting_MailFromName = "mailFromName"; + public static final String kSetting_SmtpServer = "mailSmtpServer"; + public static final String kSetting_SmtpServerPort = "mailSmtpServerPort"; + public static final String kSetting_SmtpServerSsl = "mailSmtpServerSsl"; + public static final String kSetting_SmtpServerUseAuth = "mailSmtpServerUseAuth"; + + private class MailTask implements Runnable + { + public MailTask ( String[] to, String subject, String msgBody ) + { + fToAddrs = to; + fSubject = subject; + fBody = msgBody; + } + + private String getSetting ( String settingKey, String defval ) + { + + String strSet = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,settingKey); + if(strSet==null)strSet=defval; + return strSet; + } + + // we need to get setting values from the evaluator but also the channel config + private void makeSetting ( Properties props, String propKey, String settingKey, String defval ) + { + props.put ( propKey, getSetting ( settingKey, defval ) ); + } + + private void makeSetting ( Properties props, String propKey, String settingKey, int defval ) + { + makeSetting ( props, propKey, settingKey, "" + defval ); + } + + private void makeSetting ( Properties props, String propKey, String settingKey, boolean defval ) + { + makeSetting ( props, propKey, settingKey, "" + defval ); + } + + @Override + public void run () + { + final StringBuffer tag = new StringBuffer (); + final StringBuffer addrList = new StringBuffer (); + tag.append ( "(" ); + for ( String to : fToAddrs ) + { + if ( 
addrList.length () > 0 ) + { + addrList.append ( ", " ); + } + addrList.append ( to ); + } + tag.append ( addrList.toString () ); + tag.append ( ") \"" ); + tag.append ( fSubject ); + tag.append ( "\"" ); + + log.info ( "sending mail to " + tag ); + + try + { + final Properties prop = new Properties (); + makeSetting ( prop, "mail.smtp.port", kSetting_SmtpServerPort, 587 ); + prop.put ( "mail.smtp.socketFactory.fallback", "false" ); + prop.put ( "mail.smtp.quitwait", "false" ); + makeSetting ( prop, "mail.smtp.host", kSetting_SmtpServer, "smtp.it.att.com" ); + makeSetting ( prop, "mail.smtp.auth", kSetting_SmtpServerUseAuth, true ); + makeSetting ( prop, "mail.smtp.starttls.enable", kSetting_SmtpServerSsl, true ); + + final String un = getSetting ( kSetting_MailAuthUser, "" ); + final String value=(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"mailPassword")!=null)?AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"mailPassword"):""; + final Session session = Session.getInstance ( prop, + new javax.mail.Authenticator() + { + @Override + protected PasswordAuthentication getPasswordAuthentication() + { + return new PasswordAuthentication ( un, value ); + } + } + ); + + final Message msg = new MimeMessage ( session ); + + final InternetAddress from = new InternetAddress ( + getSetting ( kSetting_MailFromEmail, "team@sa2020.it.att.com" ), + getSetting ( kSetting_MailFromName, "The GFP/SA2020 Team" ) ); + msg.setFrom ( from ); + msg.setReplyTo ( new InternetAddress[] { from } ); + msg.setSubject ( fSubject ); + + for ( String toAddr : fToAddrs ) + { + final InternetAddress to = new InternetAddress ( toAddr ); + msg.addRecipient ( Message.RecipientType.TO, to ); + } + + final Multipart multipart = new MimeMultipart ( "related" ); + final BodyPart htmlPart = new MimeBodyPart (); + htmlPart.setContent ( fBody, "text/plain" ); + multipart.addBodyPart ( htmlPart ); + msg.setContent ( multipart ); + + Transport.send ( msg ); + + log.info ( "mailing " + tag + " off without error" ); + } + catch ( Exception e ) + { + log.warn ( "Exception caught for " + tag, e ); + } + } + + private final String[] fToAddrs; + private final String fSubject; + private final String fBody; + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/PropertyReader.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/PropertyReader.java new file mode 100644 index 0000000..ee503af --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/utils/PropertyReader.java @@ -0,0 +1,125 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.utils; + +import java.util.Map; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.drumlin.till.nv.impl.nvReadableStack; + +/** + * + * @author nilesh.labde + * + * + */ +public class PropertyReader extends nvReadableStack { + /** + * + * initializing logger + * + */ + + private static final EELFLogger log = EELFManager.getInstance().getLogger(PropertyReader.class); + + + /** + * constructor initialization + * + * @throws loadException + * + */ + public PropertyReader() throws loadException { + + + + + + } + + /** + * + * + * @param argMap + * @param key + * @param defaultValue + * @return + * + */ + @SuppressWarnings("unused") + private static String getSetting(Map argMap, final String key, final String defaultValue) { + String val = (String) argMap.get(key); + if (null == val) { + return defaultValue; + } + return val; + } + + /** + * + * @param resourceName + * @param clazz + * @return + * @exception MalformedURLException + * + */ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/utils/Utils.java b/src/main/java/org/onap/dmaap/dmf/mr/utils/Utils.java new file mode 100644 index 0000000..f568d77 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/utils/Utils.java @@ -0,0 +1,175 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.utils; + +import java.io.IOException; +import java.io.InputStream; +import java.text.DecimalFormat; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Enumeration; +import java.util.LinkedList; +import java.util.List; +import java.util.Properties; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaPublisher; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +/** + * This is a utility class for various formatting operations + * @author nilanjana.maity + * + */ +public class Utils { + + private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS"; + public static final String CAMBRIA_AUTH_HEADER = "X-CambriaAuth"; + private static final String BATCH_ID_FORMAT = "000000"; + private static final EELFLogger log = EELFManager.getInstance().getLogger(Utils.class); + + private Utils() { + super(); + } + + /** + * Formatting the date + * @param date + * @return formatted date or null + */ + public static String getFormattedDate(Date date) { + SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT); + if (null != date){ + return sdf.format(date); + } + return null; + } + /** + * to get the details of User Api Key + * @param request + * @return authkey or null + */ + public static String getUserApiKey(HttpServletRequest request) { + final String auth = request.getHeader(CAMBRIA_AUTH_HEADER); + if (null != auth) { + final String[] splittedAuthKey = auth.split(":"); + return splittedAuthKey[0]; + }else if (null!=request.getHeader("Authorization")){ + /** + * AAF implementation enhancement + */ + String user= request.getUserPrincipal().getName().toString(); + return user.substring(0, user.lastIndexOf("@")); + } + return null; + } + /** + * to format the batch sequence id + * @param batchId + * @return batchId + */ + public static String getFromattedBatchSequenceId(Long batchId) { + DecimalFormat format = new DecimalFormat(BATCH_ID_FORMAT); + return format.format(batchId); + } + + /** + * to get the message length in bytes + * @param message + * @return bytes or 0 + */ + public static long messageLengthInBytes(String message) { + if (null != message) { + return message.getBytes().length; + } + return 0; + } + /** + * To get transaction id details + * @param transactionId + * @return transactionId or null + */ + public static String getResponseTransactionId(String transactionId) { + if (null != transactionId && !transactionId.isEmpty()) { + return transactionId.substring(0, transactionId.lastIndexOf("::")); + } + return null; + } + + /** + * get the thread sleep time for a given publish rate + * @param ratePerMinute + * @return sleep time in milliseconds, or 0 + */ + public static long getSleepMsForRate ( double ratePerMinute ) + { + if ( ratePerMinute <= 0.0 ) return 0; + return Math.max ( 1000, Math.round ( 60 * 1000 / ratePerMinute ) ); + } + + public static String getRemoteAddress(DMaaPContext ctx) + { + String reqAddr = ctx.getRequest().getRemoteAddr(); + String fwdHeader = getFirstHeader("X-Forwarded-For",ctx); + return ((fwdHeader != null) ? fwdHeader : reqAddr); + } + public static String getFirstHeader(String h,DMaaPContext ctx) + { + List l = getHeader(h,ctx); + return ((l.size() > 0) ?
(String)l.iterator().next() : null); + } + public static List getHeader(String h,DMaaPContext ctx) + { + LinkedList list = new LinkedList(); + Enumeration e = ctx.getRequest().getHeaders(h); + while (e.hasMoreElements()) + { + list.add(e.nextElement().toString()); + } + return list; + } + + public static String getKafkaproperty(){ + InputStream input = new Utils().getClass().getResourceAsStream("/kafka.properties"); + Properties props = new Properties(); + try { + props.load(input); + } catch (IOException e) { + log.error("failed to read kafka.properties"); + } + return props.getProperty("key"); + + + } + + public static boolean isCadiEnabled(){ + boolean enableCadi=false; + if(System.getenv("enableCadi")!=null){ + // parse the value of the enableCadi environment variable + enableCadi=Boolean.parseBoolean(System.getenv("enableCadi")); + } + + return enableCadi; + } + +} diff --git a/src/main/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java b/src/main/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java new file mode 100644 index 0000000..76999e1 --- /dev/null +++ b/src/main/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java @@ -0,0 +1,197 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.apiServer.metrics.cambria; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; + +import org.json.JSONException; +import org.json.JSONObject; +//import org.slf4j.Logger; +//import org.slf4j.LoggerFactory; + +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaPublisher; +import org.onap.dmaap.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.apiServer.metrics.cambria.MetricsSender; +import com.att.nsa.metrics.CdmMetricsRegistry; +import com.att.nsa.metrics.impl.CdmConstant; + +/** + * MetricsSender will send the given metrics registry content as an event on the + * Cambria event broker to the given topic.
+ * + * @author peter + * + */ +public class DMaaPMetricsSender implements Runnable { + public static final String kSetting_CambriaEnabled = "metrics.send.cambria.enabled"; + public static final String kSetting_CambriaBaseUrl = "metrics.send.cambria.baseUrl"; + public static final String kSetting_CambriaTopic = "metrics.send.cambria.topic"; + public static final String kSetting_CambriaSendFreqSecs = "metrics.send.cambria.sendEverySeconds"; + + /** + * Schedule a periodic send of the given metrics registry using the given + * settings container for the Cambria location, topic, and send frequency. + *
+ *
+ * If the enabled flag is false, this method returns null. + * + * @param scheduler + * @param metrics + * @param defaultTopic + * @return a handle to the scheduled task + */ + public static ScheduledFuture sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics, + String defaultTopic) { + log.info("Inside : DMaaPMetricsSender : sendPeriodically"); + String cambriaSetting= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled); + boolean setEnable=true; + if (cambriaSetting!=null && cambriaSetting.equals("false") ) + setEnable= false; + + if (setEnable) { + // base URL of the Cambria/MR endpoint that metrics are published to + String Setting_CambriaBaseUrl=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaBaseUrl); + + Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?"localhost":Setting_CambriaBaseUrl; + + String Setting_CambriaTopic=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaTopic); + if(Setting_CambriaTopic==null) Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap"; + + + + String Setting_CambriaSendFreqSecs=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaSendFreqSecs); + + int _CambriaSendFreqSecs =30; + if(Setting_CambriaSendFreqSecs!=null){ + _CambriaSendFreqSecs = Integer.parseInt(Setting_CambriaSendFreqSecs); + } + + + return DMaaPMetricsSender.sendPeriodically(scheduler, metrics, + Setting_CambriaBaseUrl,Setting_CambriaTopic,_CambriaSendFreqSecs + ); + /*return DMaaPMetricsSender.sendPeriodically(scheduler, metrics, + settings.getString(kSetting_CambriaBaseUrl, "localhost"), + settings.getString(kSetting_CambriaTopic, defaultTopic), + settings.getInt(kSetting_CambriaSendFreqSecs, 30));*/ + } else { + return null; + } + } + + /** + * Schedule a periodic send of the metrics registry to the given Cambria + * broker and topic. + * + * @param scheduler + * @param metrics + * the registry to send + * @param cambriaBaseUrl + * the base URL for Cambria + * @param topic + * the topic to publish on + * @param everySeconds + * how frequently to publish + * @return a handle to the scheduled task + */ + public static ScheduledFuture sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics, + String cambriaBaseUrl, String topic, int everySeconds) { + return scheduler.scheduleAtFixedRate(new DMaaPMetricsSender(metrics, cambriaBaseUrl, topic), everySeconds, + everySeconds, TimeUnit.SECONDS); + } + + /** + * Create a metrics sender. + * + * @param metrics + * @param cambriaBaseUrl + * @param topic + */ + public DMaaPMetricsSender(CdmMetricsRegistry metrics, String cambriaBaseUrl, String topic) { + try { + fMetrics = metrics; + fHostname = InetAddress.getLocalHost().getHostName(); + + // setup a "simple" publisher that will send metrics immediately + fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic); + } catch (UnknownHostException e) { + log.warn("Unable to get localhost address in MetricsSender constructor.", e); + throw new RuntimeException(e); + } + } + + /** + * Send on demand.
+ */ + public void send () { + try { + final JSONObject o = fMetrics.toJson(); + o.put("hostname", fHostname); + o.put("now", System.currentTimeMillis()); + o.put("metricsSendTime", addTimeStamp()); + o.put("transactionEnabled", false); + fCambria.send(fHostname, o.toString()); + } catch (JSONException e) { + log.warn("Error posting metrics to Cambria: " + e.getMessage()); + } catch (IOException e) { + log.warn("Error posting metrics to Cambria: " + e.getMessage()); + } + } + + /** + * Run() calls send(). It's meant for use in a background-scheduled task. + */ + @Override + public void run() { + send(); + } + + private final CdmMetricsRegistry fMetrics; + private final CambriaPublisher fCambria; + private final String fHostname; + + + + private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsSender.class); + /** + * method creates and returns a CdmConstant object using the current timestamp + * + * @return + */ + public CdmConstant addTimeStamp() { + // Add the timestamp with every metrics send + final long metricsSendTime = System.currentTimeMillis(); + final Date d = new Date(metricsSendTime); + final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d); + return new CdmConstant(metricsSendTime / 1000, "Metrics Send Time (epoch); " + text); + } +} diff --git a/src/main/java/org/onap/dmaap/mr/filter/ContentLengthFilter.java b/src/main/java/org/onap/dmaap/mr/filter/ContentLengthFilter.java new file mode 100644 index 0000000..a175b16 --- /dev/null +++ b/src/main/java/org/onap/dmaap/mr/filter/ContentLengthFilter.java @@ -0,0 +1,134 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.filter; + +import java.io.IOException; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; + +import org.apache.http.HttpStatus; +import org.json.JSONObject; +import org.springframework.context.ApplicationContext; +import org.springframework.web.context.support.WebApplicationContextUtils; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +/** + * Servlet Filter implementation class ContentLengthFilter + */ +public class ContentLengthFilter implements Filter { + + private DefaultLength defaultLength; + + private FilterConfig filterConfig = null; + DMaaPErrorMessages errorMessages = null; + + private static final EELFLogger log = EELFManager.getInstance().getLogger(ContentLengthFilter.class); + /** + * Default constructor. + */ + + public ContentLengthFilter() { + // TODO Auto-generated constructor stub + } + + /** + * @see Filter#destroy() + */ + public void destroy() { + // TODO Auto-generated method stub + } + + /** + * @see Filter#doFilter(ServletRequest, ServletResponse, FilterChain) + */ + public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, + ServletException { + // TODO Auto-generated method stub + // place your code here + log.info("inside servlet do filter content length checking before pub/sub"); + HttpServletRequest request = (HttpServletRequest) req; + JSONObject jsonObj = null; + int requestLength = 0; + try { + // retrieving content length from message header + + if (null != request.getHeader("Content-Length")) { + requestLength = Integer.parseInt(request.getHeader("Content-Length")); + } + // retrieving encoding from message header + String transferEncoding = request.getHeader("Transfer-Encoding"); + // checking for no encoding, chunked and requestLength greater than + // default length + if (null != transferEncoding && !(transferEncoding.contains("chunked")) + && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) { + jsonObj = new JSONObject().append("defaultlength", defaultLength) + .append("requestlength", requestLength); + log.error("message length is greater than default"); + throw new CambriaApiException(jsonObj); + } else if (null == transferEncoding && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) { + jsonObj = new JSONObject().append("defaultlength", defaultLength.getDefaultLength()).append( + "requestlength", requestLength); + log.error("Request message is not chunked or request length is greater than default length"); + throw new CambriaApiException(jsonObj); + } else { + chain.doFilter(req, res); + } + } catch (CambriaApiException | NumberFormatException e) { + log.error("message size is greater than default"); + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, + DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), errorMessages.getMsgSizeExceeds() + + jsonObj.toString()); + log.info(errRes.toString()); + + } + + } + + /** + * @see
Filter#init(FilterConfig) + */ + public void init(FilterConfig fConfig) throws ServletException { + // TODO Auto-generated method stub + this.filterConfig = fConfig; + log.info("Filter Content Length Initialize"); + ApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(fConfig + .getServletContext()); + DefaultLength defLength = (DefaultLength) ctx.getBean("defLength"); + DMaaPErrorMessages errorMessages = (DMaaPErrorMessages) ctx.getBean("DMaaPErrorMessages"); + this.errorMessages = errorMessages; + this.defaultLength = defLength; + + } + +} diff --git a/src/main/java/org/onap/dmaap/mr/filter/DefaultLength.java b/src/main/java/org/onap/dmaap/mr/filter/DefaultLength.java new file mode 100644 index 0000000..598ef1b --- /dev/null +++ b/src/main/java/org/onap/dmaap/mr/filter/DefaultLength.java @@ -0,0 +1,37 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.filter; + + +public class DefaultLength { + + String defaultLength; + + public String getDefaultLength() { + return defaultLength; + } + + public void setDefaultLength(String defaultLength) { + this.defaultLength = defaultLength; + } + +} diff --git a/src/test/java/com/att/mr/test/dmaap/ApiKeyBean.java b/src/test/java/com/att/mr/test/dmaap/ApiKeyBean.java deleted file mode 100644 index 3c0bb79..0000000 --- a/src/test/java/com/att/mr/test/dmaap/ApiKeyBean.java +++ /dev/null @@ -1,72 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dmaap; - -import java.io.Serializable; - -public class ApiKeyBean implements Serializable { - - /*private static final long serialVersionUID = -8219849086890567740L; - - // private static final String KEY_CHARS = - // "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - - - private String email; - private String description; - - public ApiKeyBean() { - super(); - } - - public ApiKeyBean(String email, String description) { - super(); - this.email = email; - this.description = description; - } - - public String getEmail() { - return email; - } - - public void setEmail(String email) { - this.email = email; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - /* - * public String getKey() { return generateKey(16); } - * - * public String getSharedSecret() { return generateKey(24); } - * - * private static String generateKey ( int length ) { return - * uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); } - */ - -} diff --git a/src/test/java/com/att/mr/test/dmaap/DMaapPubSubTest.java b/src/test/java/com/att/mr/test/dmaap/DMaapPubSubTest.java deleted file mode 100644 index fc91598..0000000 --- a/src/test/java/com/att/mr/test/dmaap/DMaapPubSubTest.java +++ /dev/null @@ -1,138 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.test.dmaap; - -import java.io.InputStream; -import java.util.Scanner; - -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.Entity; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.Response; - -import junit.framework.TestCase; - -import org.json.JSONObject; -import org.apache.http.HttpStatus; -import org.apache.log4j.Logger; - -import com.att.nsa.drumlin.till.data.sha1HmacSigner; - -public class DMaapPubSubTest { -/* private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class); - Client client = ClientBuilder.newClient(); - String url = LoadPropertyFile.getPropertyFileData().getProperty("url"); - WebTarget target = client.target(url); - String topicapikey; - String topicsecretKey; - String serverCalculatedSignature; - String date = LoadPropertyFile.getPropertyFileData().getProperty("date"); - // changes by islam - String topic_name = LoadPropertyFile.getPropertyFileData().getProperty("topicName"); - DmaapApiKeyTest keyInstance = new DmaapApiKeyTest(); - - - public void testProduceMessage() { - LOGGER.info("test case publish message"); - // DMaapTopicTest topicCreation = new DMaapTopicTest(); - DmaapApiKeyTest keyInstance = new DmaapApiKeyTest(); - // creating topic - createTopic(topic_name); - - target = client.target(url); - target = target.path("/events/"); - target = target.path(topic_name); - Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).post(Entity.json("{message:producing first message}")); - keyInstance.assertStatus(response2); - LOGGER.info("successfully published message"); - } - - public void testConsumeMessage() { - LOGGER.info("test case subscribing message"); - createTopic(topic_name); - target = client.target(url); - target = target.path("/events/"); - target = target.path(topic_name); - target = target.path("consumGrp"); - target = target.path(topicapikey); - Response response = target.request().get(); - keyInstance.assertStatus(response); - LOGGER.info("successfully consumed messages"); - InputStream is = (InputStream) response.getEntity(); - Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - String data = s.next(); - s.close(); - LOGGER.info("Consumed Message data: " + data); - } - - public void createTopic(String name) { - if (!topicExist(name)) { - TopicBean topicbean = new TopicBean(); - topicbean.setDescription("creating topic"); - topicbean.setPartitionCount(1); - topicbean.setReplicationCount(1); - topicbean.setTopicName(name); - topicbean.setTransactionEnabled(true); - target = client.target(url); - target = target.path("/topics/create"); - JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "topic creation")); - topicapikey = (String) jsonObj.get("key"); - topicsecretKey = (String) jsonObj.get("secret"); - serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey); - Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).post(Entity.json(topicbean)); - keyInstance.assertStatus(response); - } - } - - public boolean topicExist(String topicName) { - target = target.path("/topics/" + topicName); - InputStream is, issecret; - Response response = target.request().get(); - if (response.getStatus() == HttpStatus.SC_OK) { - is = (InputStream) response.getEntity(); - 
Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - JSONObject dataObj = new JSONObject(s.next()); - s.close(); - // get owner of a topic - topicapikey = (String) dataObj.get("owner"); - target = client.target(url); - target = target.path("/apiKeys/"); - target = target.path(topicapikey); - Response response2 = target.request().get(); - issecret = (InputStream) response2.getEntity(); - Scanner st = new Scanner(issecret); - st.useDelimiter("\\A"); - JSONObject dataObj1 = new JSONObject(st.next()); - st.close(); - // get secret key of this topic// - topicsecretKey = (String) dataObj1.get("secret"); - serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey); - return true; - } else - return false; - }*/ -} diff --git a/src/test/java/com/att/mr/test/dmaap/DMaapTopicTest.java b/src/test/java/com/att/mr/test/dmaap/DMaapTopicTest.java deleted file mode 100644 index 1b5e746..0000000 --- a/src/test/java/com/att/mr/test/dmaap/DMaapTopicTest.java +++ /dev/null @@ -1,267 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.test.dmaap; - -import java.io.InputStream; -import java.util.Properties; -import java.util.Scanner; - -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.Entity; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.Response; - -import junit.framework.TestCase; - -import org.apache.http.HttpStatus; -import org.json.JSONObject; -import org.apache.log4j.Logger; - -import com.att.nsa.drumlin.till.data.sha1HmacSigner; - -public class DMaapTopicTest { - /*private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class); - Client client = ClientBuilder.newClient(); - String topicapikey, topicsecretKey, serverCalculatedSignature; - Properties prop = LoadPropertyFile.getPropertyFileData(); - String topicName = prop.getProperty("topicName"); - String url = prop.getProperty("url"); - String date = prop.getProperty("date"); - WebTarget target = client.target(url); - DmaapApiKeyTest keyInstance = new DmaapApiKeyTest(); - - - public void createTopic(String name) { - if (!topicExist(name)) { - TopicBean topicbean = new TopicBean(); - topicbean.setDescription("creating topic"); - topicbean.setPartitionCount(1); - topicbean.setReplicationCount(1); - topicbean.setTopicName(name); - topicbean.setTransactionEnabled(true); - target = client.target(url); - target = target.path("/topics/create"); - JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("nm254w@att.com", "topic creation")); - topicapikey = (String) jsonObj.get("key"); - topicsecretKey = (String) jsonObj.get("secret"); - serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey); - Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).post(Entity.json(topicbean)); - keyInstance.assertStatus(response); - } - - } - - public boolean topicExist(String topicName) { - target = target.path("/topics/" + topicName); - InputStream is, issecret; - Response response = target.request().get(); - if (response.getStatus() == HttpStatus.SC_OK) { - is = (InputStream) response.getEntity(); - Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - JSONObject dataObj = new JSONObject(s.next()); - s.close(); - // get owner of a topic - topicapikey = (String) dataObj.get("owner"); - target = client.target(url); - target = target.path("/apiKeys/"); - target = target.path(topicapikey); - Response response2 = target.request().get(); - issecret = (InputStream) response2.getEntity(); - Scanner st = new Scanner(issecret); - st.useDelimiter("\\A"); - JSONObject dataObj1 = new JSONObject(st.next()); - st.close(); - // get secret key of this topic// - topicsecretKey = (String) dataObj1.get("secret"); - serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey); - return true; - } else - return false; - } - - public void testCreateTopic() { - LOGGER.info("test case create topic"); - createTopic(topicName); - LOGGER.info("Returning after create topic"); - } - - public void testOneTopic() { - LOGGER.info("test case get specific topic name " + topicName); - createTopic(topicName); - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - Response response = target.request().get(); - LOGGER.info("Successfully returned after fetching topic" + topicName); - keyInstance.assertStatus(response); - InputStream is = (InputStream) response.getEntity(); - 
Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - JSONObject dataObj = new JSONObject(s.next()); - LOGGER.info("Details of " + topicName + " : " + dataObj.toString()); - s.close(); - } - - public void testdeleteTopic() { - LOGGER.info("test case delete topic name " + topicName); - createTopic(topicName); - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).delete(); - keyInstance.assertStatus(response); - LOGGER.info("Successfully returned after deleting topic" + topicName); - } - - public void testAllTopic() { - LOGGER.info("test case fetch all topic"); - target = client.target(url); - target = target.path("/topics"); - Response response = target.request().get(); - keyInstance.assertStatus(response); - LOGGER.info("successfully returned after fetching all the topic"); - InputStream is = (InputStream) response.getEntity(); - Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - JSONObject dataObj = new JSONObject(s.next()); - s.close(); - LOGGER.info("List of all topics " + dataObj.toString()); - } - - public void testPublisherForTopic() { - LOGGER.info("test case get all publishers for topic: " + topicName); - // creating topic to check - createTopic(topicName); - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - target = target.path("/producers"); - // checking all producer for a particular topic - Response response = target.request().get(); - keyInstance.assertStatus(response); - LOGGER.info("Successfully returned after getting all the publishers" + topicName); - } - - public void testPermitPublisherForTopic() { - LOGGER.info("test case permit user for topic " + topicName); - JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to ")); - String userapikey = (String) jsonObj.get("key"); - createTopic(topicName); - // adding user to a topic// - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - target = target.path("/producers/"); - target = target.path(userapikey); - Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).put(Entity.json("")); - keyInstance.assertStatus(response); - LOGGER.info("successfully returned after permiting the user for topic " + topicName); - } - - public void testDenyPublisherForTopic() { - LOGGER.info("test case denying user for topic " + topicName); - JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to ")); - String userapikey = (String) jsonObj.get("key"); - createTopic(topicName); - // adding user to a topic// - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - target = target.path("/producers/"); - target = target.path(userapikey); - target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).put(Entity.json("")); - // deleting user who is just added// - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - target = target.path("/producers/"); - target = target.path(userapikey); - Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).delete(); - 
keyInstance.assertStatus(response2); - LOGGER.info("successfully returned after denying the user for topic " + topicName); - } - - public void testConsumerForTopic() { - LOGGER.info("test case get all consumers for topic: " + topicName); - // creating topic to check - createTopic(topicName); - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - target = target.path("/consumers"); - // checking all consumer for a particular topic - Response response = target.request().get(); - keyInstance.assertStatus(response); - LOGGER.info("Successfully returned after getting all the consumers" + topicName); - } - - public void testPermitConsumerForTopic() { - LOGGER.info("test case get all consumer for topic: " + topicName); - // creating user for adding to topic// - JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to ")); - String userapikey = (String) jsonObj.get("key"); - createTopic(topicName); - // adding user to a topic// - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - target = target.path("/consumers/"); - target = target.path(userapikey); - Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).put(Entity.json("")); - keyInstance.assertStatus(response); - LOGGER.info("Successfully returned after getting all the consumers" + topicName); - } - - public void testDenyConsumerForTopic() { - LOGGER.info("test case denying consumer for topic " + topicName); - // creating user for adding and deleting from topic// - JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to ")); - String userapikey = (String) jsonObj.get("key"); - createTopic(topicName); - // adding user to a topic// - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - target = target.path("/consumers/"); - target = target.path(userapikey); - target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).put(Entity.json("")); - // deleting user who is just added// - target = client.target(url); - target = target.path("/topics/"); - target = target.path(topicName); - target = target.path("/consumers/"); - target = target.path(userapikey); - Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) - .header("X-CambriaDate", date).delete(); - keyInstance.assertStatus(response2); - LOGGER.info("successfully returned after denying the consumer for topic " + topicName); - }*/ -} diff --git a/src/test/java/com/att/mr/test/dmaap/DmaapAdminTest.java b/src/test/java/com/att/mr/test/dmaap/DmaapAdminTest.java deleted file mode 100644 index f3e7fa9..0000000 --- a/src/test/java/com/att/mr/test/dmaap/DmaapAdminTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dmaap; - - - -public class DmaapAdminTest { - /*private static final Logger LOGGER = Logger.getLogger(DmaapAdminTest.class); - Client client = ClientBuilder.newClient(); - WebTarget target = client.target(LoadPropertyFile.getPropertyFileData().getProperty("url")); - - - public void assertStatus(Response response) { - assertTrue(response.getStatus() == HttpStatus.SC_OK); - } - - // 1.get consumer cache - public void testConsumerCache() { - LOGGER.info("test case consumer cache"); - target = target.path("/admin/consumerCache"); - Response response = target.request().get(); - assertStatus(response); - LOGGER.info("Successfully returned after fetching consumer cache"); - InputStream is = (InputStream) response.getEntity(); - Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - String data = s.next(); - s.close(); - LOGGER.info("Details of consumer cache :" + data); - } - - // 2.drop consumer cache - public void testDropConsumerCache() { - LOGGER.info("test case drop consumer cache"); - target = target.path("/admin/dropConsumerCache"); - Response response = target.request().post(Entity.json(null)); - assertStatus(response); - LOGGER.info("Successfully returned after dropping consumer cache"); - } -*/ -} diff --git a/src/test/java/com/att/mr/test/dmaap/DmaapApiKeyTest.java b/src/test/java/com/att/mr/test/dmaap/DmaapApiKeyTest.java deleted file mode 100644 index 77a2500..0000000 --- a/src/test/java/com/att/mr/test/dmaap/DmaapApiKeyTest.java +++ /dev/null @@ -1,162 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.test.dmaap; - -import java.io.InputStream; -import java.util.Properties; -import java.util.Scanner; - -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.Entity; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.Response; - -import junit.framework.TestCase; - -import org.apache.log4j.Logger; -import org.apache.http.HttpStatus; -import org.json.JSONObject; - -import com.att.nsa.drumlin.till.data.sha1HmacSigner; - -public class DmaapApiKeyTest { - /* - private static final Logger LOGGER = Logger.getLogger(DmaapApiKeyTest.class); - Client client = ClientBuilder.newClient(); - Properties prop = LoadPropertyFile.getPropertyFileData(); - String url = prop.getProperty("url"); - WebTarget target = client.target(url); - String date = prop.getProperty("date"); - - - public JSONObject returnKey(ApiKeyBean apikeybean) { - LOGGER.info("Call to return newly created key"); - target = client.target(url); - target = target.path("/apiKeys/create"); - Response response = target.request().post(Entity.json(apikeybean)); - assertStatus(response); - LOGGER.info("successfully created keys"); - InputStream is = (InputStream) response.getEntity(); - Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - JSONObject dataObj = new JSONObject(s.next()); - s.close(); - LOGGER.info("key details :" + dataObj.toString()); - return dataObj; - } - - // 1. create key - public void testCreateKey() { - LOGGER.info("test case create key"); - ApiKeyBean apiKeyBean = new ApiKeyBean("nm254w@att.com", "Creating Api Key."); - returnKey(apiKeyBean); - LOGGER.info("Successfully returned after creating key"); - } - - public void assertStatus(Response response) { - assertTrue(response.getStatus() == HttpStatus.SC_OK); - } - - // 2. get Allkey details - public void testAllKey() { - LOGGER.info("test case get all key"); - target = target.path("/apiKeys"); - Response response = target.request().get(); - assertStatus(response); - LOGGER.info("successfully returned after get all key"); - InputStream is = (InputStream) response.getEntity(); - Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - LOGGER.info("Details of key: " + s.next()); - s.close(); - - } - - // 3. get specific key - public void testSpecificKey() { - LOGGER.info("test case get specific key"); - String apiKey = ""; - ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key."); - - apiKey = (String) returnKey(apiKeyBean).get("key"); - target = client.target(url); - target = target.path("/apiKeys/"); - target = target.path(apiKey); - Response response = target.request().get(); - assertStatus(response); - LOGGER.info("successfully returned after fetching specific key"); - } - - // 4. 
update key - - public void testUpdateKey() { - LOGGER.info("test case update key"); - String apiKey = ""; - String secretKey = ""; - final String serverCalculatedSignature; - final String X_CambriaAuth; - final String X_CambriaDate; - JSONObject jsonObj; - - ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key for update"); - ApiKeyBean apiKeyBean1 = new ApiKeyBean("ai03911@att.com", "updating Api Key."); - jsonObj = returnKey(apiKeyBean); - apiKey = (String) jsonObj.get("key"); - secretKey = (String) jsonObj.get("secret"); - - serverCalculatedSignature = sha1HmacSigner.sign(date, secretKey); - X_CambriaAuth = apiKey + ":" + serverCalculatedSignature; - X_CambriaDate = date; - target = client.target(url); - target = target.path("/apiKeys/" + apiKey); - Response response1 = target.request().header("X-CambriaAuth", X_CambriaAuth) - .header("X-CambriaDate", X_CambriaDate).put(Entity.json(apiKeyBean1)); - assertStatus(response1); - LOGGER.info("successfully returned after updating key"); - } - - // 5. delete key - public void testDeleteKey() { - LOGGER.info("test case delete key"); - String apiKey = ""; - String secretKey = ""; - final String serverCalculatedSignature; - final String X_CambriaAuth; - final String X_CambriaDate; - JSONObject jsonObj; - ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key."); - jsonObj = returnKey(apiKeyBean); - apiKey = (String) jsonObj.get("key"); - secretKey = (String) jsonObj.get("secret"); - serverCalculatedSignature = sha1HmacSigner.sign(date, secretKey); - X_CambriaAuth = apiKey + ":" + serverCalculatedSignature; - X_CambriaDate = date; - target = client.target(url); - target = target.path("/apiKeys/" + apiKey); - Response response2 = target.request().header("X-CambriaAuth", X_CambriaAuth) - .header("X-CambriaDate", X_CambriaDate).delete(); - assertStatus(response2); - LOGGER.info("successfully returned after deleting key"); - } -*/ -} \ No newline at end of file diff --git a/src/test/java/com/att/mr/test/dmaap/DmaapMetricsTest.java b/src/test/java/com/att/mr/test/dmaap/DmaapMetricsTest.java deleted file mode 100644 index 9728687..0000000 --- a/src/test/java/com/att/mr/test/dmaap/DmaapMetricsTest.java +++ /dev/null @@ -1,77 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.test.dmaap; - -import java.io.InputStream; -import java.util.Scanner; - -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.Response; - -import junit.framework.TestCase; - -import org.apache.log4j.Logger; -import org.apache.http.HttpStatus; - -public class DmaapMetricsTest { - /*private static final Logger LOGGER = Logger.getLogger(DmaapMetricsTest.class); - Client client = ClientBuilder.newClient(); - WebTarget target = client.target(LoadPropertyFile.getPropertyFileData().getProperty("url")); - - public void assertStatus(Response response) { - assertTrue(response.getStatus() == HttpStatus.SC_OK); - } - - - // 1.get metrics - public void testMetrics() { - LOGGER.info("test case get all metrics"); - target = target.path("/metrics"); - Response response = target.request().get(); - assertStatus(response); - LOGGER.info("successfully returned after fetching all metrics"); - InputStream is = (InputStream) response.getEntity(); - Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - String data = s.next(); - s.close(); - LOGGER.info("DmaapMetricTest Test all metrics" + data); - } - - // 2.get metrics by name - public void testMetricsByName() { - LOGGER.info("test case get metrics by name"); - target = target.path("/metrics/startTime"); - Response response = target.request().get(); - assertStatus(response); - LOGGER.info("successfully returned after fetching specific metrics"); - InputStream is = (InputStream) response.getEntity(); - Scanner s = new Scanner(is); - s.useDelimiter("\\A"); - String data = s.next(); - s.close(); - LOGGER.info("DmaapMetricTest metrics by name" + data); - } -*/ -} diff --git a/src/test/java/com/att/mr/test/dmaap/JUnitTestSuite.java b/src/test/java/com/att/mr/test/dmaap/JUnitTestSuite.java deleted file mode 100644 index 40a5286..0000000 --- a/src/test/java/com/att/mr/test/dmaap/JUnitTestSuite.java +++ /dev/null @@ -1,44 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.test.dmaap; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DMaapPubSubTest.class, DmaapApiKeyTest.class, DMaapTopicTest.class, DmaapMetricsTest.class, - DmaapAdminTest.class }) -public class JUnitTestSuite { - /*private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class); - - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - }*/ - -} diff --git a/src/test/java/com/att/mr/test/dmaap/LoadPropertyFile.java b/src/test/java/com/att/mr/test/dmaap/LoadPropertyFile.java deleted file mode 100644 index c9d0387..0000000 --- a/src/test/java/com/att/mr/test/dmaap/LoadPropertyFile.java +++ /dev/null @@ -1,48 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.test.dmaap; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -import org.apache.log4j.Logger; - -public class LoadPropertyFile { - /*private static final Logger LOGGER = Logger - .getLogger(LoadPropertyFile.class); - - static public Properties getPropertyFileData() { - Properties prop = new Properties(); - LOGGER.info("loading the property file"); - - try { - InputStream inputStream = LoadPropertyFile.class.getClassLoader() - .getResourceAsStream("DMaaPUrl.properties"); - prop.load(inputStream); - LOGGER.info("successfully loaded the property file"); - } catch (IOException e) { - LOGGER.error("Error while retrieving API keys: " + e); - } - return prop; - }*/ -} diff --git a/src/test/java/com/att/mr/test/dmaap/TestRunner.java b/src/test/java/com/att/mr/test/dmaap/TestRunner.java deleted file mode 100644 index 9a744d9..0000000 --- a/src/test/java/com/att/mr/test/dmaap/TestRunner.java +++ /dev/null @@ -1,42 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dmaap; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - /*private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - } - LOGGER.info(result.wasSuccessful()); - } -*/ -} diff --git a/src/test/java/com/att/mr/test/dmaap/TopicBean.java b/src/test/java/com/att/mr/test/dmaap/TopicBean.java deleted file mode 100644 index 62f9d0c..0000000 --- a/src/test/java/com/att/mr/test/dmaap/TopicBean.java +++ /dev/null @@ -1,72 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -/** - * - */ -package com.att.mr.test.dmaap; - -import java.io.Serializable; - -public class TopicBean implements Serializable { - - /* - * private static final long serialVersionUID = -8620390377775457949L; - * private String topicName; private String description; - * - * - * private int partitionCount; private int replicationCount; private boolean - * transactionEnabled = false; - * - * public boolean isTransactionEnabled() { return transactionEnabled; } - * - * public void setTransactionEnabled(boolean transactionEnabled) { - * this.transactionEnabled = transactionEnabled; } - * - * public TopicBean() { super(); } - * - * public TopicBean(String topicName, String description, int - * partitionCount, int replicationCount, boolean transactionEnabled) { - * super(); this.topicName = topicName; this.description = description; - * this.partitionCount = partitionCount; this.replicationCount = - * replicationCount; this.transactionEnabled = transactionEnabled; } - * - * public String getTopicName() { return topicName; } - * - * public void setTopicName(String topicName) { this.topicName = topicName; - * } - * - * public String getDescription() { return description; } - * - * public void setDescription(String description) { this.description = - * description; } - * - * public int getPartitionCount() { return partitionCount; } - * - * public void setPartitionCount(int partitionCount) { this.partitionCount = - * partitionCount; } - * - * public int getReplicationCount() { return replicationCount; } - * - * public void setReplicationCount(int replicationCount) { - * this.replicationCount = replicationCount; } - */ -} diff --git a/src/test/java/com/att/mr/test/dme2/ApiKeyBean.java b/src/test/java/com/att/mr/test/dme2/ApiKeyBean.java deleted file mode 100644 index e6545e1..0000000 --- a/src/test/java/com/att/mr/test/dme2/ApiKeyBean.java +++ /dev/null @@ -1,72 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.io.Serializable; - -public class ApiKeyBean implements Serializable { - - private static final long serialVersionUID = -8219849086890567740L; - - // private static final String KEY_CHARS = - // "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - - - private String email; - private String description; - - public ApiKeyBean() { - super(); - } - - public ApiKeyBean(String email, String description) { - super(); - this.email = email; - this.description = description; - } - - public String getEmail() { - return email; - } - - public void setEmail(String email) { - this.email = email; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - /* - * public String getKey() { return generateKey(16); } - * - * public String getSharedSecret() { return generateKey(24); } - * - * private static String generateKey ( int length ) { return - * uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); } - */ - -} diff --git a/src/test/java/com/att/mr/test/dme2/DME2AdminTest.java b/src/test/java/com/att/mr/test/dme2/DME2AdminTest.java deleted file mode 100644 index 5f6f9c4..0000000 --- a/src/test/java/com/att/mr/test/dme2/DME2AdminTest.java +++ /dev/null @@ -1,149 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Properties; - -import org.apache.log4j.Logger; - -import junit.framework.TestCase; - -import com.att.aft.dme2.api.DME2Client; -import com.att.aft.dme2.api.DME2Exception; -import com.att.mr.test.dmaap.DmaapAdminTest; -//import com.ibm.disthub2.impl.client.PropSchema; - -public class DME2AdminTest extends TestCase { - private static final Logger LOGGER = Logger.getLogger(DME2AdminTest.class); - - protected String url; - - protected Properties props; - - protected HashMap hm; - - protected String methodType; - - protected String contentType; - - protected String user; - - protected String password; - - @Override - protected void setUp() throws Exception { - super.setUp(); - System.setProperty("AFT_DME2_CLIENT_SSL_INCLUDE_PROTOCOLS", "SSLv3,TLSv1,TLSv1.1"); - System.setProperty("AFT_DME2_CLIENT_IGNORE_SSL_CONFIG", "false"); - System.setProperty("AFT_DME2_CLIENT_KEYSTORE_PASSWORD", "changeit"); - this.props = LoadPropertyFile.getPropertyFileDataProducer(); - String latitude = props.getProperty("Latitude"); - String longitude = props.getProperty("Longitude"); - String version = props.getProperty("Version"); - String serviceName = props.getProperty("ServiceName"); - serviceName = "dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/admin"; - String env = props.getProperty("Environment"); - String partner = props.getProperty("Partner"); - String protocol = props.getProperty("Protocol"); - - methodType = props.getProperty("MethodTypeGet"); - contentType = props.getProperty("contenttype"); - user = props.getProperty("user"); - password = props.getProperty("password"); - - - this.url = protocol + "://" + serviceName + "?" 
+ "version=" + version + "&" + "envContext=" + env + "&" - + "routeOffer=" + partner + "&partner=BOT_R"; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - } - - public void testGetConsumerCache() { - LOGGER.info("test case consumer cache started"); - - String subContextPath = props.getProperty("SubContextPathGetAdminConsumerCache"); - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodType); - sender.setSubContext(subContextPath); - sender.setPayload(""); - sender.addHeader("Content-Type", contentType); - - sender.addHeader("X-CambriaAuth", "user1:7J49YriFlyRgebyOsSJhZvY/C60="); - sender.addHeader("X-X-CambriaDate", "2016-10-18T09:56:04-05:00"); - - //sender.setCredentials(user, password); - sender.setHeaders(hm); - LOGGER.info("Getting consumer Cache"); - String reply = sender.sendAndWait(5000L); - System.out.println(reply); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - assertNotNull(reply); - LOGGER.info("response from consumer cache=" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public void ttestDropConsumerCache() { - LOGGER.info("Drom consumer cache initiated"); - - String subContextPath = props.getProperty("SubContextPathDropAdminConsumerCache"); - - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodType); - sender.setSubContext(subContextPath); - sender.setPayload(""); - sender.addHeader("Content-Type", contentType); - sender.setCredentials(user, password); - sender.setHeaders(hm); - - LOGGER.info("Dropping consumer cache..........."); - String reply = sender.sendAndWait(5000L); - - // assertTrue(LoadPropertyFile.isValidJsonString(reply)); - assertNotNull(reply); - LOGGER.info("response =" + reply); - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - -} diff --git a/src/test/java/com/att/mr/test/dme2/DME2ApiKeyTest.java b/src/test/java/com/att/mr/test/dme2/DME2ApiKeyTest.java deleted file mode 100644 index bde61d5..0000000 --- a/src/test/java/com/att/mr/test/dme2/DME2ApiKeyTest.java +++ /dev/null @@ -1,229 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Properties; - -import org.apache.log4j.Logger; -import org.json.JSONObject; - -import com.att.aft.dme2.api.DME2Client; -import com.att.aft.dme2.api.DME2Exception; -import com.att.aft.dme2.internal.jackson.map.ObjectMapper; - -import junit.framework.TestCase; - -public class DME2ApiKeyTest extends TestCase { - private static final Logger LOGGER = Logger.getLogger(DME2ApiKeyTest.class); - - protected String url; - - protected Properties props; - - @Override - protected void setUp() throws Exception { - super.setUp(); - System.setProperty("AFT_DME2_CLIENT_SSL_INCLUDE_PROTOCOLS", "SSLv3,TLSv1,TLSv1.1"); - System.setProperty("AFT_DME2_CLIENT_IGNORE_SSL_CONFIG", "false"); - System.setProperty("AFT_DME2_CLIENT_KEYSTORE_PASSWORD", "changeit"); - this.props = LoadPropertyFile.getPropertyFileDataProducer(); - String latitude = props.getProperty("Latitude"); - String longitude = props.getProperty("Longitude"); - String version = props.getProperty("Version"); - String serviceName = props.getProperty("ServiceName"); - String env = props.getProperty("Environment"); - String partner = props.getProperty("Partner"); - String protocol = props.getProperty("Protocol"); - this.url = protocol + "://" + serviceName + "?" + "version=" + version + "&" + "envContext=" + env + "&" - + "routeOffer=" + partner + "&partner=BOT_R"; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - } - - public void testCreateKey() { - LOGGER.info("Create Key test case initiated"); - - ApiKeyBean apiKeyBean = new ApiKeyBean("user1@us.att.com", "Creating Api Key.m"); - - System.out.println(url); - - returnKey(apiKeyBean, url, props); - - } - - public String returnKey(ApiKeyBean apibean, String url, Properties props) { - - String reply = null; - try { - LOGGER.info("Call to return key "); - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(props.getProperty("MethodTypePost")); - sender.setSubContext(props.getProperty("SubContextPathGetCreateKeys")); - String jsonStringApiBean = new ObjectMapper().writeValueAsString(apibean); - sender.setPayload(jsonStringApiBean); - sender.addHeader("content-type", props.getProperty("contenttype")); - sender.setCredentials(props.getProperty("user"), props.getProperty("password")); - LOGGER.info("creating ApiKey"); - reply = sender.sendAndWait(5000L); - System.out.println("reply: " + reply); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - return reply; - } - - public void testGetAllKey() { - LOGGER.info("Test case Get All key initiated...."); - try { - DME2Client sender = new DME2Client(new URI(this.url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(this.props.getProperty("MethodTypeGet")); - String subcontextPath = this.props.getProperty("SubContextPathGetApiKeys"); - // sender.setSubContext(subcontextPath); - sender.setPayload(""); - 
sender.addHeader("content-type", props.getProperty("contenttype")); - sender.setCredentials(props.getProperty("user"), props.getProperty("password")); - LOGGER.info("Fetching all keys"); - String reply = sender.sendAndWait(5000L); - System.out.println(reply); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("response =" + reply); - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public void testGetOneKey() {/* - LOGGER.info("Test case get one key initiated"); - ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m"); - JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props)); - String apiKey = (String) jsonObj.get("key"); - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(props.getProperty("MethodTypeGet")); - sender.setSubContext(props.getProperty("SubContextPathGetOneKey") + apiKey); - sender.setPayload(""); - sender.addHeader("content-type", props.getProperty("contenttype")); - sender.setCredentials(props.getProperty("user"), props.getProperty("password")); - - LOGGER.info("Fetching details of api key: " + apiKey); - String reply = sender.sendAndWait(5000L); - System.out.println(reply); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - */} - - // ............. test case update key is not applicable in - // DME2.................// - public void testUpdateKey() {/* - ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m"); - - JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props)); - String apiKey = (String) jsonObj.get("key"); - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - String p = props.getProperty("MethodTypePut"); - sender.setMethod(p); - String s = props.getProperty("SubContextPathUpdateKeys") + apiKey; - sender.setSubContext(s); - - String jsonStringApiBean = new ObjectMapper() - .writeValueAsString(new ApiKeyBean("user1@att.com", "updating key")); - sender.setPayload(jsonStringApiBean); - System.out.println(jsonStringApiBean); - String c = props.getProperty("contenttype"); - sender.addHeader("content-type", c); - sender.setCredentials(props.getProperty("keyUser"), props.getProperty("keyPass")); - - System.out.println("creating ApiKey"); - String reply = sender.sendAndWait(5000L); - assertNotNull(reply); - System.out.println("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - */} - - // ............. 
test case delete key is not applicable in - // DME2.................// - - public void testDeleteKey() {/* - ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m"); - - JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props)); - String apiKey = (String) jsonObj.get("key"); - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - String p = props.getProperty("MethodTypeDelete"); - sender.setMethod(p); - String s = props.getProperty("SubContextPathDeleteteKeys") + apiKey; - sender.setSubContext(s); - - sender.setPayload(""); // System.out.println(jsonStringApiBean); - String c = props.getProperty("contenttype"); - sender.addHeader("content-type", c); - sender.setCredentials(props.getProperty("keyUser"), props.getProperty("keyPass")); - - System.out.println("creating ApiKey"); - String reply = sender.sendAndWait(5000L); - assertNotNull(reply); - System.out.println("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - */} - -} diff --git a/src/test/java/com/att/mr/test/dme2/DME2ConsumerFilterTest.java b/src/test/java/com/att/mr/test/dme2/DME2ConsumerFilterTest.java deleted file mode 100644 index 30c13ea..0000000 --- a/src/test/java/com/att/mr/test/dme2/DME2ConsumerFilterTest.java +++ /dev/null @@ -1,97 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
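Every removed DME2 test repeats the same client call sequence: construct a DME2Client with a 5-second URI timeout, allow all HTTP return codes, set method, sub-context, payload, headers, and credentials, then sendAndWait. A sketch that factors that repetition into one helper could look like the following; the class and method names are hypothetical and it depends on the same AFT DME2 client library the tests use, but every DME2Client call shown appears verbatim in the deleted code.

import java.net.URI;
import java.util.HashMap;

import com.att.aft.dme2.api.DME2Client;

public class Dme2RequestSketch {

    // Hypothetical helper; only calls that appear in the deleted tests are used.
    @SuppressWarnings({ "rawtypes", "unchecked" })
    static String send(String url, String method, String subContext, String payload,
                       String user, String password, String contentType) throws Exception {
        DME2Client sender = new DME2Client(new URI(url), 5000L);
        sender.setAllowAllHttpReturnCodes(true);
        sender.setMethod(method);
        sender.setSubContext(subContext);
        sender.setPayload(payload);
        sender.addHeader("Content-Type", contentType);
        sender.setCredentials(user, password);

        // Same raw HashMap and AFT timeout settings the deleted tests pass to setHeaders().
        HashMap hm = new HashMap();
        hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000");
        hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000");
        hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000");
        sender.setHeaders(hm);

        return sender.sendAndWait(5000L);
    }
}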
- * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URLEncoder; -import java.util.HashMap; -import java.util.Properties; - -import org.apache.log4j.Logger; - -import junit.framework.TestCase; - -import com.att.aft.dme2.api.DME2Client; -import com.att.aft.dme2.api.DME2Exception; -import com.att.mr.test.dmaap.DmaapAdminTest; - -public class DME2ConsumerFilterTest extends TestCase { - private static final Logger LOGGER = Logger.getLogger(DME2ConsumerFilterTest.class); - - public void testConsumerFilter() { - LOGGER.info("Test case consumer filter initiated"); - - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - String latitude = props.getProperty("Latitude"); - String longitude = props.getProperty("Longitude"); - String version = props.getProperty("Version"); - String serviceName = props.getProperty("ServiceName"); - String env = props.getProperty("Environment"); - String partner = props.getProperty("Partner"); - String protocol = props.getProperty("Protocol"); - String methodType = props.getProperty("MethodTypeGet"); - String user = props.getProperty("user"); - String password = props.getProperty("password"); - String contenttype = props.getProperty("contenttype"); - - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - - try { - // ..checking for topic exist is commented - // if (!topicTestObj.topicExist(url, props, hm)) { - // throw new Exception("Topic does not exist"); - // } else { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodType); - String subContextPathConsumer = props.getProperty("SubContextPathConsumer") + props.getProperty("newTopic") - + "/" + props.getProperty("group") + "/" + props.getProperty("id") + "?" - + props.getProperty("filterType"); - - sender.setSubContext(URLEncoder.encode(subContextPathConsumer, "UTF-8")); - sender.setPayload(""); - - sender.addHeader("Content-Type", contenttype); - sender.setCredentials(user, password); - sender.setHeaders(hm); - - LOGGER.info("Consuming Message for Filter"); - String reply = sender.sendAndWait(5000L); - assertNotNull(reply); - LOGGER.info("Message received = " + reply); - // } - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } -} diff --git a/src/test/java/com/att/mr/test/dme2/DME2ConsumerTest.java b/src/test/java/com/att/mr/test/dme2/DME2ConsumerTest.java deleted file mode 100644 index 810f451..0000000 --- a/src/test/java/com/att/mr/test/dme2/DME2ConsumerTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
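The deleted tests assemble two different DME2 locator URIs from their property files: a direct resolve form (admin and API-key tests) and a DME2SEARCH form (consumer, metrics, producer, and topic tests). The sketch below shows both shapes; the service name, environment, and partner values are assumed placeholders, not values from the original DMaaPUrl/producer property files.

public class Dme2UriSketch {

    public static void main(String[] args) {
        // Placeholder values; the real ones come from the test property files, which are not shown here.
        String protocol = "http";
        String serviceName = "com.att.nsa.dmaap.dev";
        String version = "1.0";
        String env = "DEV";
        String partner = "DMAAP";

        // Direct resolve style, as built in the admin and API-key tests' setUp():
        String direct = protocol + "://" + serviceName + "?" + "version=" + version + "&"
                + "envContext=" + env + "&" + "routeOffer=" + partner + "&partner=BOT_R";

        // DME2SEARCH style, as built in the consumer, metrics, producer, and topic tests:
        String search = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version
                + "/" + "envContext=" + env + "/" + "partner=" + partner;

        System.out.println(direct);
        System.out.println(search);
    }
}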
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Properties; - -import org.apache.log4j.Logger; - -import junit.framework.TestCase; - -import com.att.aft.dme2.api.DME2Client; -import com.att.aft.dme2.api.DME2Exception; -import com.att.mr.test.dmaap.DmaapAdminTest; - -public class DME2ConsumerTest extends TestCase { - private static final Logger LOGGER = Logger.getLogger(DME2ConsumerTest.class); - - public void testConsumer() { - LOGGER.info("Test case subcribing initiated"); - - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - String latitude = props.getProperty("Latitude"); - String longitude = props.getProperty("Longitude"); - String version = props.getProperty("Version"); - String serviceName = props.getProperty("ServiceName"); - String env = props.getProperty("Environment"); - String partner = props.getProperty("Partner"); - String protocol = props.getProperty("Protocol"); - String methodType = props.getProperty("MethodTypeGet"); - String user = props.getProperty("user"); - String password = props.getProperty("password"); - String contenttype = props.getProperty("contenttype"); - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - try { - - // topic exist logic is commented - // if (!topicTestObj.topicExist(url, props, hm)) { - // throw new Exception("Topic does not exist"); - // } else { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodType); - String subContextPathConsumer = props.getProperty("SubContextPathConsumer") + props.getProperty("newTopic") - + "/" + props.getProperty("group") + "/" + props.getProperty("id"); - sender.setSubContext(subContextPathConsumer); - sender.setPayload(""); - - sender.addHeader("Content-Type", contenttype); - sender.setCredentials(user, password); - sender.setHeaders(hm); - - LOGGER.info("Consuming Message"); - String reply = sender.sendAndWait(5000L); - - assertNotNull(reply); - LOGGER.info("Message received = " + reply); - // } - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - -} diff --git 
a/src/test/java/com/att/mr/test/dme2/DME2MetricsTest.java b/src/test/java/com/att/mr/test/dme2/DME2MetricsTest.java deleted file mode 100644 index e7ccebe..0000000 --- a/src/test/java/com/att/mr/test/dme2/DME2MetricsTest.java +++ /dev/null @@ -1,133 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Properties; - -import org.apache.log4j.Logger; - -import junit.framework.TestCase; - -import com.att.aft.dme2.api.DME2Client; -import com.att.aft.dme2.api.DME2Exception; -import com.att.mr.test.dmaap.DmaapAdminTest; - -public class DME2MetricsTest extends TestCase { - private static final Logger LOGGER = Logger.getLogger(DME2MetricsTest.class); - - public void testGetMetrics() { - LOGGER.info("Test case get metrics initiated..."); - - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - String latitude = props.getProperty("Latitude"); - String longitude = props.getProperty("Longitude"); - String version = props.getProperty("Version"); - String serviceName = props.getProperty("ServiceName"); - String env = props.getProperty("Environment"); - String partner = props.getProperty("Partner"); - String subContextPath = props.getProperty("SubContextPathGetMetrics"); - String protocol = props.getProperty("Protocol"); - String methodType = props.getProperty("MethodTypeGet"); - String user = props.getProperty("user"); - String password = props.getProperty("password"); - String contenttype = props.getProperty("contenttype"); - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodType); - sender.setSubContext(subContextPath); - sender.setPayload(""); - sender.addHeader("Content-Type", contenttype); - sender.setCredentials(user, password); - sender.setHeaders(hm); - - LOGGER.info("Getting Metrics Details"); - String reply 
= sender.sendAndWait(5000L); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public void testGetMetricsByName() { - LOGGER.info("Test case get metrics by name initiated"); - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - String latitude = props.getProperty("Latitude"); - String longitude = props.getProperty("Longitude"); - String version = props.getProperty("Version"); - String serviceName = props.getProperty("ServiceName"); - String env = props.getProperty("Environment"); - String partner = props.getProperty("Partner"); - String subContextPath = props.getProperty("SubContextPathGetMetricsByName"); - String protocol = props.getProperty("Protocol"); - String methodType = props.getProperty("MethodTypeGet"); - String user = props.getProperty("user"); - String password = props.getProperty("password"); - String contenttype = props.getProperty("contenttype"); - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodType); - sender.setSubContext(subContextPath); - sender.setPayload(""); - sender.addHeader("Content-Type", contenttype); - sender.setCredentials(user, password); - sender.setHeaders(hm); - - LOGGER.info("Getting Metrics By name"); - String reply = sender.sendAndWait(5000L); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } -} diff --git a/src/test/java/com/att/mr/test/dme2/DME2ProducerTest.java b/src/test/java/com/att/mr/test/dme2/DME2ProducerTest.java deleted file mode 100644 index 71c5e0f..0000000 --- a/src/test/java/com/att/mr/test/dme2/DME2ProducerTest.java +++ /dev/null @@ -1,101 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Properties; - -import org.apache.log4j.Logger; - -import junit.framework.TestCase; - -import com.att.aft.dme2.api.DME2Client; -import com.att.aft.dme2.api.DME2Exception; -import com.att.aft.dme2.internal.jackson.map.ObjectMapper; -import com.att.mr.test.dmaap.DmaapAdminTest; - -public class DME2ProducerTest extends TestCase { - private static final Logger LOGGER = Logger.getLogger(DmaapAdminTest.class); - - public void testProducer() { - DME2TopicTest topicTestObj = new DME2TopicTest(); - - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - String latitude = props.getProperty("Latitude"); - String longitude = props.getProperty("Longitude"); - String version = props.getProperty("Version"); - String serviceName = props.getProperty("ServiceName"); - String env = props.getProperty("Environment"); - String partner = props.getProperty("Partner"); - String protocol = props.getProperty("Protocol"); - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - // checking whether topic exist or not - if (!topicTestObj.topicExist(url, props, hm)) { - // if topic doesn't exist then create the topic - topicTestObj.createTopic(url, props, hm); - // after creating the topic publish on that topic - publishMessage(url, props, hm); - } else { - // if topic already exist start publishing on the topic - publishMessage(url, props, hm); - } - - } - - public void publishMessage(String url, Properties props, HashMap mapData) { - try { - LOGGER.info("Call to publish message "); - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(props.getProperty("MethodTypePost")); - String subcontextpathPublish = props.getProperty("SubContextPathproducer") + props.getProperty("newTopic"); - sender.setSubContext(subcontextpathPublish); - String jsonStringApiBean = new ObjectMapper().writeValueAsString(new ApiKeyBean("example@att.com", - "description")); - sender.setPayload(jsonStringApiBean); - - sender.setCredentials(props.getProperty("user"), props.getProperty("password")); - sender.addHeader("content-type", props.getProperty("contenttype")); - LOGGER.info("Publishing message"); - String reply = sender.sendAndWait(5000L); - // assertTrue(LoadPropertyFile.isValidJsonString(reply)); - assertNotNull(reply); - LOGGER.info("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - - } -} diff --git a/src/test/java/com/att/mr/test/dme2/DME2TopicTest.java b/src/test/java/com/att/mr/test/dme2/DME2TopicTest.java deleted file mode 100644 index 9d26083..0000000 --- a/src/test/java/com/att/mr/test/dme2/DME2TopicTest.java +++ /dev/null @@ -1,546 +0,0 @@ 
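The removed DME2ProducerTest drives a simple flow: check whether the target topic exists, create it if not, then publish an ApiKeyBean serialized as JSON to that topic. A compact sketch of that control flow follows; the stub methods stand in for the DME2Client HTTP calls, and the topic name and printed paths are illustrative placeholders keyed to the property names the test reads.

public class PublishFlowSketch {

    public static void main(String[] args) {
        // Placeholder topic and payload; the removed test reads both from its property file.
        String topic = "exampleTopic";
        String payload = "{\"email\":\"example@att.com\",\"description\":\"description\"}";

        // Control flow mirrored from DME2ProducerTest.testProducer():
        // create the topic only if it does not already exist, then publish either way.
        if (!topicExists(topic)) {
            createTopic(topic);
        }
        publish(topic, payload);
    }

    // Stubs standing in for the DME2Client requests the deleted tests make.
    static boolean topicExists(String topic) {
        System.out.println("GET  <subContextPathGetOneTopic>" + topic);
        return false;
    }

    static void createTopic(String topic) {
        System.out.println("POST <SubContextPathCreateTopic> for " + topic);
    }

    static void publish(String topic, String payload) {
        System.out.println("POST <SubContextPathproducer>" + topic + " payload=" + payload);
    }
}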
-/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Properties; - -import javax.ws.rs.client.Entity; - -import org.apache.log4j.Logger; - -import junit.framework.TestCase; - -import com.att.aft.dme2.api.DME2Client; -import com.att.aft.dme2.api.DME2Exception; -import com.att.aft.dme2.internal.jackson.map.ObjectMapper; -import com.att.mr.test.dmaap.DmaapAdminTest; - -public class DME2TopicTest extends TestCase { - private String latitude; - private String longitude; - private String version; - private String serviceName; - private String env; - private String partner; - private String protocol; - private String methodTypeGet; - private String methodTypePost; - private String methodTypeDelete; - private String methodTypePut; - - private String user; - private String password; - private String contenttype; - private String subContextPathGetAllTopic; - private String subContextPathGetOneTopic; - private String SubContextPathCreateTopic; - private String SubContextPathGetPublisherl; - private String SubContextPathGetPublisher; - private String SubContextPathGetPermitPublisher; - private String SubContextPathGetConsumer; - private String SubContextPathGetPermitConsumer; - private static final Logger LOGGER = Logger.getLogger(DME2TopicTest.class); - - public void createTopic(String url, Properties props, HashMap mapData) { - LOGGER.info("create topic method starts"); - if (!topicExist(url, props, mapData)) { - LOGGER.info("creating a new topic"); - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(props.getProperty("MethodTypePost")); - sender.setSubContext(props.getProperty("SubContextPathCreateTopic")); - TopicBeanDME2 topicBean = new TopicBeanDME2(props.getProperty("newTopic"), - props.getProperty("topicDescription"), Integer.parseInt(props.getProperty("partition")), - Integer.parseInt(props.getProperty("replication")), Boolean.valueOf(props - .getProperty("txenabled"))); - String jsonStringApiBean = new ObjectMapper().writeValueAsString(topicBean); - sender.setPayload(jsonStringApiBean); - sender.addHeader("content-type", props.getProperty("contenttype")); - sender.setCredentials(props.getProperty("user"), 
props.getProperty("password")); - - LOGGER.info("creating Topic"); - String reply = sender.sendAndWait(5000L); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - } - - public boolean topicExist(String url, Properties props, HashMap mapData) { - boolean topicExist = false; - try { - LOGGER.info("Checking topic exists or not"); - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(props.getProperty("MethodTypeGet")); - String subcontextPath = props.getProperty("subContextPathGetOneTopic") + props.getProperty("newTopic"); - sender.setSubContext(subcontextPath); - sender.setPayload(""); - sender.addHeader("content-type", props.getProperty("contenttype")); - sender.setCredentials(props.getProperty("user"), props.getProperty("password")); - String reply = sender.sendAndWait(5000L); - topicExist = LoadPropertyFile.isValidJsonString(reply); - LOGGER.info("Topic exist =" + topicExist); - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - return topicExist; - } - - public void testAllTopics() { - LOGGER.info("Test case get all topics initiated"); - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - latitude = props.getProperty("Latitude"); - longitude = props.getProperty("Longitude"); - version = props.getProperty("Version"); - serviceName = props.getProperty("ServiceName"); - env = props.getProperty("Environment"); - partner = props.getProperty("Partner"); - subContextPathGetAllTopic = props.getProperty("subContextPathGetAllTopic"); - protocol = props.getProperty("Protocol"); - methodTypeGet = props.getProperty("MethodTypeGet"); - user = props.getProperty("user"); - password = props.getProperty("password"); - contenttype = props.getProperty("contenttype"); - - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); // } else { - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodTypeGet); - sender.setSubContext(subContextPathGetAllTopic); - sender.setPayload(""); - - sender.addHeader("Content-Type", contenttype); - sender.setCredentials(user, password); - sender.setHeaders(hm); - - LOGGER.info("Retrieving all topics"); - String reply = sender.sendAndWait(5000L); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("All Topics details = " + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public void testOneTopic() { - LOGGER.info("Test case get one topic initiated"); - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - latitude = props.getProperty("Latitude"); - longitude = props.getProperty("Longitude"); - version = props.getProperty("Version"); - serviceName = props.getProperty("ServiceName"); - env = 
props.getProperty("Environment"); - partner = props.getProperty("Partner"); - subContextPathGetOneTopic = props.getProperty("subContextPathGetOneTopic"); - protocol = props.getProperty("Protocol"); - methodTypeGet = props.getProperty("MethodTypeGet"); - user = props.getProperty("user"); - password = props.getProperty("password"); - contenttype = props.getProperty("contenttype"); - - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - System.out.println("Retrieving topic detail"); - if (!topicExist(url, props, hm)) { - createTopic(url, props, hm); - } else { - assertTrue(true); - } - } - - public void createTopicForDeletion(String url, Properties props, HashMap mapData) { - LOGGER.info("create topic method starts"); - - LOGGER.info("creating a new topic for deletion"); - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(props.getProperty("MethodTypePost")); - sender.setSubContext(props.getProperty("SubContextPathCreateTopic")); - TopicBeanDME2 topicBean = new TopicBeanDME2(props.getProperty("deleteTopic"), - props.getProperty("topicDescription"), Integer.parseInt(props.getProperty("partition")), - Integer.parseInt(props.getProperty("replication")), Boolean.valueOf(props.getProperty("txenabled"))); - String jsonStringApiBean = new ObjectMapper().writeValueAsString(topicBean); - sender.setPayload(jsonStringApiBean); - sender.addHeader("content-type", props.getProperty("contenttype")); - sender.setCredentials(props.getProperty("user"), props.getProperty("password")); - - LOGGER.info("creating Topic"); - String reply = sender.sendAndWait(5000L); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("response =" + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - - } - - public boolean topicExistForDeletion(String url, Properties props, HashMap mapData) { - boolean topicExist = false; - try { - LOGGER.info("Checking topic exists for deletion"); - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(props.getProperty("MethodTypeGet")); - String subcontextPath = props.getProperty("subContextPathGetOneTopic") + props.getProperty("deleteTopic"); - sender.setSubContext(subcontextPath); - sender.setPayload(""); - sender.addHeader("content-type", props.getProperty("contenttype")); - sender.setCredentials(props.getProperty("user"), props.getProperty("password")); - String reply = sender.sendAndWait(5000L); - topicExist = LoadPropertyFile.isValidJsonString(reply); - LOGGER.info("Topic exist for deletion=" + topicExist); - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - return topicExist; - } - - public void testDeleteTopic() { - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - latitude = props.getProperty("Latitude"); - longitude = props.getProperty("Longitude"); - version = props.getProperty("Version"); - serviceName = 
props.getProperty("ServiceName"); - env = props.getProperty("Environment"); - partner = props.getProperty("Partner"); - SubContextPathCreateTopic = props.getProperty("SubContextPathCreateTopic"); - protocol = props.getProperty("Protocol"); - methodTypePost = props.getProperty("MethodTypeDelete"); - user = props.getProperty("user"); - password = props.getProperty("password"); - contenttype = props.getProperty("contenttypejson"); - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - System.out.println("deleteing topic"); - if (!topicExistForDeletion(url, props, hm)) { - createTopicForDeletion(url, props, hm); - deleteTopic(url, props, hm); - } else { - deleteTopic(url, props, hm); - } - } - - public void deleteTopic(String url, Properties props, HashMap mapData) { - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(props.getProperty("MethodTypeDelete")); - String subsontextPathDelete = props.getProperty("subContextPathGetOneTopic") - + props.getProperty("deleteTopic"); - sender.setSubContext(subsontextPathDelete); - sender.setPayload(""); - sender.addHeader("content-type", props.getProperty("contenttype")); - sender.setCredentials(props.getProperty("user"), props.getProperty("password")); - System.out.println("Deleting Topic " + props.getProperty("deleteTopic")); - String reply = sender.sendAndWait(5000L); - assertNotNull(reply); - System.out.println("response =" + reply); - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public void testGetProducersTopics() { - LOGGER.info("Test case get list of producers on topic"); - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - latitude = props.getProperty("Latitude"); - longitude = props.getProperty("Longitude"); - version = props.getProperty("Version"); - serviceName = props.getProperty("ServiceName"); - env = props.getProperty("Environment"); - partner = props.getProperty("Partner"); - SubContextPathGetPublisher = props.getProperty("SubContextPathGetPublisher"); - protocol = props.getProperty("Protocol"); - methodTypeGet = props.getProperty("MethodTypeGet"); - user = props.getProperty("user"); - password = props.getProperty("password"); - contenttype = props.getProperty("contenttype"); - - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodTypeGet); - sender.setSubContext(SubContextPathGetPublisher); - sender.setPayload(""); - - sender.addHeader("Content-Type", contenttype); - sender.setCredentials(user, password); - sender.setHeaders(hm); - - LOGGER.info("Retrieving List of publishers"); - String reply = 
sender.sendAndWait(5000L); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - LOGGER.info("All Publishers details = " + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - // permitting a producer on topic is not applicable - // public void testPermitProducersTopics() { - // LOGGER.info("Test case "); - // Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - // latitude = props.getProperty("Latitude"); - // longitude = props.getProperty("Longitude"); - // version = props.getProperty("Version"); - // serviceName = props.getProperty("ServiceName"); - // env = props.getProperty("Environment"); - // partner = props.getProperty("Partner"); - // SubContextPathGetPermitPublisher = - // props.getProperty("SubContextPathGetPermitPublisher"); - // protocol = props.getProperty("Protocol"); - // methodTypePut = props.getProperty("MethodTypePut"); - // user = props.getProperty("user"); - // password = props.getProperty("password"); - // contenttype = props.getProperty("contenttype"); - // - // String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" - // + "version=" + version + "/" - // + "envContext=" + env + "/" + "partner=" + partner; - // LoadPropertyFile.loadAFTProperties(latitude, longitude); - // - // HashMap hm = new HashMap(); - // hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - // hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - // hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - // - // try { - // DME2Client sender = new DME2Client(new URI(url), 5000L); - // sender.setAllowAllHttpReturnCodes(true); - // sender.setMethod(methodTypePut); - // sender.setSubContext(SubContextPathGetPermitPublisher); - // sender.setPayload(""); - // - // sender.addHeader("Content-Type", contenttype); - // sender.setCredentials(user, password); - // sender.setHeaders(hm); - // - // System.out.println("Permitting a producer on topic"); - // String reply = sender.sendAndWait(5000L); - // System.out.println("Reply from server = " + reply); - // - // } catch (DME2Exception e) { - // e.printStackTrace(); - // } catch (URISyntaxException e) { - // e.printStackTrace(); - // } catch (Exception e) { - // e.printStackTrace(); - // } - // } - - public void testGetConsumersTopics() { - LOGGER.info("Test case get list of consumers on topic "); - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - latitude = props.getProperty("Latitude"); - longitude = props.getProperty("Longitude"); - version = props.getProperty("Version"); - serviceName = props.getProperty("ServiceName"); - env = props.getProperty("Environment"); - partner = props.getProperty("Partner"); - SubContextPathGetConsumer = props.getProperty("SubContextPathGetConsumer"); - protocol = props.getProperty("Protocol"); - methodTypeGet = props.getProperty("MethodTypeGet"); - user = props.getProperty("user"); - password = props.getProperty("password"); - contenttype = props.getProperty("contenttype"); - - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - - try { - DME2Client sender = new DME2Client(new URI(url), 5000L); - 
sender.setAllowAllHttpReturnCodes(true); - sender.setMethod(methodTypeGet); - sender.setSubContext(SubContextPathGetConsumer); - sender.setPayload(""); - - sender.addHeader("Content-Type", contenttype); - sender.setCredentials(user, password); - sender.setHeaders(hm); - - LOGGER.info("Retrieving consumer details on topics"); - String reply = sender.sendAndWait(5000L); - assertTrue(LoadPropertyFile.isValidJsonString(reply)); - System.out.println("Reply from server = " + reply); - - } catch (DME2Exception e) { - e.printStackTrace(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public void testCreateTopic() { - LOGGER.info("Test case create topic starts"); - Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - latitude = props.getProperty("Latitude"); - longitude = props.getProperty("Longitude"); - version = props.getProperty("Version"); - serviceName = props.getProperty("ServiceName"); - env = props.getProperty("Environment"); - partner = props.getProperty("Partner"); - SubContextPathCreateTopic = props.getProperty("SubContextPathCreateTopic"); - protocol = props.getProperty("Protocol"); - methodTypePost = props.getProperty("MethodTypePost"); - user = props.getProperty("user"); - password = props.getProperty("password"); - contenttype = props.getProperty("contenttypejson"); - - String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" - + "envContext=" + env + "/" + "partner=" + partner; - LoadPropertyFile.loadAFTProperties(latitude, longitude); - HashMap hm = new HashMap(); - hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - - createTopic(url, props, hm); - } - // permitting a consumer on topic is not applicable - // public void testPermitConsumerTopics() { - // Properties props = LoadPropertyFile.getPropertyFileDataProducer(); - // latitude = props.getProperty("Latitude"); - // longitude = props.getProperty("Longitude"); - // version = props.getProperty("Version"); - // serviceName = props.getProperty("ServiceName"); - // env = props.getProperty("Environment"); - // partner = props.getProperty("Partner"); - // SubContextPathGetPermitConsumer = - // props.getProperty("SubContextPathGetPermitConsumer"); - // protocol = props.getProperty("Protocol"); - // methodTypePut = props.getProperty("MethodTypePut"); - // user = props.getProperty("user"); - // password = props.getProperty("password"); - // contenttype = props.getProperty("contenttype"); - // - // String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" - // + "version=" + version + "/" - // + "envContext=" + env + "/" + "partner=" + partner; - // LoadPropertyFile.loadAFTProperties(latitude, longitude); - // - // HashMap hm = new HashMap(); - // hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); - // hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); - // hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); - // - // try { - // DME2Client sender = new DME2Client(new URI(url), 5000L); - // sender.setAllowAllHttpReturnCodes(true); - // sender.setMethod(methodTypePut); - // sender.setSubContext(SubContextPathGetPermitConsumer); - // sender.setPayload(""); - // - // sender.addHeader("Content-Type", contenttype); - // sender.setCredentials(user, password); - // sender.setHeaders(hm); - // - // System.out.println("Permitting a consumer on topic"); - // String reply = sender.sendAndWait(5000L); - // 
assertNotNull(reply); - // System.out.println("Reply from server = " + reply); - // - // } catch (DME2Exception e) { - // e.printStackTrace(); - // } catch (URISyntaxException e) { - // e.printStackTrace(); - // } catch (Exception e) { - // e.printStackTrace(); - // } - // } - -} diff --git a/src/test/java/com/att/mr/test/dme2/JUnitTestSuite.java b/src/test/java/com/att/mr/test/dme2/JUnitTestSuite.java deleted file mode 100644 index b3f2683..0000000 --- a/src/test/java/com/att/mr/test/dme2/JUnitTestSuite.java +++ /dev/null @@ -1,44 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DME2AdminTest.class, DME2ApiKeyTest.class, DME2ConsumerTest.class, DME2ConsumerTest.class, - DME2MetricsTest.class, DME2ProducerTest.class, DME2TopicTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/mr/test/dme2/LoadPropertyFile.java b/src/test/java/com/att/mr/test/dme2/LoadPropertyFile.java deleted file mode 100644 index a274d68..0000000 --- a/src/test/java/com/att/mr/test/dme2/LoadPropertyFile.java +++ /dev/null @@ -1,69 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Properties; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import org.json.JSONObject; - -public class LoadPropertyFile { - //private static final Logger LOGGER = Logger.getLogger(LoadPropertyFile.class); - private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(LoadPropertyFile.class); - - static public Properties getPropertyFileDataProducer() { - Properties prop = new Properties(); - LOGGER.info("loading the property file"); - try { - InputStream inputStream = LoadPropertyFile.class.getClassLoader() - .getResourceAsStream("dme2testcase.properties"); - - prop.load(inputStream); - LOGGER.info("successfully loaded the property file"); - } catch (IOException e) { - LOGGER.error("Error while retrieving API keys: " + e); - } - return prop; - } - - static public void loadAFTProperties(String lat, String longi) { - System.setProperty("AFT_LATITUDE", lat); - System.setProperty("AFT_LONGITUDE", longi); - System.setProperty("AFT_ENVIRONMENT", "AFTUAT"); - // printProperties(); - System.out.println("Latitude =" + lat); - System.out.println("Longitude =" + longi); - } - - static public boolean isValidJsonString(String chkString) { - boolean isJson = true; - try { - new JSONObject(chkString); - } catch (Exception e) { - isJson = false; - } - return isJson; - } -} diff --git a/src/test/java/com/att/mr/test/dme2/TestRunner.java b/src/test/java/com/att/mr/test/dme2/TestRunner.java deleted file mode 100644 index dce6e6c..0000000 --- a/src/test/java/com/att/mr/test/dme2/TestRunner.java +++ /dev/null @@ -1,42 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.mr.test.dme2; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/mr/test/dme2/TopicBeanDME2.java b/src/test/java/com/att/mr/test/dme2/TopicBeanDME2.java deleted file mode 100644 index 4de5015..0000000 --- a/src/test/java/com/att/mr/test/dme2/TopicBeanDME2.java +++ /dev/null @@ -1,94 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -/** - * - */ -package com.att.mr.test.dme2; - -import java.io.Serializable; - -public class TopicBeanDME2 implements Serializable { - - private static final long serialVersionUID = -8620390377775457949L; - private String topicName; - private String description; - - - private int partitionCount; - private int replicationCount; - private boolean transactionEnabled = false; - - public boolean isTransactionEnabled() { - return transactionEnabled; - } - - public void setTransactionEnabled(boolean transactionEnabled) { - this.transactionEnabled = transactionEnabled; - } - - public TopicBeanDME2() { - super(); - } - - public TopicBeanDME2(String topicName, String description, int partitionCount, int replicationCount, - boolean transactionEnabled) { - super(); - this.topicName = topicName; - this.description = description; - this.partitionCount = partitionCount; - this.replicationCount = replicationCount; - this.transactionEnabled = transactionEnabled; - } - - public String getTopicName() { - return topicName; - } - - public void setTopicName(String topicName) { - this.topicName = topicName; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public int getPartitionCount() { - return partitionCount; - } - - public void setPartitionCount(int partitionCount) { - this.partitionCount = partitionCount; - } - - public int getReplicationCount() { - return replicationCount; - } - - public void setReplicationCount(int replicationCount) { - this.replicationCount = replicationCount; - } - -} diff --git a/src/test/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java b/src/test/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java deleted file mode 100644 index 9967036..0000000 --- a/src/test/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java +++ /dev/null @@ -1,131 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.apiServer.metrics.cambria; - - -import static org.junit.Assert.assertTrue; - -import java.io.File; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.mr.apiServer.metrics.cambria.DMaaPMetricsSender; - -public class DMaaPMetricsSenderTest { - - @Before - public void setUp() throws Exception { - ClassLoader classLoader = getClass().getClassLoader(); - AJSCPropertiesMap.refresh(new File(classLoader.getResource("MsgRtrApi.properties").getFile())); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSendPeriodically() { - - DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic"); - try { - sender.sendPeriodically(null, null, "testTopic"); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (NoClassDefFoundError e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testSendPeriodically2() { - - DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic"); - try { - sender.sendPeriodically(null, null, "url", "testTopic", 2); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testSend() { - - DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic"); - try { - sender.send(); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testRun() { - - DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic"); - try { - sender.run(); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/apiServer/metrics/cambria/JUnitTestSuite.java b/src/test/java/com/att/nsa/apiServer/metrics/cambria/JUnitTestSuite.java deleted file mode 100644 index 3e70420..0000000 --- a/src/test/java/com/att/nsa/apiServer/metrics/cambria/JUnitTestSuite.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.apiServer.metrics.cambria; - -import junit.framework.TestSuite; -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DMaaPMetricsSenderTest.class}) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/apiServer/metrics/cambria/TestRunner.java b/src/test/java/com/att/nsa/apiServer/metrics/cambria/TestRunner.java deleted file mode 100644 index 9040ee8..0000000 --- a/src/test/java/com/att/nsa/apiServer/metrics/cambria/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.apiServer.metrics.cambria; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/CambriaApiExceptionTest.java b/src/test/java/com/att/nsa/cambria/CambriaApiExceptionTest.java deleted file mode 100644 index f41f33f..0000000 --- a/src/test/java/com/att/nsa/cambria/CambriaApiExceptionTest.java +++ /dev/null @@ -1,74 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria; - -import static org.junit.Assert.*; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.exception.ErrorResponse; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class CambriaApiExceptionTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetErrRes() { - - int status = 1; - String msg = "helloWorld"; - CambriaApiException cambria = new CambriaApiException(status, msg); - - cambria.getErrRes(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testSetErrRes() { - - int status = 1; - String msg = "helloWorld"; - CambriaApiException cambria = new CambriaApiException(status, msg); - - cambria.setErrRes(new ErrorResponse(200, 0, "OK")); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/CambriaApiTestCase.java b/src/test/java/com/att/nsa/cambria/CambriaApiTestCase.java deleted file mode 100644 index cb934af..0000000 --- a/src/test/java/com/att/nsa/cambria/CambriaApiTestCase.java +++ /dev/null @@ -1,51 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ -package com.att.nsa.cambria; - -import java.util.HashMap; -import java.util.Map; - -import junit.framework.TestCase; - -import org.junit.Ignore; - -@Ignore -public class CambriaApiTestCase extends TestCase { - - @Override - protected void setUp() throws Exception { - final Map argMap = new HashMap (); - - argMap.put("broker.type", "memory"); - argMap.put("accounts.dao.class", "com.att.nsa.fe3c.dao.memory.MemoryDAOFactory"); - argMap.put("topic.dao.class", "com.att.nsa.fe3c.dao.memory.MemoryDAOFactory"); - - //CambriaApiServer.start(argMap); - System.out.println("setUp() complete"); - } - - public void tearDown() throws Exception { - System.out.println("tearDown() started"); - //CambriaApiServer.stop(); - System.out.println("tearDown() complete"); - } -} diff --git a/src/test/java/com/att/nsa/cambria/CambriaApiVersionInfoTest.java b/src/test/java/com/att/nsa/cambria/CambriaApiVersionInfoTest.java deleted file mode 100644 index b71bf6e..0000000 --- a/src/test/java/com/att/nsa/cambria/CambriaApiVersionInfoTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria; - -import static org.junit.Assert.*; - -import com.att.dmf.mr.CambriaApiVersionInfo; -import com.att.dmf.mr.exception.ErrorResponse; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class CambriaApiVersionInfoTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetVersion() { - CambriaApiVersionInfo.getVersion(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - - -} diff --git a/src/test/java/com/att/nsa/cambria/CambriaRateLimiterTest.java b/src/test/java/com/att/nsa/cambria/CambriaRateLimiterTest.java deleted file mode 100644 index 8e853d2..0000000 --- a/src/test/java/com/att/nsa/cambria/CambriaRateLimiterTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * - *******************************************************************************/ -package com.att.nsa.cambria; - -import junit.framework.TestCase; - -import org.junit.Test; - -import com.att.nsa.apiServer.util.NsaTestClock; - -public class CambriaRateLimiterTest -{ - @Test - public void testRateLimiter () - { - /*final NsaTestClock clock = new NsaTestClock(1, false); - - final String topic = "topic"; - final String consumerGroup = "group"; - final String clientId = "id"; - - final int window = 5; - - // rate limit: 1 empty call/min avg over 5 minutes, with 10ms delay - final CambriaRateLimiter rater = new CambriaRateLimiter ( 1.0, window, 10 ); - try - { - // prime with a call to start rate window - rater.onCall ( topic, consumerGroup, clientId ); - rater.onSend ( topic, consumerGroup, clientId, 1 ); - clock.addMs ( 1000*60*window ); - - // rate should now be 0, with a good window - for ( int i=0; i<4; i++ ) - { - clock.addMs ( 1000*15 ); - rater.onCall ( topic, consumerGroup, clientId ); - rater.onSend ( topic, consumerGroup, clientId, 0 ); - } - // rate is now 0.8 = 4 calls in last 5 minutes = 4/5 = 0.8 - - clock.addMs ( 1000*15 ); - rater.onCall ( topic, consumerGroup, clientId ); - rater.onSend ( topic, consumerGroup, clientId, 0 ); - // rate = 1.0 = 5 calls in last 5 mins - - clock.addMs ( 1000 ); - rater.onCall ( topic, consumerGroup, clientId ); - rater.onSend ( topic, consumerGroup, clientId, 0 ); - // rate = 1.2 = 6 calls in last 5 mins, should fire - - fail ( "Should have thrown rate limit exception." ); - } - catch ( CambriaApiException x ) - { - // good - }*/ - } -} diff --git a/src/test/java/com/att/nsa/cambria/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/JUnitTestSuite.java deleted file mode 100644 index 9c98524..0000000 --- a/src/test/java/com/att/nsa/cambria/JUnitTestSuite.java +++ /dev/null @@ -1,42 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ CambriaApiExceptionTest.class, CambriaApiVersionInfoTest.class, CambriaApiTestCase.class, CambriaRateLimiterTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/TestRunner.java b/src/test/java/com/att/nsa/cambria/TestRunner.java deleted file mode 100644 index 24a9bc2..0000000 --- a/src/test/java/com/att/nsa/cambria/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/CuratorFrameworkImpl.java b/src/test/java/com/att/nsa/cambria/backends/kafka/CuratorFrameworkImpl.java deleted file mode 100644 index a12e96c..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/kafka/CuratorFrameworkImpl.java +++ /dev/null @@ -1,278 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.kafka; - -import java.util.concurrent.TimeUnit; - -import org.apache.curator.CuratorZookeeperClient; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.WatcherRemoveCuratorFramework; -import org.apache.curator.framework.api.CreateBuilder; -import org.apache.curator.framework.api.CuratorListener; -import org.apache.curator.framework.api.DeleteBuilder; -import org.apache.curator.framework.api.ExistsBuilder; -import org.apache.curator.framework.api.GetACLBuilder; -import org.apache.curator.framework.api.GetChildrenBuilder; -import org.apache.curator.framework.api.GetConfigBuilder; -import org.apache.curator.framework.api.GetDataBuilder; -import org.apache.curator.framework.api.ReconfigBuilder; -import org.apache.curator.framework.api.RemoveWatchesBuilder; -import org.apache.curator.framework.api.SetACLBuilder; -import org.apache.curator.framework.api.SetDataBuilder; -import org.apache.curator.framework.api.SyncBuilder; -import org.apache.curator.framework.api.UnhandledErrorListener; -import org.apache.curator.framework.api.transaction.CuratorMultiTransaction; -import org.apache.curator.framework.api.transaction.CuratorTransaction; -import org.apache.curator.framework.api.transaction.TransactionOp; -import org.apache.curator.framework.imps.CuratorFrameworkState; -import org.apache.curator.framework.listen.Listenable; -import org.apache.curator.framework.schema.SchemaSet; -import org.apache.curator.framework.state.ConnectionStateErrorPolicy; -import org.apache.curator.framework.state.ConnectionStateListener; -import org.apache.curator.utils.EnsurePath; -import org.apache.zookeeper.Watcher; -import org.apache.zookeeper.server.quorum.flexible.QuorumVerifier; - -public class CuratorFrameworkImpl implements CuratorFramework { - - @Override - public void blockUntilConnected() throws InterruptedException { - // TODO Auto-generated method stub - - } - - @Override - public boolean blockUntilConnected(int arg0, TimeUnit arg1) throws InterruptedException { - // TODO Auto-generated method stub - return false; - } - - @Override - public ExistsBuilder checkExists() { - // TODO Auto-generated method stub - return null; - } - - @Override - public void clearWatcherReferences(Watcher arg0) { - // TODO Auto-generated method stub - - } - - @Override - public void close() { - // TODO Auto-generated method stub - - } - - @Override - public CreateBuilder create() { - // TODO Auto-generated method stub - return null; - } - - @Override - public DeleteBuilder delete() { - // TODO Auto-generated method stub - return null; - } - - @Override - public GetACLBuilder getACL() { - // TODO Auto-generated method stub - return null; - } - - @Override - public GetChildrenBuilder getChildren() { - // TODO Auto-generated method stub - return null; - } - - @Override - public Listenable getConnectionStateListenable() { - // TODO Auto-generated method stub - return null; - } - - @Override - public Listenable getCuratorListenable() { - // TODO 
Auto-generated method stub - return null; - } - - @Override - public GetDataBuilder getData() { - // TODO Auto-generated method stub - return null; - } - - @Override - public String getNamespace() { - // TODO Auto-generated method stub - return null; - } - - @Override - public CuratorFrameworkState getState() { - // TODO Auto-generated method stub - return null; - } - - @Override - public Listenable getUnhandledErrorListenable() { - // TODO Auto-generated method stub - return null; - } - - @Override - public CuratorZookeeperClient getZookeeperClient() { - // TODO Auto-generated method stub - return null; - } - - @Override - public CuratorTransaction inTransaction() { - // TODO Auto-generated method stub - return null; - } - - @Override - public boolean isStarted() { - // TODO Auto-generated method stub - return false; - } - - @Override - public EnsurePath newNamespaceAwareEnsurePath(String arg0) { - // TODO Auto-generated method stub - return null; - } - - @Override - public CuratorFramework nonNamespaceView() { - // TODO Auto-generated method stub - return null; - } - - @Override - public SetACLBuilder setACL() { - // TODO Auto-generated method stub - return null; - } - - @Override - public SetDataBuilder setData() { - // TODO Auto-generated method stub - return null; - } - - @Override - public void start() { - // TODO Auto-generated method stub - - } - - @Override - public SyncBuilder sync() { - // TODO Auto-generated method stub - return null; - } - - @Override - public void sync(String arg0, Object arg1) { - // TODO Auto-generated method stub - - } - - @Override - public CuratorFramework usingNamespace(String arg0) { - // TODO Auto-generated method stub - return null; - } - - @Override - public ReconfigBuilder reconfig() { - // TODO Auto-generated method stub - return null; - } - - @Override - public GetConfigBuilder getConfig() { - // TODO Auto-generated method stub - return null; - } - - @Override - public CuratorMultiTransaction transaction() { - // TODO Auto-generated method stub - return null; - } - - @Override - public TransactionOp transactionOp() { - // TODO Auto-generated method stub - return null; - } - - @Override - public void createContainers(String path) throws Exception { - // TODO Auto-generated method stub - - } - - @Override - public RemoveWatchesBuilder watches() { - // TODO Auto-generated method stub - return null; - } - - @Override - public WatcherRemoveCuratorFramework newWatcherRemoveCuratorFramework() { - // TODO Auto-generated method stub - return null; - } - - @Override - public ConnectionStateErrorPolicy getConnectionStateErrorPolicy() { - // TODO Auto-generated method stub - return null; - } - - @Override - public QuorumVerifier getCurrentConfig() { - // TODO Auto-generated method stub - return null; - } - - @Override - public SchemaSet getSchemaSet() { - // TODO Auto-generated method stub - return null; - } - - @Override - public boolean isZk34CompatibilityMode() { - // TODO Auto-generated method stub - return false; - } - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java deleted file mode 100644 index 54ff469..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java +++ /dev/null @@ -1,42 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T 
Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.kafka; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ KafkaConsumerCacheTest.class, KafkaPublisherTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java b/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java deleted file mode 100644 index 06d7b58..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java +++ /dev/null @@ -1,256 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.kafka; - -import static org.junit.Assert.*; - -import java.util.concurrent.ConcurrentHashMap; - -import org.apache.curator.framework.CuratorFramework; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.modules.junit4.PowerMockRunner; -import org.powermock.core.classloader.annotations.PrepareForTest; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; - -import com.att.dmf.mr.backends.MetricsSet; -import com.att.dmf.mr.backends.kafka.Kafka011Consumer; -import com.att.dmf.mr.backends.kafka.KafkaConsumerCache; -import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({ AJSCPropertiesMap.class }) -public class KafkaConsumerCacheTest { - private KafkaConsumerCache kafkaConsumerCache =null; - @Mock - private ConcurrentHashMap fConsumers; - @Mock - private MetricsSet fMetrics; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - - } - - @After - public void tearDown() throws Exception { - } - - - @Test - public void testSweep() { - kafkaConsumerCache = new KafkaConsumerCache(); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kSetting_TouchEveryMs")).thenReturn("100"); - kafkaConsumerCache.sweep(); - - } - - - // DOES NOT WORK - @Test - public void testStartCache() { - - /* - * KafkaConsumerCache kafka = null; - * - * try { kafka = new KafkaConsumerCache("123", null); - * - * } catch (NoClassDefFoundError e) { try { kafka.startCache("DMAAP", - * null); } catch (NullPointerException e1) { // TODO Auto-generated - * catch block assertTrue(true); } catch (KafkaConsumerCacheException - * e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } - */ - - - new CuratorFrameworkImpl(); - new MetricsSetImpl(); - KafkaConsumerCache kafka=null; - try { - kafka = new KafkaConsumerCache(); - kafka.setfApiId("1"); - kafka.startCache("DMAAP", null); - } catch (NoClassDefFoundError e) { - - } catch (KafkaConsumerCacheException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void testGetCuratorFramework() { - - CuratorFramework curator = new CuratorFrameworkImpl(); - new MetricsSetImpl(); - try { - - } catch (NoClassDefFoundError e) { - - KafkaConsumerCache.getCuratorFramework(curator); - } - - } - - /* - * @Test public void testStopCache() { - * - * KafkaConsumerCache kafka = null; new CuratorFrameworkImpl(); new - * MetricsSetImpl(); try { kafka = new KafkaConsumerCache("123", null); - * kafka.stopCache(); } catch (NoClassDefFoundError e) { - * - * } - * - * } - */ - - @Test - public void testGetConsumerFor() { - - KafkaConsumerCache kafka = null; - - try { - kafka = new KafkaConsumerCache(); - kafka.getConsumerFor("testTopic", "CG1", "23"); - } catch (NoClassDefFoundError e) { - - } catch (KafkaConsumerCacheException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void testPutConsumerFor() { - - Kafka011Consumer consumer = null; - KafkaConsumerCache kafka = null; - - try { - kafka = 
new KafkaConsumerCache(); - - } catch (NoClassDefFoundError e) { - try { - kafka.putConsumerFor("testTopic", "CG1", "23", consumer); - } catch (NullPointerException e1) { - // TODO Auto-generated catch block - assertTrue(true); - } catch (KafkaConsumerCacheException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - } - - } - - @Test - public void testGetConsumers() { - - KafkaConsumerCache kafka = null; - - try { - kafka = new KafkaConsumerCache(); - - } catch (NoClassDefFoundError e) { - try { - kafka.getConsumers(); - } catch (NullPointerException e1) { - // TODO Auto-generated catch block - assertTrue(true); - } - } - - } - - @Test - public void testDropAllConsumers() { - - KafkaConsumerCache kafka = null; - try { - kafka = new KafkaConsumerCache(); - - } catch (NoClassDefFoundError e) { - try { - kafka.dropAllConsumers(); - } catch (NullPointerException e1) { - // TODO Auto-generated catch block - assertTrue(true); - } - } - - } - - @Test - public void testSignalOwnership() { - - KafkaConsumerCache kafka = null; - - try { - kafka = new KafkaConsumerCache(); - // kafka.signalOwnership("testTopic", "CG1", "23"); - } catch (NoClassDefFoundError e) { - try { - kafka.signalOwnership("testTopic", "CG1", "23"); - } catch (KafkaConsumerCacheException e1) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e1) { - // TODO Auto-generated catch block - // assertTrue(true); - e1.printStackTrace(); - } - - } - - // assertTrue(true); - } - - @Test - public void testDropConsumer() { - - KafkaConsumerCache kafka = null; - - try { - kafka = new KafkaConsumerCache(); - // kafka.dropConsumer("testTopic", "CG1", "23"); - } catch (NoClassDefFoundError e) { - try { - kafka.dropConsumer("testTopic", "CG1", "23"); - } catch (NullPointerException e1) { - // TODO Auto-generated catch block - assertTrue(true); - } - } - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java b/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java deleted file mode 100644 index 3673845..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java +++ /dev/null @@ -1,153 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.kafka; - -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; - -import kafka.common.FailedToSendMessageException; -import kafka.producer.KeyedMessage; - -public class KafkaPublisherTest { - - - - /*@Before - public void setUp() throws Exception { - ClassLoader classLoader = getClass().getClassLoader(); - AJSCPropertiesMap.refresh(new File(classLoader.getResource("MsgRtrApi.properties").getFile())); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSendMessages() { - - String topic = "testTopic"; - - KafkaPublisher kafka = null; - try { - kafka = new KafkaPublisher(null); - - } catch (missingReqdSetting e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NoClassDefFoundError e) { - try { - kafka.sendMessage(topic, null); - } catch (NullPointerException e1) { - // TODO Auto-generated catch block - assertTrue(true); - } catch (FailedToSendMessageException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - } catch (FailedToSendMessageException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void testSendBatchMessage() { - - String topic = "testTopic"; - - KafkaPublisher kafka = null; - ArrayList> kms = null; - try { - kafka = new KafkaPublisher(null); - - } catch (missingReqdSetting e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NoClassDefFoundError e) { - try { - kafka.sendBatchMessage(topic, kms); - } catch (NullPointerException e1) { - // TODO Auto-generated catch block - assertTrue(true); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - } catch (FailedToSendMessageException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void sendMessages() { - - String topic = "testTopic"; - - List msgs = null; - - KafkaPublisher kafka = null; - //ArrayList> kms = null; - try { - kafka = new KafkaPublisher(null); - - } catch (missingReqdSetting e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NoClassDefFoundError e) { - try { - kafka.sendMessages(topic, msgs); - } catch (NullPointerException e1) { - // TODO Auto-generated catch block - assertTrue(true); - } catch (FailedToSendMessageException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - } catch (FailedToSendMessageException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - }*/ - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java b/src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java deleted file mode 100644 index b5f7b74..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java +++ /dev/null @@ -1,123 +0,0 @@ -/*- - * 
============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.kafka; - -import java.util.List; -import java.util.Map; - -import org.json.JSONObject; - -import com.att.dmf.mr.backends.MetricsSet; -import com.att.nsa.metrics.CdmMeasuredItem; - -public class MetricsSetImpl implements MetricsSet { - - @Override - public List getEntries() { - // TODO Auto-generated method stub - return null; - } - - @Override - public CdmMeasuredItem getItem(String arg0) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Map getItems() { - // TODO Auto-generated method stub - return null; - } - - @Override - public void putItem(String arg0, CdmMeasuredItem arg1) { - // TODO Auto-generated method stub - - } - - @Override - public void removeItem(String arg0) { - // TODO Auto-generated method stub - - } - - @Override - public int size() { - // TODO Auto-generated method stub - return 0; - } - - @Override - public JSONObject toJson() { - // TODO Auto-generated method stub - return null; - } - - @Override - public void setupCambriaSender() { - // TODO Auto-generated method stub - - } - - @Override - public void onRouteComplete(String name, long durationMs) { - // TODO Auto-generated method stub - - } - - @Override - public void publishTick(int amount) { - // TODO Auto-generated method stub - - } - - @Override - public void consumeTick(int amount) { - // TODO Auto-generated method stub - - } - - @Override - public void onKafkaConsumerCacheMiss() { - // TODO Auto-generated method stub - - } - - @Override - public void onKafkaConsumerCacheHit() { - // TODO Auto-generated method stub - - } - - @Override - public void onKafkaConsumerClaimed() { - // TODO Auto-generated method stub - - } - - @Override - public void onKafkaConsumerTimeout() { - // TODO Auto-generated method stub - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/TestRunner.java b/src/test/java/com/att/nsa/cambria/backends/kafka/TestRunner.java deleted file mode 100644 index 53ea31b..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/kafka/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.kafka; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/memory/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/backends/memory/JUnitTestSuite.java deleted file mode 100644 index fc11552..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/memory/JUnitTestSuite.java +++ /dev/null @@ -1,43 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.memory; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ MemoryConsumerFactoryTest.class, MemoryMetaBrokerTest.class, MemoryQueueTest.class, - MemoryQueuePublisherTest.class, MessageLoggerTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactoryTest.java b/src/test/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactoryTest.java deleted file mode 100644 index d6fe6e6..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactoryTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.memory; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.backends.memory.MemoryConsumerFactory; - -public class MemoryConsumerFactoryTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetConsumerFor() { - MemoryConsumerFactory factory = new MemoryConsumerFactory(null); - - - String topic = "testTopic"; - String consumerGroupId = "CG1"; - String clientId = "C1"; - String remoteHost="remoteHost"; - int timeoutMs = 1000; - factory.getConsumerFor(topic, consumerGroupId, clientId, timeoutMs,remoteHost); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testDropCache() { - MemoryConsumerFactory factory = new MemoryConsumerFactory(null); - - factory.dropCache(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGetConsumers() { - MemoryConsumerFactory factory = new MemoryConsumerFactory(null); - - factory.getConsumers(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/memory/MemoryMetaBrokerTest.java b/src/test/java/com/att/nsa/cambria/backends/memory/MemoryMetaBrokerTest.java deleted file mode 100644 index 2c41068..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/memory/MemoryMetaBrokerTest.java +++ /dev/null @@ -1,92 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.memory; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.backends.memory.MemoryMetaBroker; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; - -public class MemoryMetaBrokerTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetAllTopics() { - MemoryMetaBroker broker = new MemoryMetaBroker(null, null); - - broker.getAllTopics(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGeTopic() { - MemoryMetaBroker broker = new MemoryMetaBroker(null, null); - - broker.getTopic("testTopic"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testCreateTopic() { - - //uncommenting this gives a Null Pointer Exception - - MemoryMetaBroker broker = new MemoryMetaBroker(null, null); - - int timeoutMs = 1000; - try { - broker.createTopic("testTopic","topic for testing", "ABCD123", 1,3, true); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisherTest.java b/src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisherTest.java deleted file mode 100644 index 4522734..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisherTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.memory; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.backends.memory.MemoryQueuePublisher; - - -public class MemoryQueuePublisherTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSendBatchMessage() { - MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null); - - try { - publisher.sendBatchMessageNew("testTopic", null); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testSendMessage() { - MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null); - - try { - publisher.sendMessage("testTopic", null); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testSendMessages() { - MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null); - - - try { - publisher.sendMessages("testTopic", null); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueueTest.java b/src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueueTest.java deleted file mode 100644 index 637add2..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/memory/MemoryQueueTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.memory; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.backends.memory.MemoryQueue; - - -public class MemoryQueueTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testCreateTopic() { - MemoryQueue queue = new MemoryQueue(); - - queue.createTopic("testTopic"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - @Test - public void testRemoveTopic() { - MemoryQueue queue = new MemoryQueue(); - - queue.removeTopic("testTopic"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testPut() { - MemoryQueue queue = new MemoryQueue(); - - try { - queue.put("testTopic", null); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGet() { - MemoryQueue queue = new MemoryQueue(); - - queue.get("testTopic", "consumer"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/backends/memory/MessageLoggerTest.java b/src/test/java/com/att/nsa/cambria/backends/memory/MessageLoggerTest.java deleted file mode 100644 index 81e620c..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/memory/MessageLoggerTest.java +++ /dev/null @@ -1,104 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.memory; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.backends.memory.MessageLogger; - - -public class MessageLoggerTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSendMessage() { - MessageLogger dropper = new MessageLogger(); - - try { - dropper.sendMessage("testTopic", null); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testSendMessages() { - MessageLogger dropper = new MessageLogger(); - - try { - dropper.sendMessages("testTopic", null); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testSendBatchMessage() { - MessageLogger dropper = new MessageLogger(); - - try { - dropper.sendBatchMessageNew("testTopic", null); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} - - - - diff --git a/src/test/java/com/att/nsa/cambria/backends/memory/TestRunner.java b/src/test/java/com/att/nsa/cambria/backends/memory/TestRunner.java deleted file mode 100644 index 2c75929..0000000 --- a/src/test/java/com/att/nsa/cambria/backends/memory/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.backends.memory; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest.java b/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest.java deleted file mode 100644 index 7fa2bfd..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.nsa.metrics.CdmMetricsRegistry; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class ApiKeyBeanTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetEmail() { - - ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); - - bean.getEmail(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest2.java b/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest2.java deleted file mode 100644 index 763c2e7..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest2.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.nsa.metrics.CdmMetricsRegistry; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class ApiKeyBeanTest2 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSetEmail() { - - ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); - - bean.setEmail("rs857c@att.com"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest3.java b/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest3.java deleted file mode 100644 index 27f7c15..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest3.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.nsa.metrics.CdmMetricsRegistry; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class ApiKeyBeanTest3 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetDescription() { - - ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); - - bean.getDescription(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest4.java b/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest4.java deleted file mode 100644 index 6191c70..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest4.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.nsa.metrics.CdmMetricsRegistry; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class ApiKeyBeanTest4 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSetDescription() { - - ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); - - bean.setDescription("new testing description"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest5.java b/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest5.java deleted file mode 100644 index 99fc9aa..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest5.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.nsa.metrics.CdmMetricsRegistry; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class ApiKeyBeanTest5 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetSharedSecret() { - - ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); - - bean.getSharedSecret(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest6.java b/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest6.java deleted file mode 100644 index ca368e3..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/ApiKeyBeanTest6.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.nsa.metrics.CdmMetricsRegistry; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class ApiKeyBeanTest6 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetKey() { - - ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); - - bean.getKey(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiterTest.java b/src/test/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiterTest.java deleted file mode 100644 index 8c3568f..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiterTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPCambriaLimiter; - -public class DMaaPCambriaLimiterTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetSleepMsForRate() { - - - double value = 3; - DMaaPCambriaLimiter.getSleepMsForRate(value); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testOnCall() { - - DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(1,2, 3); - try { - limiter.onCall("testTopic", "ConsumerGroup1", "client2","remoteHost"); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testOnSend() { - - DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(3,3, 3); - limiter.onSend("testTopic", "consumerGroup1", "client1", 100); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest.java b/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest.java deleted file mode 100644 index 784b97b..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest.java +++ /dev/null @@ -1,53 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.DMaaPContext; - -public class DMaaPContextTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetBatchID() { - - DMaaPContext.getBatchID(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest2.java b/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest2.java deleted file mode 100644 index 88baf58..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest2.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import javax.servlet.http.HttpServletRequest; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.DMaaPContext; - -public class DMaaPContextTest2 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetRequest() { - - DMaaPContext context = new DMaaPContext(); - - context.getRequest(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest3.java b/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest3.java deleted file mode 100644 index b6b12b0..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest3.java +++ /dev/null @@ -1,57 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import javax.servlet.http.HttpServletRequest; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.DMaaPContext; - -public class DMaaPContextTest3 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetResponse() { - - DMaaPContext context = new DMaaPContext(); - - context.getResponse(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest4.java b/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest4.java deleted file mode 100644 index 95fbeef..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest4.java +++ /dev/null @@ -1,60 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import javax.servlet.http.HttpServletRequest; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.mock.web.MockHttpServletRequest; - -import com.att.dmf.mr.beans.DMaaPContext; - -public class DMaaPContextTest4 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetSession() { - - DMaaPContext context = new DMaaPContext(); - MockHttpServletRequest request = new MockHttpServletRequest(); - context.setRequest(request); - - context.getSession(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest5.java b/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest5.java deleted file mode 100644 index 2b2c9e1..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest5.java +++ /dev/null @@ -1,57 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import javax.servlet.http.HttpServletRequest; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.DMaaPContext; - -public class DMaaPContextTest5 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetConfigReader() { - - DMaaPContext context = new DMaaPContext(); - - context.getConfigReader(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest6.java b/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest6.java deleted file mode 100644 index b87021d..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/DMaaPContextTest6.java +++ /dev/null @@ -1,57 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - -import javax.servlet.http.HttpServletRequest; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.DMaaPContext; - -public class DMaaPContextTest6 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetConsumerRequestTime() { - - DMaaPContext context = new DMaaPContext(); - - context.getConsumerRequestTime(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBrokerTest.java b/src/test/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBrokerTest.java deleted file mode 100644 index 67ade96..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBrokerTest.java +++ /dev/null @@ -1,252 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.assertTrue; - -import org.I0Itec.zkclient.ZkClient; -import org.I0Itec.zkclient.exception.ZkNoNodeException; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import static org.mockito.Matchers.any; - -import java.util.Properties; - -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; - -import com.att.dmf.mr.CambriaApiException; -import org.apache.kafka.clients.admin.AdminClient; - -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.metabroker.Broker1.TopicExistsException; -import com.att.nsa.configs.ConfigDb; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.configs.ConfigPath; - - -@RunWith(PowerMockRunner.class) -@PrepareForTest({ AdminClient.class}) -public class DMaaPKafkaMetaBrokerTest { - - @InjectMocks - private DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker; - @Mock - private ZkClient fZk; - @Mock - private AdminClient fKafkaAdminClient; - @Mock - private AdminClient client; - @Mock - private ConfigDb configDb; - @Mock - ConfigPath fBaseTopicData; - @Mock - private ZkClient zkClient; - @Mock - Topic mockTopic; - - @Before - public void setUp() { - MockitoAnnotations.initMocks(this); - PowerMockito.mockStatic(AdminClient.class); - //PowerMockito.when(AdminClient.create (any(Properties.class) )).thenReturn(fKafkaAdminClient); - - //PowerMockito.mockStatic(AdminUtils.class); - PowerMockito.when(configDb.parse("/topics")).thenReturn(fBaseTopicData); - - - } - - @Test - public void testGetAlltopics() { - try { - dMaaPKafkaMetaBroker.getAllTopics(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void testcreateTopic() { - try { - dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - // TODO Auto-generatee.printStackTrace(); - } - - } - - - @Test - public void testcreateTopic_wrongPartition() { - try { - - dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 0, 1, true); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - // TODO Auto-generatee.printStackTrace(); - } - - } - - @Test - public void testcreateTopic_wrongReplica() { - try { - - dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 0, true); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - // TODO Auto-generatee.printStackTrace(); - } - - } - - @Test - public void testcreateTopic_error1() { - try { - dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (TopicExistsException e) { - // TODO 
Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - - } - - @Test - public void testcreateTopic_error2() { - try { - dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - @Test - public void testcreateTopic_error3() { - try { - dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - assertTrue(true); - - } catch (Exception e) { - e.printStackTrace(); - } - - } - - @Test - public void testDeleteTopic() { - try { - dMaaPKafkaMetaBroker.deleteTopic("testtopic"); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testDeleteTopic_error1() { - try { - dMaaPKafkaMetaBroker.deleteTopic("testtopic"); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - - } - - @Test - public void testDeleteTopic_error2() { - try { - dMaaPKafkaMetaBroker.deleteTopic("testtopic"); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - e.printStackTrace(); - } - - } - - @Test - public void testDeleteTopic_error3() { - try { - dMaaPKafkaMetaBroker.deleteTopic("testtopic"); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - assertTrue(true); - } catch (Exception e) { - e.printStackTrace(); - } - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/beans/JUnitTestSuite.java deleted file mode 100644 index e90675e..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/JUnitTestSuite.java +++ /dev/null @@ -1,49 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ ApiKeyBeanTest.class, ApiKeyBeanTest2.class, ApiKeyBeanTest3.class, ApiKeyBeanTest4.class, ApiKeyBeanTest5.class, ApiKeyBeanTest6.class, - DMaaPCambriaLimiterTest.class, DMaaPContextTest.class, DMaaPContextTest2.class, - DMaaPContextTest3.class,DMaaPContextTest4.class,DMaaPContextTest5.class,DMaaPContextTest6.class, - LogDetailsTest.class, LogDetailsTest2.class,LogDetailsTest3.class,LogDetailsTest4.class,LogDetailsTest5.class,LogDetailsTest6.class, - LogDetailsTest7.class,LogDetailsTest8.class,LogDetailsTest9.class,LogDetailsTest10.class,LogDetailsTest11.class,LogDetailsTest12.class, - LogDetailsTest13.class,LogDetailsTest14.class,LogDetailsTest15.class,LogDetailsTest16.class, TopicBeanTest.class,TopicBeanTest2.class,TopicBeanTest3.class, - TopicBeanTest4.class,TopicBeanTest5.class,TopicBeanTest6.class,TopicBeanTest7.class,TopicBeanTest8.class,TopicBeanTest9.class,TopicBeanTest10.class,}) - -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest.java deleted file mode 100644 index e7c2173..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest.java +++ /dev/null @@ -1,70 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetPublisherId() { - - LogDetails details = new LogDetails(); - - details.getPublisherId(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGetPublisherLogDetails(){ - - LogDetails details = new LogDetails(); - details.setTotalMessageCount(1); - details.setConsumeTimestamp("02-27-1018"); - details.setSubscriberGroupId("1"); - details.setSubscriberId("1"); - assertEquals(details.getTotalMessageCount(),1); - assertEquals(details.getConsumeTimestamp(),"02-27-1018"); - assertEquals(details.getSubscriberId(),"1"); - assertEquals(details.getSubscriberGroupId(),"1"); - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest10.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest10.java deleted file mode 100644 index 44a6e45..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest10.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest10 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetPublishTimestamp() { - - LogDetails details = new LogDetails(); - - details.getPublishTimestamp(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest11.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest11.java deleted file mode 100644 index 3a935e6..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest11.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest11 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetMessageLengthInBytes() { - - LogDetails details = new LogDetails(); - - details.getMessageLengthInBytes(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest12.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest12.java deleted file mode 100644 index 1b2b3f5..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest12.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest12 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetConsumeTimestamp() { - - LogDetails details = new LogDetails(); - - details.getConsumeTimestamp(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest13.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest13.java deleted file mode 100644 index a30112e..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest13.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest13 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetTotalMessageCount() { - - LogDetails details = new LogDetails(); - - details.getTotalMessageCount(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest14.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest14.java deleted file mode 100644 index b04573d..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest14.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest14 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testIsTransactionEnabled() { - - LogDetails details = new LogDetails(); - - details.isTransactionEnabled(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest15.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest15.java deleted file mode 100644 index 8bbbb28..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest15.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest15 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetTransactionIdTs() { - - LogDetails details = new LogDetails(); - - details.getTransactionIdTs(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest16.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest16.java deleted file mode 100644 index eef7322..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest16.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest16 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetServerIp() { - - LogDetails details = new LogDetails(); - - details.getServerIp(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest17.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest17.java deleted file mode 100644 index c19b46d..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest17.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest17 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetPublisherLogDetails() { - - LogDetails details = new LogDetails(); - - details.getPublisherLogDetails(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest18.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest18.java deleted file mode 100644 index e894ae4..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest18.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest18 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetPublisherLogDetails() { - - LogDetails details = new LogDetails(); - - details.getPublisherLogDetails(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest2.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest2.java deleted file mode 100644 index acb818d..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest2.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest2 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetTransactionId() { - - LogDetails details = new LogDetails(); - - details.getTransactionId(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest3.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest3.java deleted file mode 100644 index b9b664d..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest3.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest3 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetTopicId() { - - LogDetails details = new LogDetails(); - - details.getTopicId(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest4.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest4.java deleted file mode 100644 index b17aadc..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest4.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest4 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetSubscriberGroupId() { - - LogDetails details = new LogDetails(); - - details.getSubscriberGroupId(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest5.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest5.java deleted file mode 100644 index 3beeab8..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest5.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest5 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetSubscriberId() { - - LogDetails details = new LogDetails(); - - details.getSubscriberId(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest6.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest6.java deleted file mode 100644 index 98d85d8..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest6.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest6 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetPublisherIp() { - - LogDetails details = new LogDetails(); - - details.getPublisherIp(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest7.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest7.java deleted file mode 100644 index 8753762..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest7.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest7 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetMessageBatchId() { - - LogDetails details = new LogDetails(); - - details.getMessageBatchId(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest8.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest8.java deleted file mode 100644 index d88ed11..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest8.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest8 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetMessageTimestamp() { - - LogDetails details = new LogDetails(); - - details.getMessageTimestamp(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest9.java b/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest9.java deleted file mode 100644 index b47b152..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/LogDetailsTest9.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.LogDetails; - -public class LogDetailsTest9 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetMessageSequence() { - - LogDetails details = new LogDetails(); - - details.getMessageSequence(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TestRunner.java b/src/test/java/com/att/nsa/cambria/beans/TestRunner.java deleted file mode 100644 index 25eb3c4..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest.java deleted file mode 100644 index 5a60767..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetTopicName() { - - TopicBean bean = new TopicBean(); - - bean.getTopicName(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest10.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest10.java deleted file mode 100644 index 4ebe601..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest10.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest10 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSetTransactionEnabled() { - - TopicBean bean = new TopicBean(); - bean.setTransactionEnabled(true); - - /* String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True"));*/ - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest2.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest2.java deleted file mode 100644 index e6958de..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest2.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest2 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSetTopicName() { - - TopicBean bean = new TopicBean(); - bean.setTopicName("testTopic"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest3.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest3.java deleted file mode 100644 index 61dd6f9..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest3.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest3 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetPartitionCount() { - - TopicBean bean = new TopicBean(); - bean.getPartitionCount(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest4.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest4.java deleted file mode 100644 index 894f959..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest4.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest4 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSetPartitionCount() { - - TopicBean bean = new TopicBean(); - bean.setPartitionCount(8); - - /* String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True"));*/ - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest5.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest5.java deleted file mode 100644 index bd69e2e..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest5.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest5 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetReplicationCount() { - - TopicBean bean = new TopicBean(); - bean.getReplicationCount(); - - /* String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True"));*/ - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest6.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest6.java deleted file mode 100644 index db68525..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest6.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest6 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSetReplicationCount() { - - TopicBean bean = new TopicBean(); - bean.setReplicationCount(3); - - /* String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True"));*/ - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest7.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest7.java deleted file mode 100644 index 3a6fdb6..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest7.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest7 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testIsTransactionEnabled() { - - TopicBean bean = new TopicBean(); - bean.isTransactionEnabled(); - - /* String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True"));*/ - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest8.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest8.java deleted file mode 100644 index 7cf8049..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest8.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest8 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetTopicDescription() { - - TopicBean bean = new TopicBean(); - bean.getTopicDescription(); - - /* String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True"));*/ - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest9.java b/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest9.java deleted file mode 100644 index c6487e1..0000000 --- a/src/test/java/com/att/nsa/cambria/beans/TopicBeanTest9.java +++ /dev/null @@ -1,55 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.beans; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.TopicBean; - -public class TopicBeanTest9 { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSetTopicDescription() { - - TopicBean bean = new TopicBean(); - bean.setTopicDescription("testing topic"); - - /* String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True"));*/ - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/embed/EmbedConfigurationReader.java b/src/test/java/com/att/nsa/cambria/embed/EmbedConfigurationReader.java deleted file mode 100644 index 228664b..0000000 --- a/src/test/java/com/att/nsa/cambria/embed/EmbedConfigurationReader.java +++ /dev/null @@ -1,169 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ - -package com.att.nsa.cambria.embed; - -import java.io.File; -import java.util.Arrays; -import java.util.Map; -import java.util.Properties; - -import org.apache.commons.io.FileUtils; -import org.apache.curator.framework.CuratorFramework; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.backends.kafka.KafkaPublisher; -import com.att.dmf.mr.backends.memory.MemoryMetaBroker; -import com.att.dmf.mr.backends.memory.MemoryQueue; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.AdminClientConfig; -import org.apache.kafka.clients.admin.CreateTopicsResult; -import org.apache.kafka.clients.admin.NewTopic; -import org.apache.kafka.common.KafkaFuture; -import com.att.dmf.mr.beans.DMaaPKafkaConsumerFactory; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.beans.DMaaPMetricsSet; -import com.att.dmf.mr.beans.DMaaPZkClient; -import com.att.dmf.mr.beans.DMaaPZkConfigDb; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.security.DMaaPAuthenticator; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPCuratorFactory; -import com.att.dmf.mr.utils.PropertyReader; -import com.att.nsa.security.db.BaseNsaApiDbImpl; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; -import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory; - - -public class EmbedConfigurationReader { - private static final String DEFAULT_KAFKA_LOG_DIR = "/kafka_embedded"; - public static final String TEST_TOPIC = "testTopic"; - private static final int BROKER_ID = 0; - private static final int BROKER_PORT = 5000; - private static final String LOCALHOST_BROKER = String.format("localhost:%d", BROKER_PORT); - - private static final String DEFAULT_ZOOKEEPER_LOG_DIR = "/zookeeper"; - private static final int ZOOKEEPER_PORT = 2000; - private static final String ZOOKEEPER_HOST = String.format("localhost:%d", ZOOKEEPER_PORT); - - private static final String groupId = "groupID"; - String dir; - private AdminClient fKafkaAdminClient; - KafkaLocal kafkaLocal; - - public void setUp() throws Exception { - - ClassLoader classLoader = getClass().getClassLoader(); - AJSCPropertiesMap.refresh(new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile())); - - Properties kafkaProperties; - Properties zkProperties; - - try { - //load properties - dir = new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()).getParent(); - kafkaProperties = getKafkaProperties(dir + DEFAULT_KAFKA_LOG_DIR, BROKER_PORT, BROKER_ID); - zkProperties = getZookeeperProperties(ZOOKEEPER_PORT,dir + DEFAULT_ZOOKEEPER_LOG_DIR); - - //start kafkaLocalServer - kafkaLocal = new KafkaLocal(kafkaProperties, zkProperties); - - Map map = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop); - map.put(CambriaConstants.kSetting_ZkConfigDbServers, ZOOKEEPER_HOST); - map.put("kafka.client.zookeeper", ZOOKEEPER_HOST); - map.put("kafka.metadata.broker.list", LOCALHOST_BROKER); - - DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader()); - - final Properties props = new Properties (); - props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092" ); - props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret'"); - 
props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); - props.put("sasl.mechanism", "PLAIN"); - fKafkaAdminClient = AdminClient.create ( props ); - - // if(!AdminUtils.topicExists(dMaaPZkClient, TEST_TOPIC)) - // AdminUtils.createTopic(dMaaPZkClient, TEST_TOPIC, 3, 1, new Properties()); - final NewTopic topicRequest = new NewTopic ( TEST_TOPIC, 3, new Integer(1).shortValue () ); - fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) ); - Thread.sleep(5000); - } catch (Exception e){ - e.printStackTrace(System.out); - } - } - - private static Properties getKafkaProperties(String logDir, int port, int brokerId) { - Properties properties = new Properties(); - properties.put("port", port + ""); - properties.put("broker.id", brokerId + ""); - properties.put("log.dir", logDir); - properties.put("zookeeper.connect", ZOOKEEPER_HOST); - properties.put("default.replication.factor", "1"); - properties.put("delete.topic.enable", "true"); - properties.put("consumer.timeout.ms", -1); - return properties; - } - - private static Properties getZookeeperProperties(int port, String zookeeperDir) { - Properties properties = new Properties(); - properties.put("clientPort", port + ""); - properties.put("dataDir", zookeeperDir); - return properties; - } - - public void tearDown() throws Exception { - DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader()); - if(fKafkaAdminClient!=null) - fKafkaAdminClient.deleteTopics(Arrays.asList(TEST_TOPIC)); - //AdminUtils.deleteTopic(dMaaPZkClient, TEST_TOPIC); - //dMaaPZkClient.delete(dir + DEFAULT_KAFKA_LOG_DIR); - //dMaaPZkClient.delete(dir + DEFAULT_ZOOKEEPER_LOG_DIR); - kafkaLocal.stop(); - FileUtils.cleanDirectory(new File(dir + DEFAULT_KAFKA_LOG_DIR)); - } - - - public ConfigurationReader buildConfigurationReader() throws Exception { - - setUp(); - - PropertyReader propertyReader = new PropertyReader(); - DMaaPMetricsSet dMaaPMetricsSet = new DMaaPMetricsSet(propertyReader); - DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(propertyReader); - DMaaPZkConfigDb dMaaPZkConfigDb = new DMaaPZkConfigDb(dMaaPZkClient, propertyReader); - CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader()); - DMaaPKafkaConsumerFactory dMaaPKafkaConsumerFactory = new DMaaPKafkaConsumerFactory(dMaaPMetricsSet, curatorFramework,null); - MemoryQueue memoryQueue = new MemoryQueue(); - MemoryMetaBroker memoryMetaBroker = new MemoryMetaBroker(memoryQueue, dMaaPZkConfigDb); - BaseNsaApiDbImpl baseNsaApiDbImpl = new BaseNsaApiDbImpl<>(dMaaPZkConfigDb, new NsaSimpleApiKeyFactory()); - DMaaPAuthenticator dMaaPAuthenticator = new DMaaPAuthenticatorImpl<>(baseNsaApiDbImpl); - KafkaPublisher kafkaPublisher = new KafkaPublisher(propertyReader); - DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker = new DMaaPKafkaMetaBroker(propertyReader, dMaaPZkClient, dMaaPZkConfigDb); - - return new ConfigurationReader(propertyReader, - dMaaPMetricsSet, dMaaPZkClient, dMaaPZkConfigDb, kafkaPublisher, - curatorFramework, dMaaPKafkaConsumerFactory, dMaaPKafkaMetaBroker, - memoryQueue, memoryMetaBroker, baseNsaApiDbImpl, dMaaPAuthenticator); - - } -} diff --git a/src/test/java/com/att/nsa/cambria/embed/KafkaLocal.java b/src/test/java/com/att/nsa/cambria/embed/KafkaLocal.java deleted file mode 100644 index b71976e..0000000 --- a/src/test/java/com/att/nsa/cambria/embed/KafkaLocal.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * 
================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.embed; - -import java.io.IOException; -import java.util.Properties; - -import kafka.server.KafkaConfig; -import kafka.server.KafkaServerStartable; - - -public class KafkaLocal { - - public KafkaServerStartable kafka; - public ZooKeeperLocal zookeeper; - - public KafkaLocal(Properties kafkaProperties, Properties zkProperties) throws IOException, InterruptedException{ - KafkaConfig kafkaConfig = new KafkaConfig(kafkaProperties); - - //start local zookeeper - System.out.println("starting local zookeeper..."); - zookeeper = new ZooKeeperLocal(zkProperties); - System.out.println("done"); - - //start local kafka broker - kafka = new KafkaServerStartable(kafkaConfig); - System.out.println("starting local kafka broker..."); - kafka.startup(); - System.out.println("done"); - } - - - public void stop(){ - //stop kafka broker - System.out.println("stopping kafka..."); - kafka.shutdown(); - System.out.println("done"); - } - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/cambria/embed/ZooKeeperLocal.java b/src/test/java/com/att/nsa/cambria/embed/ZooKeeperLocal.java deleted file mode 100644 index b107826..0000000 --- a/src/test/java/com/att/nsa/cambria/embed/ZooKeeperLocal.java +++ /dev/null @@ -1,59 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.embed; - -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.Properties; - -import org.apache.zookeeper.server.ServerConfig; -import org.apache.zookeeper.server.ZooKeeperServerMain; -import org.apache.zookeeper.server.quorum.QuorumPeerConfig; - -public class ZooKeeperLocal { - - ZooKeeperServerMain zooKeeperServer; - - public ZooKeeperLocal(Properties zkProperties) throws FileNotFoundException, IOException{ - QuorumPeerConfig quorumConfiguration = new QuorumPeerConfig(); - try { - quorumConfiguration.parseProperties(zkProperties); - } catch(Exception e) { - throw new RuntimeException(e); - } - - zooKeeperServer = new ZooKeeperServerMain(); - final ServerConfig configuration = new ServerConfig(); - configuration.readFrom(quorumConfiguration); - - - new Thread() { - public void run() { - try { - zooKeeperServer.runFromConfig(configuration); - } catch (IOException e) { - System.out.println("ZooKeeper Failed"); - e.printStackTrace(System.err); - } - } - }.start(); - } -} diff --git a/src/test/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapperTest.java b/src/test/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapperTest.java deleted file mode 100644 index b10c565..0000000 --- a/src/test/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapperTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.exception; - -import static org.junit.Assert.*; -import com.att.dmf.mr.exception.DMaaPCambriaExceptionMapper; -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class DMaaPCambriaExceptionMapperTest { - - @Before - public void setUp() throws Exception { - DMaaPCambriaExceptionMapper exception = new DMaaPCambriaExceptionMapper(); - } - - @After - public void tearDown() throws Exception { - - } - - - @Test - public void testToResponse() { - - DMaaPCambriaExceptionMapper mapper = new DMaaPCambriaExceptionMapper(); - - try { - mapper.toResponse(null); - } catch (NullPointerException e) { - assertTrue(true); - } - - - - } - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/cambria/exception/DMaaPErrorMessagesTest.java b/src/test/java/com/att/nsa/cambria/exception/DMaaPErrorMessagesTest.java deleted file mode 100644 index 1a97bb8..0000000 --- a/src/test/java/com/att/nsa/cambria/exception/DMaaPErrorMessagesTest.java +++ /dev/null @@ -1,372 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.exception; - -import static org.junit.Assert.*; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class DMaaPErrorMessagesTest { - - @Before - public void setUp() throws Exception { - - } - - @After - public void tearDown() throws Exception { - - } - - - @Test - public void testGetMsgSizeExceeds() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getMsgSizeExceeds(); - assertTrue(true); - - } - - @Test - public void testSetMsgSizeExceeds() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setMsgSizeExceeds("200"); - assertTrue(true); - - } - - @Test - public void testGetNotFound() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getNotFound(); - assertTrue(true); - - } - - @Test - public void testSetNotFound() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setNotFound("not found"); - assertTrue(true); - - } - - @Test - public void testGetServerUnav() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getServerUnav(); - assertTrue(true); - - } - - @Test - public void testSetServerUnav() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setServerUnav("server1"); - assertTrue(true); - - } - - @Test - public void testGetMethodNotAllowed() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getMethodNotAllowed(); - assertTrue(true); - - } - - @Test - public void testSetMethodNotAllowed() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setMethodNotAllowed("server2"); - assertTrue(true); - - } - - - @Test - public void testGetBadRequest() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getBadRequest(); - assertTrue(true); - - } - - @Test - public void testSetBadRequest() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setBadRequest("badRequest"); - assertTrue(true); - - } - - @Test - public void testGetNwTimeout() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getNwTimeout(); - assertTrue(true); - - } - - @Test - public void testSetNwTimeout() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setNwTimeout("12:00:00"); - assertTrue(true); - - } - - @Test - public void testGetNotPermitted1() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getNotPermitted1(); - assertTrue(true); - - } - - @Test - public void testSetNotPermitted1() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setNotPermitted1("not permitted"); - assertTrue(true); - - } - - @Test - public void testGetNotPermitted2() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getNotPermitted2(); - assertTrue(true); - - } - - @Test - public void testSetNotPermitted2() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setNotPermitted2("not permitted2"); - assertTrue(true); - - } - - @Test - public void testGetTopicsfailure() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getTopicsfailure(); - assertTrue(true); - - } - - @Test - public void testSetTopicsfailure() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setTopicsfailure("failure"); - assertTrue(true); - - } - - @Test - public void testGetTopicDetailsFail() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - 
msg.getTopicDetailsFail(); - assertTrue(true); - - } - - @Test - public void testSetTopicDetailsFail() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setTopicDetailsFail("topic details fail"); - assertTrue(true); - - } - - @Test - public void testGetCreateTopicFail() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getCreateTopicFail(); - assertTrue(true); - - } - - @Test - public void testSetCreateTopicFail() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setCreateTopicFail("topic details fail"); - assertTrue(true); - - } - - @Test - public void testGetIncorrectJson() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getIncorrectJson(); - assertTrue(true); - - } - - @Test - public void testSetIncorrectJson() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setIncorrectJson("incorrect Json"); - assertTrue(true); - - } - - @Test - public void testGetDeleteTopicFail() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getDeleteTopicFail(); - assertTrue(true); - - } - - @Test - public void testSetDeleteTopicFail() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setDeleteTopicFail("delete tpic fail"); - assertTrue(true); - - } - - @Test - public void testGetConsumeMsgError() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getConsumeMsgError(); - assertTrue(true); - - } - - @Test - public void testSetConsumeMsgError() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setConsumeMsgError("consume message error"); - assertTrue(true); - - } - - - @Test - public void testGetPublishMsgError() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getPublishMsgError(); - assertTrue(true); - - } - - @Test - public void testSetPublishMsgError() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setPublishMsgError("publish message error"); - assertTrue(true); - - } - - @Test - public void testGetPublishMsgCount() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getPublishMsgCount(); - assertTrue(true); - - } - - @Test - public void testSetPublishMsgCount() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setPublishMsgCount("200"); - assertTrue(true); - - } - - @Test - public void testGetAuthFailure() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getAuthFailure(); - assertTrue(true); - - } - - @Test - public void testSetAuthFailure() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setAuthFailure("auth failure"); - assertTrue(true); - - } - - @Test - public void testGetTopicNotExist() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.getTopicNotExist(); - assertTrue(true); - - } - - @Test - public void testSetTopicNotExist() { - - DMaaPErrorMessages msg = new DMaaPErrorMessages(); - msg.setTopicNotExist("toopic doesn't exist"); - assertTrue(true); - - } - - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapperTest.java b/src/test/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapperTest.java deleted file mode 100644 index 0f53416..0000000 --- a/src/test/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapperTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.exception; - -import static org.junit.Assert.*; -import com.att.dmf.mr.exception.DMaaPWebExceptionMapper; -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class DMaaPWebExceptionMapperTest { - - @Before - public void setUp() throws Exception { - - } - - @After - public void tearDown() throws Exception { - - } - - - @Test - public void testToResponse() { - - DMaaPWebExceptionMapper msg = new DMaaPWebExceptionMapper(); - - try { - msg.toResponse(null); - } catch (Exception e) { - assertTrue(true); - } - - - - } - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/cambria/exception/ErrorResponseTest.java b/src/test/java/com/att/nsa/cambria/exception/ErrorResponseTest.java deleted file mode 100644 index aa463a7..0000000 --- a/src/test/java/com/att/nsa/cambria/exception/ErrorResponseTest.java +++ /dev/null @@ -1,146 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.exception; - -import static org.junit.Assert.*; -import com.att.dmf.mr.exception.ErrorResponse; -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class ErrorResponseTest { - - @Before - public void setUp() throws Exception { - - } - - @After - public void tearDown() throws Exception { - - } - - - @Test - public void testGetHttpStatusCode() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.getHttpStatusCode(); - assertTrue(true); - - - } - - @Test - public void tesSGetHttpStatusCode() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.setHttpStatusCode(200); - assertTrue(true); - - - } - - @Test - public void testGetMrErrorCode() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.getMrErrorCode(); - assertTrue(true); - - - } - - @Test - public void testSetMrErrorCode() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.setMrErrorCode(500); - assertTrue(true); - - - } - - @Test - public void testGetErrorMessage() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.getErrorMessage(); - assertTrue(true); - - - } - - @Test - public void testSetErrorMessage() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.setErrorMessage("no error"); - assertTrue(true); - - - } - - @Test - public void testToString() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.toString(); - assertTrue(true); - - - } - - @Test - public void testGetErrMapperStr1() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.setHelpURL("/help"); - assertTrue(true); - - - } - - @Test - public void testGetErrMapperStr() { - - ErrorResponse resp = new ErrorResponse(200, 500, "no error"); - - resp.getHelpURL(); - assertTrue(true); - - - } - - - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/cambria/exception/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/exception/JUnitTestSuite.java deleted file mode 100644 index 5b3ab7e..0000000 --- a/src/test/java/com/att/nsa/cambria/exception/JUnitTestSuite.java +++ /dev/null @@ -1,43 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.exception; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DMaaPCambriaExceptionMapperTest.class, - DMaaPErrorMessagesTest.class, DMaaPWebExceptionMapperTest.class, ErrorResponseTest.class}) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/exception/TestRunner.java b/src/test/java/com/att/nsa/cambria/exception/TestRunner.java deleted file mode 100644 index 75cf48d..0000000 --- a/src/test/java/com/att/nsa/cambria/exception/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.exception; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/listener/CambriaServletContextListenerTest.java b/src/test/java/com/att/nsa/cambria/listener/CambriaServletContextListenerTest.java deleted file mode 100644 index 441064f..0000000 --- a/src/test/java/com/att/nsa/cambria/listener/CambriaServletContextListenerTest.java +++ /dev/null @@ -1,79 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - - -package com.att.nsa.cambria.listener; - -import static org.junit.Assert.*; - -import javax.servlet.ServletContextEvent; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.listener.CambriaServletContextListener; - -public class CambriaServletContextListenerTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testContextDestroyed() { - CambriaServletContextListener listener = new CambriaServletContextListener(); - try { - listener.contextDestroyed(null); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testContextInitialized() { - CambriaServletContextListener listener = new CambriaServletContextListener(); - - try { - listener.contextInitialized(null); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/cambria/listener/DME2EndPointLoaderTest.java b/src/test/java/com/att/nsa/cambria/listener/DME2EndPointLoaderTest.java deleted file mode 100644 index 31451b7..0000000 --- a/src/test/java/com/att/nsa/cambria/listener/DME2EndPointLoaderTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.listener; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.listener.DME2EndPointLoader; - -public class DME2EndPointLoaderTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testPublishEndPoints() { - DME2EndPointLoader loader = DME2EndPointLoader.getInstance(); - - - try { - loader.publishEndPoints(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testUnPublishEndPoints() { - DME2EndPointLoader loader = DME2EndPointLoader.getInstance(); - - - try { - loader.unPublishEndPoints(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/listener/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/listener/JUnitTestSuite.java deleted file mode 100644 index fab3777..0000000 --- a/src/test/java/com/att/nsa/cambria/listener/JUnitTestSuite.java +++ /dev/null @@ -1,43 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - - -package com.att.nsa.cambria.listener; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DME2EndPointLoaderTest.class, CambriaServletContextListenerTest.class }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/listener/TestRunner.java b/src/test/java/com/att/nsa/cambria/listener/TestRunner.java deleted file mode 100644 index 68820c3..0000000 --- a/src/test/java/com/att/nsa/cambria/listener/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.listener; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metabroker/BrokerImpl.java b/src/test/java/com/att/nsa/cambria/metabroker/BrokerImpl.java deleted file mode 100644 index 9c5e4e0..0000000 --- a/src/test/java/com/att/nsa/cambria/metabroker/BrokerImpl.java +++ /dev/null @@ -1,71 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metabroker; - -import java.util.ArrayList; -import java.util.List; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.metabroker.Broker; -import com.att.dmf.mr.metabroker.Topic; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -public class BrokerImpl implements Broker { - - @Override - public List getAllTopics() throws ConfigDbException { - // TODO Auto-generated method stub - Topic top = new TopicImplem(); - - List list = new ArrayList(); - - for (int i = 0; i < 5; i++) { - top = new TopicImplem(); - list.add(top); - - } - - return null; - - } - - @Override - public Topic getTopic(String topic) throws ConfigDbException { - // TODO Auto-generated method stub - return new TopicImplem(); - } - - @Override - public Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas, - boolean transactionEnabled) throws TopicExistsException, CambriaApiException { - // TODO Auto-generated method stub - return new TopicImplem(topic, description, ownerApiKey, transactionEnabled); - } - - @Override - public void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException { - // TODO Auto-generated method stub - Topic top = new TopicImplem(); - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metabroker/BrokerImplTest.java b/src/test/java/com/att/nsa/cambria/metabroker/BrokerImplTest.java deleted file mode 100644 index edc5641..0000000 --- a/src/test/java/com/att/nsa/cambria/metabroker/BrokerImplTest.java +++ /dev/null @@ -1,109 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - - -package com.att.nsa.cambria.metabroker; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -public class BrokerImplTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetOwners() { - - try { - new BrokerImpl().getAllTopics(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - assertTrue(true); - } - - @Test - public void testGetTopic() { - - try { - new BrokerImpl().getTopic("topicName"); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - assertTrue(true); - - } - - @Test - public void testCreateTopic() { - - try { - new BrokerImpl().createTopic("topicName", "testing topic", "owner123", 3, 3, true); - - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - assertTrue(true); - - } - - @Test - public void testDeleteTopic() { - - try { - new BrokerImpl().deleteTopic("topicName"); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - assertTrue(true); - - } -} diff --git a/src/test/java/com/att/nsa/cambria/metabroker/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/metabroker/JUnitTestSuite.java deleted file mode 100644 index 0e2535b..0000000 --- a/src/test/java/com/att/nsa/cambria/metabroker/JUnitTestSuite.java +++ /dev/null @@ -1,42 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metabroker; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ BrokerImplTest.class, TopicImplemTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metabroker/TestRunner.java b/src/test/java/com/att/nsa/cambria/metabroker/TestRunner.java deleted file mode 100644 index e96d9c2..0000000 --- a/src/test/java/com/att/nsa/cambria/metabroker/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metabroker; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metabroker/TopicImplTest.java b/src/test/java/com/att/nsa/cambria/metabroker/TopicImplTest.java deleted file mode 100644 index 67cf60d..0000000 --- a/src/test/java/com/att/nsa/cambria/metabroker/TopicImplTest.java +++ /dev/null @@ -1,25 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metabroker; - -public class TopicImplTest { - -} diff --git a/src/test/java/com/att/nsa/cambria/metabroker/TopicImplem.java b/src/test/java/com/att/nsa/cambria/metabroker/TopicImplem.java deleted file mode 100644 index 9539073..0000000 --- a/src/test/java/com/att/nsa/cambria/metabroker/TopicImplem.java +++ /dev/null @@ -1,140 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metabroker; - -import java.util.Set; - -import com.att.dmf.mr.metabroker.Topic; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.NsaAcl; -import com.att.nsa.security.NsaApiKey; - -public class TopicImplem implements Topic { - private String name, owner, description; - boolean isTransactionEnabled; - private Set set = null; - private NsaAcl readerAcl, writerAcl; - - public TopicImplem() { - name = getName(); - owner = getOwner(); - description = getDescription(); - isTransactionEnabled = true; - readerAcl = getReaderAcl(); - writerAcl = getWriterAcl(); - } - - public TopicImplem(String topic, String description, String ownerApiKey, boolean transactionEnabled) { - - this.name = topic; - this.owner = ownerApiKey; - this.description = description; - isTransactionEnabled = transactionEnabled; - - - } - @Override - public Set getOwners() { - // TODO Auto-generated method stub - for (int i = 0; i < 5; i++) { - set.add("string" + (i + 1)); - } - return set; - } - - @Override - public String getName() { - // TODO Auto-generated method stub - return "testTopic"; - } - - @Override - public String getOwner() { - // TODO Auto-generated method stub - return "owner"; - } - - @Override - public String getDescription() { - // TODO Auto-generated method stub - return "topic for testing purposes"; - } - - @Override - public boolean isTransactionEnabled() { - // TODO Auto-generated method stub - return true; - } - - @Override - public NsaAcl getReaderAcl() { - // TODO Auto-generated method stub - return new NsaAcl(); - } - - @Override - public NsaAcl getWriterAcl() { - // TODO Auto-generated method stub - return new NsaAcl(); - } - - @Override - public void checkUserRead(NsaApiKey user) throws AccessDeniedException { - // TODO Auto-generated method stub - NsaApiKey u = user; - } - - @Override - public void checkUserWrite(NsaApiKey user) throws AccessDeniedException { - // TODO Auto-generated method stub - - NsaApiKey u = user; - } - - @Override - public void permitWritesFromUser(String publisherId, NsaApiKey asUser) - throws AccessDeniedException, ConfigDbException { - // TODO Auto-generated method stub - String id = publisherId; - - } - - @Override - public void denyWritesFromUser(String publisherId, NsaApiKey asUser) - throws AccessDeniedException, ConfigDbException { - // TODO Auto-generated method stub - String id = publisherId; - - } - - @Override - public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException, ConfigDbException { - // TODO Auto-generated method stub - String id = consumerId; - } - - @Override - public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException, ConfigDbException { - // TODO Auto-generated method stub - String id = consumerId; - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metabroker/TopicImplemTest.java b/src/test/java/com/att/nsa/cambria/metabroker/TopicImplemTest.java deleted file mode 100644 index c3ef097..0000000 --- a/src/test/java/com/att/nsa/cambria/metabroker/TopicImplemTest.java +++ /dev/null @@ -1,176 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metabroker; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -public class TopicImplemTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - - @Test - public void testGetOwners() { - - assertNotNull(new TopicImplem().getOwner()); - - } - - @Test - public void testGetName() { - - assertNotNull(new TopicImplem().getName()); - - } - - @Test - public void testGetOwner() { - - assertNotNull(new TopicImplem().getOwner()); - - } - - @Test - public void testGetDescription() { - - assertNotNull(new TopicImplem().getDescription()); - - } - - @Test - public void testIsTransactionEnabled() { - - assertTrue(new TopicImplem().isTransactionEnabled()); - - } - - @Test - public void testGetReaderAcl() { - new TopicImplem().getReaderAcl(); - assertTrue(true); - - } - - @Test - public void testGetWriterAcl() { - new TopicImplem().getReaderAcl(); - assertTrue(true); - - } - - - @Test - public void testCheckUserRead() { - try { - new TopicImplem().checkUserRead(null); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testCheckUserWrite() { - try { - new TopicImplem().checkUserWrite(null); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testPermitWritesFromUser() { - try { - new TopicImplem().permitWritesFromUser("publisherId", null); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testDenyWritesFromUser() { - try { - new TopicImplem().denyWritesFromUser("publisherId", null); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testPermitReadsByUser() { - try { - new TopicImplem().permitReadsByUser("consumerId", null); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testDenyReadsByUser() { - try { - new TopicImplem().denyReadsByUser("consumerId", null); - } catch (AccessDeniedException e) { - // TODO Auto-generated 
catch block - e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtilityTest.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtilityTest.java deleted file mode 100644 index 956d27b..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtilityTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility; - -public class CambriaPublisherUtilityTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testEscape() { - - CambriaPublisherUtility utility = new CambriaPublisherUtility(); - - utility.escape("testTopic"); - assertTrue(true); - - } - - @Test - public void testMakeUrl() { - - CambriaPublisherUtility utility = new CambriaPublisherUtility(); - - utility.makeUrl("testTopic"); - assertTrue(true); - - } - - @Test - public void testMakeConsumerUrl() { - - CambriaPublisherUtility utility = new CambriaPublisherUtility(); - - utility.makeConsumerUrl("testTopic", "CG1", "23"); - assertTrue(true); - - } - - @Test - public void testCreateHostsList() { - - CambriaPublisherUtility utility = new CambriaPublisherUtility(); - - try { - utility.createHostsList(null); - } catch (NullPointerException e) { - assertTrue(true); - } - - - } - - @Test - public void testHostForString() { - - CambriaPublisherUtility utility = new CambriaPublisherUtility(); - - utility.hostForString("hello"); - assertTrue(true); - - } -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java deleted file mode 100644 index 5916a18..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java +++ /dev/null @@ -1,171 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher; - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.Collection; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.listener.DME2EndPointLoader; -import com.att.dmf.mr.metrics.publisher.CambriaConsumer; -import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility; -import com.att.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory; - -public class DMaaPCambriaClientFactoryTest { - - private Collection hostSet; - - private String[] hostSetArray; - @Before - public void setUp() throws Exception { - hostSet = new ArrayList(); - - hostSetArray = new String[10]; - - for (int i = 0; i < 10; i++) { - hostSet.add("host" + (i+1)); - hostSetArray[i] = "host" + (i+1); - } - - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testCreateConsumer() { - - - - DMaaPCambriaClientFactory.createConsumer("hostList", "testTopic"); - assertTrue(true); - - } - - @Test - public void testCreateConsumer2() { - - - try { - DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic"); - } catch (NullPointerException e) { - assertTrue(true); - } - - - } - - @Test - public void testCreateConsumer3() { - - DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "filter"); - assertTrue(true); - - } - - @Test - public void testCreateConsumer4() { - DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23"); - assertTrue(true); - - } - - @Test - public void testCreateConsumer5() { - - DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23", 100, 20); - assertTrue(true); - - } - - @Test - public void testCreateConsumer6() { - - - DMaaPCambriaClientFactory.createConsumer("hostList", "testTopic", "CG1", "23", 100, 20, "filter", "apikey", "apisecret"); - assertTrue(true); - - } - - @Test - public void testCreateConsumer7() { - - DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23", 100, 20, "filter", "apikey", "apisecret"); - assertTrue(true); - - } - - @Test - public void testCreateSimplePublisher() { - - DMaaPCambriaClientFactory.createSimplePublisher("hostList", "testTopic"); - assertTrue(true); - - } - - @Test - public void testCreateBatchingPublisher() { - - DMaaPCambriaClientFactory.createBatchingPublisher("hostList", "testTopic", 100, 50); - assertTrue(true); - - } - - @Test - public void testCreateBatchingPublisher2() { - - DMaaPCambriaClientFactory.createBatchingPublisher("hostList", "testTopic", 100, 50, true); - assertTrue(true); - - } - - @Test - public void testCreateBatchingPublisher3() { - - DMaaPCambriaClientFactory.createBatchingPublisher(hostSetArray, "testTopic", 100, 50, true); - assertTrue(true); - - } - - @Test - public void 
testCreateBatchingPublisher4() { - - DMaaPCambriaClientFactory.createBatchingPublisher(hostSet, "testTopic", 100, 50, true); - assertTrue(true); - - } - - @Test - public void $testInject() { - - DMaaPCambriaClientFactory factory = new DMaaPCambriaClientFactory(); - factory.$testInject(null); - assertTrue(true); - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/JUnitTestSuite.java deleted file mode 100644 index 9b8e363..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/JUnitTestSuite.java +++ /dev/null @@ -1,42 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DMaaPCambriaClientFactoryTest.class, CambriaPublisherUtilityTest.class}) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/TestRunner.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/TestRunner.java deleted file mode 100644 index 3b628d9..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClientTest.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClientTest.java deleted file mode 100644 index 3242e83..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClientTest.java +++ /dev/null @@ -1,97 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher.impl; - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; - -import com.att.dmf.mr.metrics.publisher.impl.CambriaBaseClient; -import com.att.eelf.configuration.EELFLogger; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class CambriaBaseClientTest { - - private CambriaBaseClient client = null; - @Before - public void setUp() throws Exception { - - Collection hosts = new ArrayList(); - - for (int i = 0; i < 5; i++) { - hosts.add("host"+(i+1)); - } - - - client = new CambriaBaseClient(hosts, "client1"); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testClose() { - client.close(); - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGetLog() { - client.getLog(); - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testLogTo() { - client.logTo(null); - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - public JSONArray getJSONArray() { - - String[] data = {"stringone", "stringtwo"}; - JSONArray array = new JSONArray(Arrays.asList(data)); - - return array; - } - - @Test - public void testJsonArrayToSet() { - client.jsonArrayToSet(getJSONArray()); - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/ClockTest.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/ClockTest.java deleted file mode 100644 index e484722..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/ClockTest.java +++ /dev/null @@ -1,84 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher.impl; - -import static org.junit.Assert.*; - - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.metrics.publisher.impl.Clock; - -public class ClockTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetIt() { - - Clock.getIt(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testNow() { - - Clock.now(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testNowImpl() { - - Clock clock = new Clock(); - clock.nowImpl(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testRegister() { - - Clock clock = new Clock(); - Clock.register(clock); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java deleted file mode 100644 index fe034a5..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java +++ /dev/null @@ -1,94 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - - -package com.att.nsa.cambria.metrics.publisher.impl; - -import static org.junit.Assert.*; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl; - -public class DMaaPCambriaConsumerImplTest { - - private DMaaPCambriaConsumerImpl consumer = null; - @Before - public void setUp() throws Exception { - - Collection hosts = new ArrayList(); - - for (int i = 0; i < 5; i++) { - hosts.add("host"+(i+1)); - } - consumer = new DMaaPCambriaConsumerImpl(hosts, "testTopic", "consumerGroup1", "1", 2000, 200, "hi", - "9AMFFNIZpusO54oG","6BY86UQcio2LJdgyU7Cwg5oQ"); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testStringToList() { - - List response = DMaaPCambriaConsumerImpl.stringToList("Hello world, this is a test string"); - assertNotNull(response); - - - } - - @Test - public void testFetch() { - - Iterable response = null; - boolean flag = true; - try { - response = consumer.fetch(200, 20); - } catch (IOException e) { - flag = false; - // TODO Auto-generated catch block - e.printStackTrace(); - } - if(flag) { - assertNotNull(response); - } else { - assertTrue(true); - } - - } - - - @Test - public void testCreateUrlPath() { - - String response = consumer.createUrlPath(200, 20); - assertNotNull(response); - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java deleted file mode 100644 index 0c2c4ba..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher.impl; - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.Collection; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher; - - - -public class DMaaPCambriaSimplerBatchPublisherTest { - - private DMaaPCambriaSimplerBatchPublisher publisher = null; - @Before - public void setUp() throws Exception { - - Collection hosts = new ArrayList(); - - for (int i = 0; i < 5; i++) { - hosts.add("host"+(i+1)); - } - - publisher = new DMaaPCambriaSimplerBatchPublisher.Builder().againstUrls(hosts).onTopic("testTopic") - .batchTo(200, 100).compress(true).build(); - - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSend() { - - publisher.send("hello", "test message"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testClose() { - - publisher.close(); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - @Test - public void testGetPendingMEssageCount() { - - publisher.getPendingMessageCount(); - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/JUnitTestSuite.java deleted file mode 100644 index b45e8e2..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/JUnitTestSuite.java +++ /dev/null @@ -1,43 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher.impl; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DMaaPCambriaSimplerBatchPublisherTest.class, ClockTest.class, - CambriaBaseClientTest.class, DMaaPCambriaConsumerImplTest.class}) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/TestRunner.java b/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/TestRunner.java deleted file mode 100644 index a354678..0000000 --- a/src/test/java/com/att/nsa/cambria/metrics/publisher/impl/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.metrics.publisher.impl; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/CambriaEventSetTest.java b/src/test/java/com/att/nsa/cambria/resources/CambriaEventSetTest.java deleted file mode 100644 index b9fa1e5..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/CambriaEventSetTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources; - -import static org.junit.Assert.*; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.resources.CambriaEventSet; -import com.att.dmf.mr.resources.CambriaOutboundEventStream; - - -public class CambriaEventSetTest { - - private CambriaOutboundEventStream coes = null; - - @Before - public void setUp() throws Exception { - - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testNext() { - CambriaEventSet event = null; - String str = "contains text to be converted to InputStream"; - - InputStream stream = new ByteArrayInputStream(str.getBytes()); - try { - event = new CambriaEventSet("application/cambria", stream, true, "hi"); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - try { - event.next(); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - assertTrue(true); - - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/CambriaOutboundEventStreamTest.java b/src/test/java/com/att/nsa/cambria/resources/CambriaOutboundEventStreamTest.java deleted file mode 100644 index 8310bba..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/CambriaOutboundEventStreamTest.java +++ /dev/null @@ -1,106 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.resources.CambriaOutboundEventStream; - -public class CambriaOutboundEventStreamTest { - - private CambriaOutboundEventStream coes = null; - - @Before - public void setUp() throws Exception { - coes = new CambriaOutboundEventStream.Builder(null).timeout(10).limit(1).filter(CambriaConstants.kNoFilter) - .pretty(false).withMeta(true).build(); - DMaaPContext ctx = new DMaaPContext(); - //ctx.set... - coes.setDmaapContext(ctx); - coes.setTopic(null); - coes.setTransEnabled(true); - coes.setTopicStyle(true); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetSentCount() { - int sentCount = coes.getSentCount(); - assertTrue("Doesn't match, got " + sentCount, sentCount==0);; - } - - @Test - public void testWrite() { - //fail("Not yet implemented"); - } - - @Test - public void testForEachMessage() { - //fail("Not yet implemented"); - } - - @Test - public void testGetDmaapContext() { - DMaaPContext ctx = coes.getDmaapContext(); - - assertNotNull(ctx); - } - - @Test - public void testSetDmaapContext() { - DMaaPContext ctx = new DMaaPContext(); - coes.setDmaapContext(ctx); - assertTrue(ctx.equals(coes.getDmaapContext())); - } - - @Test - public void testGetTopic() { - coes.getTopic(); - assertTrue(true); - } - - @Test - public void testSetTopic() { - //fail("Not yet implemented"); - } - - @Test - public void testSetTopicStyle() { - coes.setTopicStyle(true); - assertTrue(true); - } - - @Test - public void testSetTransEnabled() { - coes.setTransEnabled(true); - assertTrue(true); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/resources/JUnitTestSuite.java deleted file mode 100644 index a68bbf1..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/JUnitTestSuite.java +++ /dev/null @@ -1,42 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ CambriaEventSetTest.class,CambriaOutboundEventStreamTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/TestRunner.java b/src/test/java/com/att/nsa/cambria/resources/TestRunner.java deleted file mode 100644 index ecfa2ef..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java b/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java deleted file mode 100644 index a308491..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources.streamReaders; - -import static org.junit.Assert.*; - -import java.io.IOException; -import java.io.InputStream; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.resources.streamReaders.CambriaJsonStreamReader; - -import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.Publisher.message; - -public class CambriaJsonStreamReaderTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - - @Test - public void testNext() { - - CambriaJsonStreamReader test = null; - - String source = "{'name': 'tester', 'id': '2'}"; - InputStream stream = null; - try { - stream = IOUtils.toInputStream(source, "UTF-8"); - test = new CambriaJsonStreamReader(stream,"hello"); - test.next(); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (CambriaApiException e1) { - e1.printStackTrace(); - } - - assertTrue(true); - - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java b/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java deleted file mode 100644 index a5a3aac..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources.streamReaders; - -import static org.junit.Assert.*; - -import java.io.IOException; -import java.io.InputStream; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.resources.streamReaders.CambriaRawStreamReader; - -import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.CambriaApiException; - -public class CambriaRawStreamReaderTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testNext() { - - CambriaRawStreamReader test = null; - message msg = null; - - String source = "{'name': 'tester', 'id': '2'}"; - InputStream stream = null; - try { - stream = IOUtils.toInputStream(source, "UTF-8"); - test = new CambriaRawStreamReader(stream,"hello"); - msg = test.next(); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (CambriaApiException e1) { - e1.printStackTrace(); - } - - assertNotNull(msg); - - - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReaderTest.java b/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReaderTest.java deleted file mode 100644 index d992183..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReaderTest.java +++ /dev/null @@ -1,72 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources.streamReaders; - -import static org.junit.Assert.*; - -import java.io.IOException; -import java.io.InputStream; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.resources.streamReaders.CambriaStreamReader; - -import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.CambriaApiException; - -public class CambriaStreamReaderTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testNext() { - - CambriaStreamReader test = null; - - - String source = "{'name': 'tester', 'id': '2', 'message': 'hello'}"; - InputStream stream = null; - try { - stream = IOUtils.toInputStream(source, "UTF-8"); - test = new CambriaStreamReader(stream); - test.next(); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (CambriaApiException e1) { - e1.printStackTrace(); - } - - assertTrue(true); - - - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java b/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java deleted file mode 100644 index e7b0c83..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java +++ /dev/null @@ -1,71 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources.streamReaders; - -import static org.junit.Assert.*; - -import java.io.IOException; -import java.io.InputStream; -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.resources.streamReaders.CambriaStreamReader; - -import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.CambriaApiException; - -public class CambriaTextStreamReaderTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testNext() { - - CambriaStreamReader test = null; - - String source = "{'name': 'tester', 'id': '2', 'message': 'hello'}"; - InputStream stream = null; - try { - stream = IOUtils.toInputStream(source, "UTF-8"); - test = new CambriaStreamReader(stream); - test.next(); - } catch (IOException e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (CambriaApiException e1) { - e1.printStackTrace(); - } - - assertTrue(true); - - - } - - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/streamReaders/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/resources/streamReaders/JUnitTestSuite.java deleted file mode 100644 index d80abf9..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/streamReaders/JUnitTestSuite.java +++ /dev/null @@ -1,43 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources.streamReaders; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ CambriaJsonStreamReaderTest.class,CambriaRawStreamReaderTest.class, - CambriaStreamReaderTest.class, CambriaTextStreamReaderTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/resources/streamReaders/TestRunner.java b/src/test/java/com/att/nsa/cambria/resources/streamReaders/TestRunner.java deleted file mode 100644 index 0f697cc..0000000 --- a/src/test/java/com/att/nsa/cambria/resources/streamReaders/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.resources.streamReaders; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImplTest.java b/src/test/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImplTest.java deleted file mode 100644 index 82a9c2b..0000000 --- a/src/test/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImplTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.security; - -import static org.junit.Assert.*; - -import javax.servlet.http.HttpServletRequest; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.mock.web.MockHttpServletRequest; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl; - - - -public class DMaaPAAFAuthenticatorImplTest { - - private MockHttpServletRequest request = null; - @Before - public void setUp() throws Exception { - //creating servlet object - request = new MockHttpServletRequest(); - request.setServerName("www.example.com"); - request.setRequestURI("/foo"); - request.setQueryString("param1=value1¶m"); - String url = request.getRequestURL() + "?" + request.getQueryString(); - - - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testAafAuthentication() { - - DMaaPAAFAuthenticatorImpl authenticator = new DMaaPAAFAuthenticatorImpl(); - authenticator.aafAuthentication(request, "admin"); - assertTrue(true); - - } - - - - /*@Test - public void testAafPermissionString() { - - DMaaPAAFAuthenticatorImpl authenticator = new DMaaPAAFAuthenticatorImpl(); - try { - authenticator.aafPermissionString("testTopic", "admin"); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - assertTrue(true); - - }*/ - - -} diff --git a/src/test/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImplTest.java b/src/test/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImplTest.java deleted file mode 100644 index 2f91db2..0000000 --- a/src/test/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImplTest.java +++ /dev/null @@ -1,129 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.security; - -import static org.junit.Assert.*; - -import javax.servlet.http.HttpServletRequest; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.mock.web.MockHttpServletRequest; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.security.DMaaPAuthenticator; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; - - - -public class DMaaPAuthenticatorImplTest { - - private MockHttpServletRequest request = null; - @Before - public void setUp() throws Exception { - //creating servlet object - request = new MockHttpServletRequest(); - request.setServerName("www.example.com"); - request.setRequestURI("/foo"); - request.setQueryString("param1=value1¶m"); - String url = request.getRequestURL() + "?" + request.getQueryString(); - - - } - - @After - public void tearDown() throws Exception { - } - - /*@Test - public void testAuthenticate() { - - DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null); - - DMaaPContext ctx = new DMaaPContext(); - authenticator.authenticate(ctx); - assertTrue(true); - - }*/ - - - - /*@Test - public void testAafPermissionString() { - - DMaaPAAFAuthenticatorImpl authenticator = new DMaaPAAFAuthenticatorImpl(); - try { - authenticator.aafPermissionString("testTopic", "admin"); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - assertTrue(true); - - }*/ - - @Test - public void testIsAuthentic() { - - DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null); - - authenticator.isAuthentic(request); - assertTrue(true); - - } - - @Test - public void testQualify() { - - DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null); - - authenticator.qualify(request); - assertTrue(true); - - } - - @Test - public void testAddAuthenticator() { - - DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null); - DMaaPAuthenticator authenticator2 = new DMaaPAuthenticatorImpl(null); - - authenticator.addAuthenticator(authenticator2); - assertTrue(true); - - } - - /*@Test - public void testGetAuthenticatedUser() { - - - DMaaPContext ctx = new DMaaPContext(); - DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); - assertTrue(true); - - } - */ - - -} diff --git a/src/test/java/com/att/nsa/cambria/security/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/security/JUnitTestSuite.java deleted file mode 100644 index 911fba3..0000000 --- a/src/test/java/com/att/nsa/cambria/security/JUnitTestSuite.java +++ /dev/null @@ -1,43 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.security; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DMaaPAAFAuthenticatorImplTest.class,DMaaPAuthenticatorImplTest.class, -}) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/security/TestRunner.java b/src/test/java/com/att/nsa/cambria/security/TestRunner.java deleted file mode 100644 index 3ab0ea7..0000000 --- a/src/test/java/com/att/nsa/cambria/security/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.security; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java b/src/test/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java deleted file mode 100644 index 89ba6c1..0000000 --- a/src/test/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java +++ /dev/null @@ -1,107 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package com.att.nsa.cambria.security.impl;
-
-import static org.junit.Assert.*;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.springframework.mock.web.MockHttpServletRequest;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.security.DMaaPAuthenticator;
-import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
-import com.att.dmf.mr.security.impl.DMaaPMechIdAuthenticator;
-
-
-
-public class DMaaPMechIdAuthenticatorTest {
-
- private MockHttpServletRequest request = null;
- @Before
- public void setUp() throws Exception {
- //creating servlet object
- request = new MockHttpServletRequest();
- request.setServerName("www.example.com");
- request.setRequestURI("/foo");
- request.setQueryString("param1=value1&param");
- String url = request.getRequestURL() + "?" + request.getQueryString();
-
-
- }
-
- @After
- public void tearDown() throws Exception {
- }
-
-
- @Test
- public void testQualify() {
-
- DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
-
- authenticator.qualify(request);
- assertTrue(true);
-
- }
-
- @Test
- public void testAuthenticate() {
-
- DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
-
- DMaaPContext ctx = new DMaaPContext();
- authenticator.authenticate(ctx);
- assertTrue(true);
-
- }
-
- @Test
- public void testIsAuthentic() {
- DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
-
- authenticator.isAuthentic(request);
- assertTrue(true);
-
- }
-
-
-
- @Test
- public void testAddAuthenticator() {
-
- DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
- DMaaPMechIdAuthenticator authenticator2 = new DMaaPMechIdAuthenticator();
-
- authenticator.addAuthenticator(authenticator2);
- assertTrue(true);
-
- }
-
-
-
-
-
-}
diff --git a/src/test/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java b/src/test/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java
deleted file mode 100644
index d437bf3..0000000
--- a/src/test/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package com.att.nsa.cambria.security.impl;
-
-import static org.junit.Assert.*;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.springframework.mock.web.MockHttpServletRequest;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.security.DMaaPAuthenticator;
-import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
-import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
-
-
-
-public class DMaaPOriginalUebAuthenticatorTest {
-
- private MockHttpServletRequest request = null;
- @Before
- public void setUp() throws Exception {
- //creating servlet object
- request = new MockHttpServletRequest();
- request.setServerName("www.example.com");
- request.setRequestURI("/foo");
- request.setQueryString("param1=value1&param");
- String url = request.getRequestURL() + "?" + request.getQueryString();
-
-
- }
-
- @After
- public void tearDown() throws Exception {
- }
-
-
- @Test
- public void testQualify() {
-
- DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
-
- authenticator.qualify(request);
- assertTrue(true);
-
- }
-
- @Test
- public void testAuthenticate() {
-
- DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
-
- DMaaPContext ctx = new DMaaPContext();
- authenticator.authenticate(ctx);
- assertTrue(true);
-
- }
-
- @Test
- public void testAuthenticate2() {
-
- DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
- DMaaPContext ctx = new DMaaPContext();
- authenticator.authenticate("google.com", "xHMDwk25kwkkyi26JH","Dec 16, 2016", "Dec/16/2016","123");
-
-
- }
-
- @Test
- public void testIsAuthentic() {
- DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
-
- authenticator.isAuthentic(request);
- assertTrue(true);
-
- }
-
-
-
- @Test
- public void testAddAuthenticator() {
-
- DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100);
- DMaaPOriginalUebAuthenticator authenticator2 = new DMaaPOriginalUebAuthenticator(null, 100);
-
- authenticator.addAuthenticator(authenticator2);
- assertTrue(true);
-
- }
-
-
-
-
-
-}
diff --git a/src/test/java/com/att/nsa/cambria/security/impl/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/security/impl/JUnitTestSuite.java
deleted file mode 100644
index 0593ee5..0000000
--- a/src/test/java/com/att/nsa/cambria/security/impl/JUnitTestSuite.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.security.impl; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DMaaPMechIdAuthenticatorTest.class, DMaaPOriginalUebAuthenticatorTest.class, -}) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/security/impl/TestRunner.java b/src/test/java/com/att/nsa/cambria/security/impl/TestRunner.java deleted file mode 100644 index 4688aec..0000000 --- a/src/test/java/com/att/nsa/cambria/security/impl/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.security.impl; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/AdminServiceImplemTest.java b/src/test/java/com/att/nsa/cambria/service/impl/AdminServiceImplemTest.java deleted file mode 100644 index fd87cdb..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/AdminServiceImplemTest.java +++ /dev/null @@ -1,182 +0,0 @@ -/*- -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.service.impl.AdminServiceImpl; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.limits.Blacklist; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class }) -public class AdminServiceImplemTest { - - @InjectMocks - AdminServiceImpl adminServiceImpl; - - @Mock - DMaaPContext dmaapContext; - @Mock - ConsumerFactory factory; - - @Mock - ConfigurationReader configReader; - @Mock - Blacklist Blacklist; - - @Before - public void setUp() throws Exception { - - MockitoAnnotations.initMocks(this); - PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); - NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); - - PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader); - PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory); - PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist); - - PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - } - - @After - public void tearDown() throws Exception { - } - - // ISSUES WITH AUTHENTICATION - @Test - public void testShowConsumerCache() { - - try { - adminServiceImpl.showConsumerCache(dmaapContext); - } catch (IOException | AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testDropConsumerCache() { - - try { - adminServiceImpl.dropConsumerCache(dmaapContext); - } catch (IOException | AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGetBlacklist() { - - try { - adminServiceImpl.getBlacklist(dmaapContext); - } catch (IOException | AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testAddToBlacklist() { - - try { - 
adminServiceImpl.addToBlacklist(dmaapContext, "120.120.120.120"); - } catch (IOException | AccessDeniedException | ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testRemoveFromBlacklist() { - - try { - adminServiceImpl.removeFromBlacklist(dmaapContext, "120.120.120.120"); - } catch (IOException | AccessDeniedException | ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImplTest.java b/src/test/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImplTest.java deleted file mode 100644 index 89b25af..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImplTest.java +++ /dev/null @@ -1,331 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - - -package com.att.nsa.cambria.service.impl; - -import static org.junit.Assert.*; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.beans.ApiKeyBean; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.service.impl.ApiKeysServiceImpl; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.dmf.mr.utils.Emailer; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.limits.Blacklist; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.security.db.NsaApiDb; -import com.att.nsa.security.db.NsaApiDb.KeyExistsException; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class }) -public class ApiKeysServiceImplTest { - - @InjectMocks - ApiKeysServiceImpl service; - - @Mock - DMaaPContext dmaapContext; - @Mock - ConsumerFactory factory; - - @Mock - ConfigurationReader configReader; - @Mock - Blacklist Blacklist; - @Mock - Emailer emailer; - - @Before - public void setUp() throws Exception { - - MockitoAnnotations.initMocks(this); - PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); - NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); - - PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader); - PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory); - PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist); - - PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb); - PowerMockito.when(configReader.getSystemEmailer()).thenReturn(emailer); - PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - - } - - @After - public void tearDown() throws Exception { - } - - - @Test - public void testGetAllApiKeys() { - - service = new ApiKeysServiceImpl(); - try { - service.getAllApiKeys(dmaapContext); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testGetApiKey() { - - ApiKeysServiceImpl service = new ApiKeysServiceImpl(); - try { - service.getApiKey(dmaapContext, "testkey"); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - } - assertTrue(true); - - } - - @Test - public void testGetApiKey_error() { - - 
ApiKeysServiceImpl service = new ApiKeysServiceImpl(); - try { - service.getApiKey(dmaapContext, "k35Hdw6Sde"); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - } - - @Test - public void testCreateApiKey() { - - ApiKeysServiceImpl service = new ApiKeysServiceImpl(); - try { - service.createApiKey(dmaapContext, new ApiKeyBean("test@onap.com", "testing apikey bean")); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (KeyExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch(NoClassDefFoundError e) { - - } - assertTrue(true); - } - - @Test - public void testUpdateApiKey() { - - ApiKeysServiceImpl service = new ApiKeysServiceImpl(); - try { - - service.updateApiKey(dmaapContext, "admin", new ApiKeyBean("test@onapt.com", "testing apikey bean")); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - @Test - public void testUpdateApiKey_error() { - - ApiKeysServiceImpl service = new ApiKeysServiceImpl(); - try { - - service.updateApiKey(dmaapContext, null, new ApiKeyBean("test@onapt.com", "testing apikey bean")); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - assertTrue(true); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void testDeleteApiKey() { - - ApiKeysServiceImpl service = new ApiKeysServiceImpl(); - try { - - service.deleteApiKey(dmaapContext, null); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void testDeleteApiKey_error() { - - ApiKeysServiceImpl service = new ApiKeysServiceImpl(); - try { - - service.deleteApiKey(dmaapContext, "admin"); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - NsaApiDb fApiKeyDb= new NsaApiDb() { - - - Set keys = new HashSet<>(Arrays.asList("testkey","admin")); - - 
- @Override - public NsaSimpleApiKey createApiKey(String arg0, String arg1) - throws com.att.nsa.security.db.NsaApiDb.KeyExistsException, ConfigDbException { - // TODO Auto-generated method stub - return new NsaSimpleApiKey(arg0, arg1); - } - - @Override - public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { - // TODO Auto-generated method stub - return false; - } - - @Override - public boolean deleteApiKey(String arg0) throws ConfigDbException { - // TODO Auto-generated method stub - return false; - } - - @Override - public Map loadAllKeyRecords() throws ConfigDbException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Set loadAllKeys() throws ConfigDbException { - // TODO Auto-generated method stub - - return keys ; - } - - @Override - public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException { - if(!keys.contains(arg0)){ - return null; - } - return new NsaSimpleApiKey(arg0, "password"); - } - - @Override - public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { - // TODO Auto-generated method stub - - } - }; -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImplTest.java b/src/test/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImplTest.java deleted file mode 100644 index baef7be..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImplTest.java +++ /dev/null @@ -1,158 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import static org.junit.Assert.*; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.dmf.mr.service.impl.BaseTransactionDbImpl; -import com.att.dmf.mr.transaction.DMaaPTransactionObjDB.KeyExistsException; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class BaseTransactionDbImplTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testCreateTransactionObj() { - - - try { - - BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null); - service.createTransactionObj("transition"); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (KeyExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testSaveTransactionObj() { - - - try { - - BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null); - service.saveTransactionObj(null); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testLoadTransactionObj() { - - try { - - BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null); - service.loadTransactionObj("34"); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - @Test - public void testLoadAllTransactionObjs() { - - try { - - BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null); - service.loadAllTransactionObjs(); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - - -} diff --git 
a/src/test/java/com/att/nsa/cambria/service/impl/EventsServiceImplTest.java b/src/test/java/com/att/nsa/cambria/service/impl/EventsServiceImplTest.java deleted file mode 100644 index 8cd560d..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/EventsServiceImplTest.java +++ /dev/null @@ -1,312 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import static org.mockito.Mockito.when; -import static org.mockito.Matchers.anyString; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.ConcurrentModificationException; -import java.util.Map; -import java.util.Properties; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.mock.web.MockHttpServletRequest; -import org.springframework.mock.web.MockHttpServletResponse; - -import com.att.ajsc.beans.PropertiesMapBean; -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.security.DMaaPAAFAuthenticator; -import com.att.dmf.mr.security.DMaaPAuthenticator; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException; -import com.att.dmf.mr.beans.DMaaPCambriaLimiter; -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPAccessDeniedException; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.dmf.mr.service.impl.EventsServiceImpl; -import com.att.dmf.mr.utils.PropertyReader; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue; -import 
com.att.nsa.drumlin.till.nv.rrNvReadable.loadException; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.limits.Blacklist; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -import kafka.admin.AdminUtils; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({ DMaaPAuthenticatorImpl.class, AJSCPropertiesMap.class }) -public class EventsServiceImplTest { - - private InputStream iStream = null; - DMaaPContext dMaapContext = new DMaaPContext(); - EventsServiceImpl service = new EventsServiceImpl(); - DMaaPErrorMessages pErrorMessages = new DMaaPErrorMessages(); - @Mock - ConfigurationReader configurationReader; - @Mock - Blacklist blacklist; - @Mock - DMaaPAuthenticator dmaaPAuthenticator; - @Mock - DMaaPAAFAuthenticator dmaapAAFauthenticator; - @Mock - NsaApiKey user; - @Mock - NsaSimpleApiKey nsaSimpleApiKey; - @Mock - DMaaPKafkaMetaBroker dmaapKafkaMetaBroker; - @Mock - Topic createdTopic; - @Mock - ConsumerFactory factory; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - String source = "source of my InputStream"; - iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); - - MockHttpServletRequest request = new MockHttpServletRequest(); - MockHttpServletResponse response = new MockHttpServletResponse(); - dMaapContext.setRequest(request); - dMaapContext.setResponse(response); - when(blacklist.contains(anyString())).thenReturn(false); - when(configurationReader.getfIpBlackList()).thenReturn(blacklist); - dMaapContext.setConfigReader(configurationReader); - - service.setErrorMessages(pErrorMessages); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout")).thenReturn("100"); - - AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout"); - - } - - @Test(expected = CambriaApiException.class) - public void testGetEvents() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException { - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); - when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - service.getEvents(dMaapContext, "testTopic", "CG1", "23"); - } - - @Test(expected = CambriaApiException.class) - public void testGetEventsBlackListErr() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException { - when(blacklist.contains(anyString())).thenReturn(true); - when(configurationReader.getfIpBlackList()).thenReturn(blacklist); - dMaapContext.setConfigReader(configurationReader); - service.getEvents(dMaapContext, "testTopic", "CG1", "23"); - } - - @Test(expected = CambriaApiException.class) - public void testGetEventsNoTopicError() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException { - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); - 
when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(null); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - service.getEvents(dMaapContext, "testTopic", "CG1", "23"); - } - - @Test(expected = CambriaApiException.class) - public void testGetEventsuserNull() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException { - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(null); - when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - MockHttpServletRequest mockRequest = new MockHttpServletRequest(); - mockRequest.addHeader("Authorization", "passed"); - dMaapContext.setRequest(mockRequest); - dMaapContext.getRequest().getHeader("Authorization"); - service.getEvents(dMaapContext, "testTopic", "CG1", "23"); - } - - @Test(expected = CambriaApiException.class) - public void testGetEventsExcp2() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException { - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); - when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - when(configurationReader.getfRateLimiter()).thenThrow(new ConcurrentModificationException("Error occurred")); - service.getEvents(dMaapContext, "testTopic", "CG1", "23"); - } - - @Test(expected = CambriaApiException.class) - public void testPushEvents() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, - invalidSettingValue, loadException { - - // AdminUtils.createTopic(configurationReader.getZk(), "testTopic", 10, - // 1, new Properties()); - - configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); - - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); - when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - - service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); - - service.getEvents(dMaapContext, "testTopic", "CG1", "23"); - - /* - * String trueValue = "True"; - * assertTrue(trueValue.equalsIgnoreCase("True")); - */ - - } - - @Test(expected = CambriaApiException.class) - public void testPushEventsBlackListedIp() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, - invalidSettingValue, 
loadException { - - // AdminUtils.createTopic(configurationReader.getZk(), "testTopic", 10, - // 1, new Properties()); - when(blacklist.contains(anyString())).thenReturn(true); - when(configurationReader.getfIpBlackList()).thenReturn(blacklist); - configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); - when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - - service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); - - } - - @Test(expected = NullPointerException.class) - public void testPushEventsNoUser() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, - invalidSettingValue, loadException { - - configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); - - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(null); - when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - MockHttpServletRequest mockRequest = new MockHttpServletRequest(); - mockRequest.addHeader("Authorization", "passed"); - mockRequest.addHeader("Authorization", "passed"); - dMaapContext.setRequest(mockRequest); - dMaapContext.getRequest().getHeader("Authorization"); - service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); - - } - - @Test(expected = CambriaApiException.class) - public void testPushEventsWtTransaction() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, - invalidSettingValue, loadException { - - configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); - - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); - when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "transidUEBtopicreqd")).thenReturn("true"); - - service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); - - } - - @Test(expected = CambriaApiException.class) - public void testPushEventsWtTransactionError() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, - TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, - invalidSettingValue, loadException { - - configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); - - when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); - when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - 
when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); - PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); - when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "transidUEBtopicreqd")).thenReturn("true"); - when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "event.batch.length")).thenReturn("0"); - when(configurationReader.getfPublisher()).thenThrow(new ConcurrentModificationException("Error occurred")); - - service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); - - } - - @Test - public void testIsTransEnabled1() { - - when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "transidUEBtopicreqd")).thenReturn("true"); - assertTrue(service.isTransEnabled()); - - } - @Test - public void testIsTransEnabled2() { - - when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, - "transidUEBtopicreqd")).thenReturn("false"); - assertFalse(service.isTransEnabled()); - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/service/impl/JUnitTestSuite.java deleted file mode 100644 index 8abafcc..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/JUnitTestSuite.java +++ /dev/null @@ -1,43 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import junit.framework.TestSuite; -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ UIServiceImplTest.class, AdminServiceImplemTest.class, ApiKeysServiceImplTest.class, - ShowConsumerCacheTest.class,TopicServiceImplTest.class, TransactionServiceImplTest.class, MMServiceImplTest.class, - BaseTransactionDbImplTest.class, MetricsServiceImplTest.class,EventsServiceImplTest.class}) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/MMServiceImplTest.java b/src/test/java/com/att/nsa/cambria/service/impl/MMServiceImplTest.java deleted file mode 100644 index f97952c..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/MMServiceImplTest.java +++ /dev/null @@ -1,382 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import static org.junit.Assert.*; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.ConcurrentModificationException; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import com.att.ajsc.beans.PropertiesMapBean; -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.service.impl.MMServiceImpl; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.dmf.mr.utils.Emailer; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; -import com.att.nsa.limits.Blacklist; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.security.db.NsaApiDb; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import static org.mockito.Matchers.anyString; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.springframework.mock.web.MockHttpServletRequest; -import org.springframework.mock.web.MockHttpServletResponse; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class, PropertiesMapBean.class, - AJSCPropertiesMap.class }) -public class MMServiceImplTest { - - @InjectMocks - MMServiceImpl service; - - @Mock - DMaaPContext dmaapContext; - @Mock - ConsumerFactory factory; - @Mock - private DMaaPErrorMessages errorMessages; - @Mock - ConfigurationReader configReader; - @Mock - Blacklist Blacklist; - @Mock - Emailer emailer; - @Mock - DMaaPKafkaMetaBroker dmaapKafkaMetaBroker; - @Mock - Topic metatopic; - - @Before - public void setUp() throws Exception { - - MockitoAnnotations.initMocks(this); - PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); - NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); - - PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader); - PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory); - PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist); - - PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb); - PowerMockito.when(configReader.getSystemEmailer()).thenReturn(emailer); - PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - - MockHttpServletRequest request = new MockHttpServletRequest(); - MockHttpServletResponse response = new MockHttpServletResponse(); - 
PowerMockito.when(dmaapContext.getRequest()).thenReturn(request); - PowerMockito.when(dmaapContext.getResponse()).thenReturn(response); - - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(PropertiesMapBean.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout")).thenReturn("1000"); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty")).thenReturn("true"); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta")).thenReturn("true"); - PowerMockito.when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSubscribe_Blacklistip() { - - try { - PowerMockito.when(Blacklist.contains("127.0.0.1")).thenReturn(true); - service.subscribe(dmaapContext, "testTopic", "CG1", "23"); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (UnavailableException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void testSubscribe_NullTopic() { - - try { - PowerMockito.when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(null); - service.subscribe(dmaapContext, "testTopic", "CG1", "23"); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (UnavailableException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test(expected = CambriaApiException.class) - public void testSubscribe_NullTopic_Error() throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, - CambriaApiException, IOException { - - PowerMockito.when(configReader.getfMetrics()).thenThrow(new ConcurrentModificationException("Error occurred")); - PowerMockito.when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(metatopic); - service.subscribe(dmaapContext, "testTopic", "CG1", "23"); - } - - @Test - public void testPushEvents_wttransaction() { - - String source = "source of my InputStream"; - - try { - InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); - service.pushEvents(dmaapContext, "msgrtr.apinode.metrics.dmaap", iStream, "3", "12:00:00"); - - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO 
Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (missingReqdSetting e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - @Test(expected = CambriaApiException.class) - public void testPushEvents_wttransaction_error() throws Exception { - - String source = "source of my InputStream"; - - InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(PropertiesMapBean.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "event.batch.length")).thenReturn("-5"); - PowerMockito.when(configReader.getfPublisher()).thenThrow(new ConcurrentModificationException("Error occurred")); - service.pushEvents(dmaapContext, "msgrtr.apinode.metrics.dmaap1", iStream, "3", "12:00:00"); - - } - - @Test - public void testPushEvents() { - - String source = "source of my InputStream"; - - try { - InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); - service.pushEvents(dmaapContext, "testTopic", iStream, "3", "12:00:00"); - - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (missingReqdSetting e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - @Test - public void testPushEvents_blacklistip() { - - String source = "source of my InputStream"; - - try { - PowerMockito.when(Blacklist.contains("127.0.0.1")).thenReturn(true); - InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); - service.pushEvents(dmaapContext, "testTopic", iStream, "3", "12:00:00"); - - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } catch (CambriaApiException e) { - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (TopicExistsException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (AccessDeniedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (missingReqdSetting e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - - NsaApiDb fApiKeyDb = new NsaApiDb() { - - Set keys = new HashSet<>(Arrays.asList("testkey", "admin")); - - @Override - public NsaSimpleApiKey 
createApiKey(String arg0, String arg1) - throws com.att.nsa.security.db.NsaApiDb.KeyExistsException, ConfigDbException { - // TODO Auto-generated method stub - return new NsaSimpleApiKey(arg0, arg1); - } - - @Override - public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { - // TODO Auto-generated method stub - return false; - } - - @Override - public boolean deleteApiKey(String arg0) throws ConfigDbException { - // TODO Auto-generated method stub - return false; - } - - @Override - public Map loadAllKeyRecords() throws ConfigDbException { - // TODO Auto-generated method stub - return null; - } - - @Override - public Set loadAllKeys() throws ConfigDbException { - // TODO Auto-generated method stub - - return keys; - } - - @Override - public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException { - if (!keys.contains(arg0)) { - return null; - } - return new NsaSimpleApiKey(arg0, "password"); - } - - @Override - public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { - // TODO Auto-generated method stub - - } - }; - -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/MessageTest.java b/src/test/java/com/att/nsa/cambria/service/impl/MessageTest.java deleted file mode 100644 index 1d5c9c0..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/MessageTest.java +++ /dev/null @@ -1,64 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import com.att.dmf.mr.backends.Publisher.message; -import com.att.dmf.mr.beans.LogDetails; - -public class MessageTest implements message { - - @Override - public String getKey() { - // TODO Auto-generated method stub - return "123"; - } - - @Override - public String getMessage() { - // TODO Auto-generated method stub - return "Msg"; - } - - @Override - public void setLogDetails(LogDetails logDetails) { - // TODO Auto-generated method stub - - } - - @Override - public LogDetails getLogDetails() { - // TODO Auto-generated method stub - return null; - } - - @Override - public boolean isTransactionEnabled() { - // TODO Auto-generated method stub - return false; - } - - @Override - public void setTransactionEnabled(boolean transactionEnabled) { - // TODO Auto-generated method stub - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/MetricsServiceImplTest.java b/src/test/java/com/att/nsa/cambria/service/impl/MetricsServiceImplTest.java deleted file mode 100644 index e372d38..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/MetricsServiceImplTest.java +++ /dev/null @@ -1,95 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - - -package com.att.nsa.cambria.service.impl; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.service.impl.MetricsServiceImpl; - -public class MetricsServiceImplTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGet() { - - MetricsServiceImpl service = new MetricsServiceImpl(); - try { - service.get(new DMaaPContext()); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - @Test - public void testGetMetricByName() { - - MetricsServiceImpl service = new MetricsServiceImpl(); - try { - service.getMetricByName(new DMaaPContext(), "uptime"); - } catch (org.json.JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - //e.printStackTrace(); - assertTrue(true); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (CambriaApiException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/ShowConsumerCacheTest.java b/src/test/java/com/att/nsa/cambria/service/impl/ShowConsumerCacheTest.java deleted file mode 100644 index 4dd3076..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/ShowConsumerCacheTest.java +++ /dev/null @@ -1,149 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; -import static org.junit.Assert.assertEquals; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.anyInt; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.Iterator; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.mockito.runners.MockitoJUnitRunner; - -import com.att.aft.dme2.internal.jettison.json.JSONArray; -import com.att.aft.dme2.internal.jettison.json.JSONException; -import com.att.aft.dme2.internal.jettison.json.JSONObject; - -import com.att.ajsc.beans.PropertiesMapBean; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.TopicBean; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPAccessDeniedException; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.security.DMaaPAuthenticator; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -import jline.internal.Log; -import kafka.consumer.Consumer; - - - -//@RunWith(MockitoJUnitRunner.class) -//@RunWith(PowerMockRunner.class) -//@PrepareForTest(PropertiesMapBean.class) -public class ShowConsumerCacheTest { -/* -@InjectMocks -TopicServiceImpl topicService; - -@Mock -private DMaaPErrorMessages errorMessages; - -@Mock -DMaaPContext dmaapContext; - -@Mock -ConfigurationReader configReader; - - -@Mock -JSONObject consumers; - -@Mock -JSONObject consumerObject; - -@Mock -JSONArray jsonConsumersList; - -@Mock -DMaaPAuthenticator dmaaPAuthenticator; - -@Mock -NsaApiKey user; - -@Mock -NsaSimpleApiKey nsaSimpleApiKey; - -@Mock -HttpServletRequest httpServReq; - - -@Before -public void setUp(){ -MockitoAnnotations.initMocks(this); -} - - -//@Test(expected = DMaaPAccessDeniedException.class) -@Test -public void testShowConsmerCache()throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException, JSONException{ -Assert.assertNotNull(topicService); - -String myName = "Brian"; -Object created = null; -Object accessed = null; -Object log = null; -Object info = null; - -when(consumerObject.put("name", myName)).thenReturn(consumerObject); -when(consumerObject.put("created", created)).thenReturn(consumerObject); -when(consumerObject.put("accessed", accessed)).thenReturn(consumerObject); -when(consumerObject.put("accessed", 
Consumer.class)).thenReturn(consumerObject); -when(jsonConsumersList.put(consumerObject)).thenReturn(null); - -when(consumers.put("consumers", jsonConsumersList)).thenReturn(consumerObject); - - - -}*/ - - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/cambria/service/impl/TestRunner.java b/src/test/java/com/att/nsa/cambria/service/impl/TestRunner.java deleted file mode 100644 index cf240b7..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/TopicServiceImplTest.java b/src/test/java/com/att/nsa/cambria/service/impl/TopicServiceImplTest.java deleted file mode 100644 index 47b351e..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/TopicServiceImplTest.java +++ /dev/null @@ -1,766 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.anyInt; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; - -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; - -import com.att.ajsc.beans.PropertiesMapBean; -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.CambriaApiException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.beans.TopicBean; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.exception.DMaaPAccessDeniedException; -import com.att.dmf.mr.exception.DMaaPErrorMessages; -import com.att.dmf.mr.metabroker.Broker.TopicExistsException; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.security.DMaaPAAFAuthenticator; -import com.att.dmf.mr.security.DMaaPAuthenticator; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.service.impl.TopicServiceImpl; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.NsaAcl; -import com.att.nsa.security.NsaApiKey; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -//@RunWith(MockitoJUnitRunner.class) -@RunWith(PowerMockRunner.class) -@PrepareForTest({ PropertiesMapBean.class, DMaaPAuthenticatorImpl.class,AJSCPropertiesMap.class,DMaaPResponseBuilder.class }) -public class TopicServiceImplTest { - - TopicServiceImpl topicService; - - @Mock - private DMaaPErrorMessages errorMessages; - - @Mock - DMaaPContext dmaapContext; - - @Mock - ConfigurationReader configReader; - - @Mock - ServletOutputStream oStream; - - @Mock - DMaaPAuthenticator dmaaPAuthenticator; - - @Mock - DMaaPAAFAuthenticator dmaapAAFauthenticator; - @Mock - NsaApiKey user; - - @Mock - NsaSimpleApiKey nsaSimpleApiKey; - - @Mock - HttpServletRequest httpServReq; - - @Mock - HttpServletResponse httpServRes; - - @Mock - DMaaPKafkaMetaBroker dmaapKafkaMetaBroker; - - @Mock - Topic createdTopic; - - @Mock - NsaAcl nsaAcl; - - @Mock - JSONObject jsonObj; - - @Mock - JSONArray jsonArray; - - @Before - public void setUp() { - MockitoAnnotations.initMocks(this); - topicService = new TopicServiceImpl(); - topicService.setErrorMessages(errorMessages); - NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); - PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); - PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); - } - - @Test(expected = NullPointerException.class) - public void 
testCreateTopicWithEnforcedName() - throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException { - - Assert.assertNotNull(topicService); - PowerMockito.mockStatic(PropertiesMapBean.class); - - when(PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, "enforced.topic.name.AAF")) - .thenReturn("enfTopicName"); - - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(nsaSimpleApiKey); - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - topicService.createTopic(dmaapContext, topicBean); - } - - @Test - public void testCreateTopicWithTopicNameNotEnforced() - throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,IOException,TopicExistsException, com.att.dmf.mr.metabroker.Broker1.TopicExistsException { - - Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(PropertiesMapBean.class); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - - when(PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, "enforced.topic.name.AAF")) - .thenReturn("enfTopicName"); - - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(nsaSimpleApiKey); - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - - when(nsaAcl.isActive()).thenReturn(true); - when(nsaAcl.getUsers()).thenReturn(new HashSet<>(Arrays.asList("user1,user2".split(",")))); - - when(createdTopic.getName()).thenReturn("topicName"); - when(createdTopic.getOwner()).thenReturn("Owner"); - when(createdTopic.getDescription()).thenReturn("Description"); - when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); - when(createdTopic.getWriterAcl()).thenReturn(nsaAcl); - - when(dmaapKafkaMetaBroker.createTopic(anyString(), anyString(), anyString(), anyInt(), anyInt(), anyBoolean())) - .thenReturn(createdTopic); - - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("NotEnforcedTopicName"); - - topicService.createTopic(dmaapContext, topicBean); - - verify(dmaapKafkaMetaBroker, times(1)).createTopic(anyString(), anyString(), anyString(), anyInt(), anyInt(), - anyBoolean()); - } - - @Test(expected = NullPointerException.class) - public void testCreateTopicNoUserInContextAndNoAuthHeader() - throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException { - - Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(PropertiesMapBean.class); - - when(PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, "enforced.topic.name.AAF")) - .thenReturn("enfTopicName"); - - when(httpServReq.getHeader("Authorization")).thenReturn(null); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - - 
when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - topicService.createTopic(dmaapContext, topicBean); - } - - @Test(expected = NullPointerException.class) - public void testCreateTopicNoUserInContextAndAuthHeaderAndPermitted() - throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException { - - Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(PropertiesMapBean.class); - - when(PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, "enforced.topic.name.AAF")) - .thenReturn("enfTopicName"); - - when(httpServReq.getHeader("Authorization")).thenReturn("Authorization"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - - // when(dmaapAAFauthenticator.aafAuthentication(httpServReq, - // anyString())).thenReturn(false); - - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - topicService.createTopic(dmaapContext, topicBean); - } - - @Test(expected = TopicExistsException.class) - public void testGetTopics_null_topic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException { - - Assert.assertNotNull(topicService); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(null); - - topicService.getTopic(dmaapContext, "topicName"); - } - - @Test - public void testGetTopics_NonNull_topic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException { - - Assert.assertNotNull(topicService); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - - when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(createdTopic); - - when(createdTopic.getName()).thenReturn("topicName"); - when(createdTopic.getDescription()).thenReturn("topicDescription"); - when(createdTopic.getOwners()).thenReturn(new HashSet<>(Arrays.asList("user1,user2".split(",")))); - - when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); - when(createdTopic.getWriterAcl()).thenReturn(nsaAcl); - - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(httpServRes.getOutputStream()).thenReturn(oStream); - - topicService.getTopic(dmaapContext, "topicName"); - } - - @Test(expected = TopicExistsException.class) - public void testGetPublishersByTopicName_nullTopic() throws DMaaPAccessDeniedException, CambriaApiException, - IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - 
Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null); - - topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name"); - - } - - @Test - public void testGetPublishersByTopicName_nonNullTopic() throws DMaaPAccessDeniedException, CambriaApiException, - IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic); - when(createdTopic.getWriterAcl()).thenReturn(nsaAcl); - topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name"); - } - - @Test(expected = TopicExistsException.class) - public void testGetConsumersByTopicName_nullTopic() throws DMaaPAccessDeniedException, CambriaApiException, - IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null); - - topicService.getConsumersByTopicName(dmaapContext, "topicNamespace.name"); - - } - - @Test - public void testGetConsumersByTopicName_nonNullTopic() throws DMaaPAccessDeniedException, CambriaApiException, - IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic); - - when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); - - topicService.getConsumersByTopicName(dmaapContext, "topicNamespace.name"); - } - - @Test - public void testGetPublishersByTopicName() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - - PowerMockito.mockStatic(AJSCPropertiesMap.class); - - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, 
"msgRtr.topicfactory.aaf")) - .thenReturn("topicFactoryAAF"); - - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic); - - when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); - - topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name"); - } - - @Test(expected=TopicExistsException.class) - public void testGetPublishersByTopicNameError() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - - PowerMockito.mockStatic(AJSCPropertiesMap.class); - - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("topicFactoryAAF"); - - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(httpServReq.getMethod()).thenReturn("HEAD"); - - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null); - - when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); - - topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name"); - } - - @Test - public void testdeleteTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.deleteTopic(dmaapContext, "topicNamespace.topic"); - } - - @Test(expected=TopicExistsException.class) - public void testdeleteTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - 
when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.deleteTopic(dmaapContext, "topicNamespace.topic"); - } - - /*@Test(expected=DMaaPAccessDeniedException.class) - public void testdeleteTopic_authHeader() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(null); - topicService.deleteTopic(dmaapContext, "topicNamespace.topic"); - }*/ - - @Test - public void testPermitConsumerForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.permitConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin"); - } - - @Test(expected=TopicExistsException.class) - public void testPermitConsumerForTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - 
PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.permitConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin"); - } - - @Test - public void testdenyConsumerForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.denyConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin"); - } - - @Test(expected=TopicExistsException.class) - public void testdenyConsumerForTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.denyConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin"); - } - - - @Test - public void testPermitPublisherForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - 
TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.permitPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin"); - } - - @Test(expected=TopicExistsException.class) - public void testPermitPublisherForTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.permitPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin"); - } - - @Test - public void testDenyPublisherForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - when(dmaapContext.getResponse()).thenReturn(httpServRes); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); - TopicBean topicBean = new TopicBean(); - 
topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.denyPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin");; - } - - @Test(expected=TopicExistsException.class) - public void testDenyPublisherForTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.denyPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin");; - } - - @Test - public void testGetAllTopics() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.getAllTopics(dmaapContext); - } - - @Test - public void testGetTopics() throws DMaaPAccessDeniedException, CambriaApiException, IOException, - TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { - - Assert.assertNotNull(topicService); - - // PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.mockStatic(AJSCPropertiesMap.class); - PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) - .thenReturn("hello"); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); - when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); - when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); - when(dmaapContext.getRequest()).thenReturn(httpServReq); - - when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); - 
when(dmaapContext.getConfigReader()).thenReturn(configReader); - when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); - TopicBean topicBean = new TopicBean(); - topicBean.setTopicName("enfTopicNamePlusExtra"); - - topicService.getTopics(dmaapContext); - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/service/impl/TransactionServiceImplTest.java b/src/test/java/com/att/nsa/cambria/service/impl/TransactionServiceImplTest.java deleted file mode 100644 index d7fcde4..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/TransactionServiceImplTest.java +++ /dev/null @@ -1,97 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - - - -package com.att.nsa.cambria.service.impl; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import com.att.aft.dme2.internal.jettison.json.JSONException; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.service.impl.TransactionServiceImpl; -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class TransactionServiceImplTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testCheckTransaction() { - - TransactionServiceImpl service = new TransactionServiceImpl(); - service.checkTransaction(new TransactionObj("23", 1100, 1000, 10)); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGetAllTransactionObjs() { - - TransactionServiceImpl service = new TransactionServiceImpl(); - try { - service.getAllTransactionObjs(new DMaaPContext()); - } catch (ConfigDbException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGetTransactionObj() { - - TransactionServiceImpl service = new TransactionServiceImpl(); - try { - service.getTransactionObj(new DMaaPContext(), "23"); - } catch (ConfigDbException | IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (JSONException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - -} diff --git 
a/src/test/java/com/att/nsa/cambria/service/impl/UIServiceImplTest.java b/src/test/java/com/att/nsa/cambria/service/impl/UIServiceImplTest.java deleted file mode 100644 index d47c2d6..0000000 --- a/src/test/java/com/att/nsa/cambria/service/impl/UIServiceImplTest.java +++ /dev/null @@ -1,302 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.service.impl; - -import static org.junit.Assert.*; -import static org.mockito.Mockito.when; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import com.att.dmf.mr.backends.ConsumerFactory; -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker; -import com.att.dmf.mr.metabroker.Topic; -import com.att.dmf.mr.service.impl.UIServiceImpl; -import com.att.dmf.mr.security.DMaaPAuthenticatorImpl; -import com.att.dmf.mr.utils.ConfigurationReader; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; -import com.att.nsa.configs.ConfigDbException; -import com.att.nsa.security.db.NsaApiDb; -import com.att.nsa.security.db.simple.NsaSimpleApiKey; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class }) -public class UIServiceImplTest { - - @InjectMocks - UIServiceImpl service; - - @Mock - DMaaPContext dmaapContext; - @Mock - ConsumerFactory factory; - - @Mock - ConfigurationReader configReader; - - @Mock - DMaaPKafkaMetaBroker dmaapKafkaMetaBroker; - - @Mock - Topic metatopic; - - @Before - public void setUp() throws Exception { - MockitoAnnotations.initMocks(this); - PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); - NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); - - PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader); - PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory); - - PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb); - PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); - PowerMockito.mockStatic(DMaaPResponseBuilder.class); - 
PowerMockito.when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testHello() { - - try { - service.hello(dmaapContext); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - @Test - public void testGetApiKeysTable() { - - try { - service.getApiKeysTable(dmaapContext); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testGetApiKey() { - - try { - service.getApiKey(dmaapContext, "admin"); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - } - - @Test - public void testGetApiKey_invalidkey() { - - try { - service.getApiKey(dmaapContext, "k56HmWT72J"); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - assertTrue(true); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - } - - @Test - public void testGetTopicsTable() { - - try { - List topics = new ArrayList(); - topics.add(metatopic); - when(dmaapKafkaMetaBroker.getAllTopics()).thenReturn(topics); - service.getTopicsTable(dmaapContext); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - - } - - @Test - public void testGetTopic() { - - try { - when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(metatopic); - service.getTopic(dmaapContext, "testTopic"); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - assertTrue(true); - } - - @Test - public void testGetTopic_nulltopic() { - - try { - when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); - service.getTopic(dmaapContext, "testTopic"); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - // e.printStackTrace(); - } catch (ConfigDbException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (Exception e) { - // TODO Auto-generated catch block 
- assertTrue(true); - } - - } - - NsaApiDb fApiKeyDb = new NsaApiDb() { - - Set keys = new HashSet<>(Arrays.asList("testkey", "admin")); - - @Override - public NsaSimpleApiKey createApiKey(String arg0, String arg1) - throws com.att.nsa.security.db.NsaApiDb.KeyExistsException, ConfigDbException { - // TODO Auto-generated method stub - return new NsaSimpleApiKey(arg0, arg1); - } - - @Override - public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { - // TODO Auto-generated method stub - return false; - } - - @Override - public boolean deleteApiKey(String arg0) throws ConfigDbException { - // TODO Auto-generated method stub - return false; - } - - @Override - public Map loadAllKeyRecords() throws ConfigDbException { - Map map = new HashMap(); - map.put("testkey", new NsaSimpleApiKey("testkey", "password")); - map.put("admin", new NsaSimpleApiKey("admin", "password")); - - return map; - } - - @Override - public Set loadAllKeys() throws ConfigDbException { - // TODO Auto-generated method stub - - return keys; - } - - @Override - public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException { - if (!keys.contains(arg0)) { - return null; - } - return new NsaSimpleApiKey(arg0, "password"); - } - - @Override - public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { - // TODO Auto-generated method stub - - } - }; - -} diff --git a/src/test/java/com/att/nsa/cambria/transaction/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/transaction/JUnitTestSuite.java deleted file mode 100644 index 2391679..0000000 --- a/src/test/java/com/att/nsa/cambria/transaction/JUnitTestSuite.java +++ /dev/null @@ -1,42 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.transaction; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ TransactionObjTest.class, TrnRequestTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/transaction/TestRunner.java b/src/test/java/com/att/nsa/cambria/transaction/TestRunner.java deleted file mode 100644 index e420175..0000000 --- a/src/test/java/com/att/nsa/cambria/transaction/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.transaction; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/transaction/TransactionObjTest.java b/src/test/java/com/att/nsa/cambria/transaction/TransactionObjTest.java deleted file mode 100644 index 2e6aa21..0000000 --- a/src/test/java/com/att/nsa/cambria/transaction/TransactionObjTest.java +++ /dev/null @@ -1,175 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.transaction; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.transaction.TransactionObj; - -public class TransactionObjTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testAsJsonObject() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - - try { - obj.asJsonObject(); - - } catch(NullPointerException e) { - assertTrue(true); - } - - } - - @Test - public void testGetId() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.getId(); - assertTrue(true); - - } - - @Test - public void testSetId() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.setId("23"); - assertTrue(true); - - } - - @Test - public void testGetCreateTime() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.getCreateTime(); - assertTrue(true); - - } - - @Test - public void testSetCreateTime() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.setCreateTime("12:00:00"); - assertTrue(true); - - } - - @Test - public void testSerialize() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.serialize(); - assertTrue(true); - - } - - @Test - public void testGetTotalMessageCount() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.getTotalMessageCount(); - assertTrue(true); - - } - - @Test - public void testSetTotalMessageCount() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.setTotalMessageCount(200); - assertTrue(true); - - } - - @Test - public void testGetSuccessMessageCount() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.getSuccessMessageCount(); - assertTrue(true); - - } - - @Test - public void testSetSuccessMessageCount() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.setSuccessMessageCount(198); - assertTrue(true); - - } - - @Test - public void testGetFailureMessageCount() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.getFailureMessageCount(); - assertTrue(true); - - } - - @Test - public void testSetFailureMessageCount() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.setFailureMessageCount(2); - assertTrue(true); - - } - - @Test - public void testGetfData() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.getfData(); - assertTrue(true); - - } - - @Test - public void testSetfData() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.setfData(null); - assertTrue(true); - - } - - @Test - public void testGetTrnRequest() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.getTrnRequest(); - assertTrue(true); - - } - - @Test - public void testSetTrnRequest() { - TransactionObj obj = new TransactionObj("23", 100, 98, 2); - obj.setTrnRequest(null); - assertTrue(true); - - } - - -} diff --git 
a/src/test/java/com/att/nsa/cambria/transaction/TrnRequestTest.java b/src/test/java/com/att/nsa/cambria/transaction/TrnRequestTest.java deleted file mode 100644 index 40e59af..0000000 --- a/src/test/java/com/att/nsa/cambria/transaction/TrnRequestTest.java +++ /dev/null @@ -1,187 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.transaction; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.transaction.TransactionObj; -import com.att.dmf.mr.transaction.TrnRequest; - -public class TrnRequestTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetId() { - TrnRequest req = new TrnRequest(); - - req.getId(); - assertTrue(true); - - } - - @Test - public void testSetId() { - TrnRequest req = new TrnRequest(); - - req.setId("23"); - assertTrue(true); - - } - - @Test - public void testGetRequestCreate() { - TrnRequest req = new TrnRequest(); - - req.getRequestCreate(); - assertTrue(true); - - } - - @Test - public void testSetRequestCreate() { - TrnRequest req = new TrnRequest(); - - req.setRequestCreate("createRequest"); - assertTrue(true); - - } - - @Test - public void testGetRequestHost() { - TrnRequest req = new TrnRequest(); - - req.getRequestHost(); - assertTrue(true); - - } - - @Test - public void testSetRequestHost() { - TrnRequest req = new TrnRequest(); - - req.setRequestHost("requestHost"); - assertTrue(true); - - } - - @Test - public void testGetServerHost() { - TrnRequest req = new TrnRequest(); - - req.getServerHost(); - assertTrue(true); - - } - - @Test - public void testSetServerHost() { - TrnRequest req = new TrnRequest(); - - req.setServerHost("requestHost"); - assertTrue(true); - - } - - @Test - public void testGetMessageProceed() { - TrnRequest req = new TrnRequest(); - - req.getMessageProceed(); - assertTrue(true); - - } - - @Test - public void testSetMessageProceed() { - TrnRequest req = new TrnRequest(); - - req.setMessageProceed("messageProceed"); - assertTrue(true); - - } - - @Test - public void testGetTotalMessage() { - TrnRequest req = new TrnRequest(); - - req.getTotalMessage(); - assertTrue(true); - - } - - @Test - public void testSetTotalMessage() { - TrnRequest req = new TrnRequest(); - - req.setTotalMessage("200"); - assertTrue(true); - - } - - - @Test - public void testGetClientType() { - TrnRequest req = new TrnRequest(); - - req.getClientType(); - assertTrue(true); - - } - - @Test - public void 
testSetClientType() { - TrnRequest req = new TrnRequest(); - - req.setClientType("admin"); - assertTrue(true); - - } - - @Test - public void testGetUrl() { - TrnRequest req = new TrnRequest(); - - req.getUrl(); - assertTrue(true); - - } - - @Test - public void testSetUrl() { - TrnRequest req = new TrnRequest(); - - req.setUrl("http://google.com"); - assertTrue(true); - - } - -} diff --git a/src/test/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java b/src/test/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java deleted file mode 100644 index 5260993..0000000 --- a/src/test/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.transaction.impl; - -import static org.junit.Assert.*; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; - -public class DMaaPSimpleTransactionFactoryTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testMakeNewTransactionObj() { - - DMaaPSimpleTransactionFactory factory = new DMaaPSimpleTransactionFactory(); - - factory.makeNewTransactionObj("{'transactionId': '123', 'totalMessageCount': '200', " - + "'successMessageCount': '200', 'failureMessageCount': '0'}"); - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - - } - - @Test - public void testMakeNewTransactionId() { - - DMaaPSimpleTransactionFactory factory = new DMaaPSimpleTransactionFactory(); - factory.makeNewTransactionId("123"); - - String trueValue = "True"; - assertTrue(trueValue.equalsIgnoreCase("True")); - - } - - - -} diff --git a/src/test/java/com/att/nsa/cambria/transaction/impl/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/transaction/impl/JUnitTestSuite.java deleted file mode 100644 index c18ac8e..0000000 --- a/src/test/java/com/att/nsa/cambria/transaction/impl/JUnitTestSuite.java +++ /dev/null @@ -1,42 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.transaction.impl; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ DMaaPSimpleTransactionFactoryTest.class, }) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/transaction/impl/TestRunner.java b/src/test/java/com/att/nsa/cambria/transaction/impl/TestRunner.java deleted file mode 100644 index 430509e..0000000 --- a/src/test/java/com/att/nsa/cambria/transaction/impl/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.transaction.impl; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/utils/ConfigurationReaderTest.java b/src/test/java/com/att/nsa/cambria/utils/ConfigurationReaderTest.java deleted file mode 100644 index 907b935..0000000 --- a/src/test/java/com/att/nsa/cambria/utils/ConfigurationReaderTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.utils; - -import static org.junit.Assert.assertNotNull; - -import org.junit.After; -import org.junit.Test; - -import com.att.nsa.cambria.embed.EmbedConfigurationReader; -import com.att.dmf.mr.utils.ConfigurationReader; - -public class ConfigurationReaderTest { - - EmbedConfigurationReader embedConfigurationReader = new EmbedConfigurationReader(); - - @After - public void tearDown() throws Exception { - embedConfigurationReader.tearDown(); - } - - @Test - public void testConfigurationReader() throws Exception { - - ConfigurationReader configurationReader = embedConfigurationReader.buildConfigurationReader(); - - assertNotNull(configurationReader); - assertNotNull(configurationReader.getfApiKeyDb()); - assertNotNull(configurationReader.getfConfigDb()); - assertNotNull(configurationReader.getfConsumerFactory()); - assertNotNull(configurationReader.getfIpBlackList()); - assertNotNull(configurationReader.getfMetaBroker()); - assertNotNull(configurationReader.getfMetrics()); - assertNotNull(configurationReader.getfPublisher()); - assertNotNull(configurationReader.getfSecurityManager()); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/utils/DMaaPCuratorFactoryTest.java b/src/test/java/com/att/nsa/cambria/utils/DMaaPCuratorFactoryTest.java deleted file mode 100644 index 90e6f6f..0000000 --- a/src/test/java/com/att/nsa/cambria/utils/DMaaPCuratorFactoryTest.java +++ /dev/null @@ -1,70 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.utils; - -import static org.junit.Assert.*; - -import java.io.File; -import java.util.HashMap; -import java.util.Map; - -import org.apache.curator.framework.CuratorFramework; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.ajsc.filemonitor.AJSCPropertiesMap; -import com.att.dmf.mr.constants.CambriaConstants; -import com.att.dmf.mr.utils.DMaaPCuratorFactory; -import com.att.dmf.mr.utils.PropertyReader; -import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException; -import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile; -import com.att.nsa.drumlin.till.nv.impl.nvReadableTable; - -public class DMaaPCuratorFactoryTest { - - @Before - public void setUp() throws Exception { - ClassLoader classLoader = getClass().getClassLoader(); - AJSCPropertiesMap.refresh(new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile())); - - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testgetCurator() throws loadException { - CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader()); - assertNotNull(curatorFramework); - - Map map = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop); - map.remove(CambriaConstants.kSetting_ZkConfigDbServers); - map.remove(CambriaConstants.kSetting_ZkSessionTimeoutMs); - - - - curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader()); - assertNotNull(curatorFramework); - } - -} diff --git a/src/test/java/com/att/nsa/cambria/utils/DMaaPResponseBuilderTest.java b/src/test/java/com/att/nsa/cambria/utils/DMaaPResponseBuilderTest.java deleted file mode 100644 index 66e4405..0000000 --- a/src/test/java/com/att/nsa/cambria/utils/DMaaPResponseBuilderTest.java +++ /dev/null @@ -1,141 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.utils; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import org.json.JSONException; -import org.json.JSONObject; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.mock.web.MockHttpServletRequest; -import org.springframework.mock.web.MockHttpServletResponse; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.utils.DMaaPResponseBuilder; - -public class DMaaPResponseBuilderTest { - - DMaaPContext dMaapContext; - MockHttpServletRequest request; - MockHttpServletResponse response; - - @Before - public void setUp() throws Exception { - - dMaapContext = new DMaaPContext(); - request = new MockHttpServletRequest(); - response = new MockHttpServletResponse(); - dMaapContext.setRequest(request); - dMaapContext.setResponse(response); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testsetNoCacheHeadings(){ - DMaaPResponseBuilder.setNoCacheHeadings(dMaapContext); - assertEquals("no-cache", response.getHeader("Pragma")); - } - - @Test - public void testrespondOk() throws JSONException, IOException{ - JSONObject jsonObject = new JSONObject(); - jsonObject.put("Name", "Test"); - - DMaaPResponseBuilder.respondOk(dMaapContext, jsonObject); - assertEquals("application/json", response.getContentType()); - assertEquals(200, response.getStatus()); - - request.setMethod("HEAD"); - - DMaaPResponseBuilder.respondOk(dMaapContext, jsonObject); - assertEquals("application/json", response.getContentType()); - assertEquals(200, response.getStatus()); - } - - @Test - public void testrespondOkNoContent(){ - DMaaPResponseBuilder.respondOkNoContent(dMaapContext); - assertEquals(204, response.getStatus()); - } - - @Test - public void testrespondOkWithHtml(){ - DMaaPResponseBuilder.respondOkWithHtml(dMaapContext, ""); - - assertEquals("text/html", response.getContentType()); - assertEquals(200, response.getStatus()); - } - - @Test - public void testrespondWithError(){ - DMaaPResponseBuilder.respondWithError(dMaapContext, 500, "InternalServerError"); - assertEquals(500, response.getStatus()); - } - - @Test - public void testrespondWithJsonError(){ - JSONObject o = new JSONObject(); - o.put("status", 500); - o.put("message", "InternalServerError"); - DMaaPResponseBuilder.respondWithError(dMaapContext, 500, o); - assertEquals(500, response.getStatus()); - } - - @Test - public void testrespondWithErrorInJson(){ - DMaaPResponseBuilder.respondWithErrorInJson(dMaapContext, 500, "InternalServerError"); - - assertEquals("application/json", response.getContentType()); - assertEquals(500, response.getStatus()); - } - - @Test - public void testsendErrorAndBody(){ - DMaaPResponseBuilder.sendErrorAndBody(dMaapContext, 500, "InternalServerError", "text/html"); - - assertEquals("text/html", response.getContentType()); - assertEquals(500, response.getStatus()); - - request.setMethod("HEAD"); - - DMaaPResponseBuilder.sendErrorAndBody(dMaapContext, 500, "InternalServerError", "text/html"); - - assertEquals("text/html", response.getContentType()); - assertEquals(500, response.getStatus()); - - } - - @Test - public void testgetStreamForBinaryResponse() throws IOException{ - DMaaPResponseBuilder.getStreamForBinaryResponse(dMaapContext); - - assertEquals("application/octet-stream", response.getContentType()); - assertEquals(200, response.getStatus()); - } - -} diff --git 
a/src/test/java/com/att/nsa/cambria/utils/UtilsTest.java b/src/test/java/com/att/nsa/cambria/utils/UtilsTest.java deleted file mode 100644 index bff4d48..0000000 --- a/src/test/java/com/att/nsa/cambria/utils/UtilsTest.java +++ /dev/null @@ -1,125 +0,0 @@ -/******************************************************************************* -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.cambria.utils; - -import static org.junit.Assert.*; - -import java.security.Principal; -import java.text.SimpleDateFormat; -import java.util.Date; - -import javax.servlet.http.HttpServletRequest; - -import org.apache.http.auth.BasicUserPrincipal; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.springframework.mock.web.MockHttpServletRequest; - -import com.att.dmf.mr.beans.DMaaPContext; -import com.att.dmf.mr.utils.Utils; - -public class UtilsTest { - - private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS"; - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetFormattedDate() { - Date now = new Date(); - String dateStr = Utils.getFormattedDate(now); - SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT); - String expectedStr = sdf.format(now); - assertNotNull(dateStr); - assertTrue("Formatted date does not match - expected [" + expectedStr - + "] received [" + dateStr + "]", - dateStr.equalsIgnoreCase(expectedStr)); - } - - @Test - public void testgetUserApiKey(){ - MockHttpServletRequest request = new MockHttpServletRequest(); - request.addHeader(Utils.CAMBRIA_AUTH_HEADER, "User:Password"); - assertEquals("User", Utils.getUserApiKey(request)); - - MockHttpServletRequest request2 = new MockHttpServletRequest(); - Principal principal = new BasicUserPrincipal("User@Test"); - request2.setUserPrincipal(principal); - request2.addHeader("Authorization", "test"); - assertEquals("User", Utils.getUserApiKey(request2)); - - MockHttpServletRequest request3 = new MockHttpServletRequest(); - assertNull(Utils.getUserApiKey(request3)); - } - - @Test - public void testgetFromattedBatchSequenceId(){ - Long x = new Long(1234); - String str = Utils.getFromattedBatchSequenceId(x); - assertEquals("001234", str); - } - - @Test - public void testmessageLengthInBytes(){ - String str = "TestString"; - long length = Utils.messageLengthInBytes(str); - assertEquals(10, length); - assertEquals(0, Utils.messageLengthInBytes(null)); - } - - @Test - public void testgetResponseTransactionId(){ - String transactionId = 
"test123::sampleResponseMessage"; - assertEquals("test123",Utils.getResponseTransactionId(transactionId)); - assertNull(Utils.getResponseTransactionId(null)); - assertNull(Utils.getResponseTransactionId("")); - } - - @Test - public void testgetSleepMsForRate(){ - long x = Utils.getSleepMsForRate(1024.124); - assertEquals(1000, x); - assertEquals(0, Utils.getSleepMsForRate(-1)); - } - - @Test - public void testgetRemoteAddress(){ - DMaaPContext dMaapContext = new DMaaPContext(); - MockHttpServletRequest request = new MockHttpServletRequest(); - - dMaapContext.setRequest(request); - - assertEquals(request.getRemoteAddr(), Utils.getRemoteAddress(dMaapContext)); - - request.addHeader("X-Forwarded-For", "XForward"); - assertEquals("XForward", Utils.getRemoteAddress(dMaapContext)); - - - } -} diff --git a/src/test/java/com/att/nsa/filter/ContentLengthFilterTest.java b/src/test/java/com/att/nsa/filter/ContentLengthFilterTest.java deleted file mode 100644 index 6930f73..0000000 --- a/src/test/java/com/att/nsa/filter/ContentLengthFilterTest.java +++ /dev/null @@ -1,88 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.filter; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import javax.servlet.ServletException; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.mr.filter.ContentLengthFilter; - -public class ContentLengthFilterTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testDestroy() { - ContentLengthFilter filter = new ContentLengthFilter(); - - filter.destroy(); - - assertTrue(true); - } - - - @Test - public void testFilter() { - ContentLengthFilter filter = new ContentLengthFilter(); - - try { - filter.doFilter(null, null, null); - } catch (IOException | ServletException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - - } - - @Test - public void testInit() { - ContentLengthFilter filter = new ContentLengthFilter(); - - try { - filter.init(null); - } catch (ServletException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (NullPointerException e) { - // TODO Auto-generated catch block - assertTrue(true); - } - - } - - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/filter/DefaultLengthTest.java b/src/test/java/com/att/nsa/filter/DefaultLengthTest.java deleted file mode 100644 index 8be5ca7..0000000 --- a/src/test/java/com/att/nsa/filter/DefaultLengthTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package com.att.nsa.filter; - -import static org.junit.Assert.*; - -import java.io.IOException; - -import javax.servlet.ServletException; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.att.mr.filter.DefaultLength; - -public class DefaultLengthTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testGetDefaultLength() { - DefaultLength length = new DefaultLength(); - - length.getDefaultLength(); - - assertTrue(true); - } - - @Test - public void testSetDefaultLength() { - DefaultLength length = new DefaultLength(); - - length.setDefaultLength("23"); - - assertTrue(true); - } - - - - - -} \ No newline at end of file diff --git a/src/test/java/com/att/nsa/filter/JUnitTestSuite.java b/src/test/java/com/att/nsa/filter/JUnitTestSuite.java deleted file mode 100644 index 46df4a3..0000000 --- a/src/test/java/com/att/nsa/filter/JUnitTestSuite.java +++ /dev/null @@ -1,42 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.filter; - -import junit.framework.TestSuite; - -import org.junit.runner.RunWith; -import org.junit.runners.Suite; -import org.junit.runners.Suite.SuiteClasses; -import org.apache.log4j.Logger; - -@RunWith(Suite.class) -@SuiteClasses({ ContentLengthFilterTest.class, DefaultLengthTest.class,}) -public class JUnitTestSuite { - private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); - - public static void main(String[] args) { - LOGGER.info("Running the test suite"); - - TestSuite tstSuite = new TestSuite(); - LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); - } - -} diff --git a/src/test/java/com/att/nsa/filter/TestRunner.java b/src/test/java/com/att/nsa/filter/TestRunner.java deleted file mode 100644 index 832a720..0000000 --- a/src/test/java/com/att/nsa/filter/TestRunner.java +++ /dev/null @@ -1,41 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * ONAP Policy Engine - * ================================================================================ - * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package com.att.nsa.filter; - -import org.junit.runner.JUnitCore; -import org.junit.runner.Result; -import org.junit.runner.notification.Failure; -import org.apache.log4j.Logger; - -public class TestRunner { - private static final Logger LOGGER = Logger.getLogger(TestRunner.class); - - public static void main(String[] args) { - // TODO Auto-generated method stub - Result result = JUnitCore.runClasses(JUnitTestSuite.class); - for (Failure failure : result.getFailures()) { - LOGGER.info(failure.toString()); - - } - LOGGER.info(result.wasSuccessful()); - } - -} diff --git a/src/test/java/com/att/sa/cambria/testClient/SimpleExample.java b/src/test/java/com/att/sa/cambria/testClient/SimpleExample.java deleted file mode 100644 index a6ad73e..0000000 --- a/src/test/java/com/att/sa/cambria/testClient/SimpleExample.java +++ /dev/null @@ -1,335 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START======================================================= - * org.onap.dmaap - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * - *******************************************************************************/ - -package com.att.sa.cambria.testClient; - -import kafka.api.FetchRequest; -import kafka.api.FetchRequestBuilder; -import kafka.api.PartitionOffsetRequestInfo; -import kafka.cluster.Broker; -import kafka.common.ErrorMapping; -import kafka.common.TopicAndPartition; -import kafka.javaapi.*; -import kafka.javaapi.consumer.SimpleConsumer; -import kafka.message.MessageAndOffset; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -public class SimpleExample -{ -// public static void main ( String args[] ) -// { -// if ( args.length < 5 ) -// { -// System.err.println ( "usage: SimpleExample " ); -// return; -// } -// -// final long maxReads = Long.parseLong ( args[0] ); -// final String topic = args[1]; -// final int partition = Integer.parseInt ( args[2] ); -// -// final int port = Integer.parseInt ( args[4] ); -// final hostPort hp = new hostPort ( args[3], port ); -// final LinkedList seeds = new LinkedList (); -// seeds.add ( hp ); -// -// try -// { -// final SimpleExample example = new SimpleExample (); -// example.run ( maxReads, topic, partition, seeds ); -// } -// catch ( Exception e ) -// { -// System.out.println ( "Oops:" + e ); -// e.printStackTrace (); -// } -// } -// -// public SimpleExample () -// { -// fReplicaBrokers = new ArrayList (); -// } -// -// public void run ( long remainingAllowedReads, String a_topic, int a_partition, List seedHosts ) throws IOException -// { -// // find the meta data about the topic and partition we are interested in -// -// hostPort leadBroker = findLeader ( seedHosts, a_topic, a_partition ); -// if ( leadBroker == null ) -// { -// System.out.println ( "Can't find leader for Topic and Partition. Exiting" ); -// return; -// } -// -// final String clientName = "Client_" + a_topic + "_" + a_partition; -// -// SimpleConsumer consumer = new SimpleConsumer ( leadBroker.fHost, leadBroker.fPort, 100000, 64 * 1024, clientName ); -// long readOffset = getLastOffset ( consumer, a_topic, a_partition, kafka.api.OffsetRequest.EarliestTime (), clientName ); -// -// int numErrors = 0; -// while ( remainingAllowedReads > 0 ) -// { -// if ( consumer == null ) -// { -// consumer = new SimpleConsumer ( leadBroker.fHost, leadBroker.fPort, 100000, 64 * 1024, clientName ); -// } -// -// final FetchRequest req = new FetchRequestBuilder () -// .clientId ( clientName ) -// .addFetch ( a_topic, a_partition, readOffset, 100000 ).build (); -// final FetchResponse fetchResponse = consumer.fetch ( req ); -// -// if ( fetchResponse.hasError () ) -// { -// numErrors++; -// -// // Something went wrong! -// final short code = fetchResponse.errorCode ( a_topic, a_partition ); -// System.out.println ( "Error fetching data from the Broker:" + leadBroker + " Reason: " + code ); -// if ( numErrors > 5 ) -// break; -// -// if ( code == ErrorMapping.OffsetOutOfRangeCode () ) -// { -// // We asked for an invalid offset. 
For simple case ask for -// // the last element to reset -// readOffset = getLastOffset ( consumer, a_topic, -// a_partition, kafka.api.OffsetRequest.LatestTime (), -// clientName ); -// continue; -// } -// -// consumer.close (); -// consumer = null; -// -// leadBroker = findNewLeader ( leadBroker, a_topic, a_partition ); -// continue; -// } -// numErrors = 0; -// -// long numRead = 0; -// for ( MessageAndOffset messageAndOffset : fetchResponse.messageSet ( a_topic, a_partition ) ) -// { -// long currentOffset = messageAndOffset.offset (); -// if ( currentOffset < readOffset ) -// { -// System.out.println ( "Found an old offset: " -// + currentOffset + " Expecting: " + readOffset ); -// continue; -// } -// readOffset = messageAndOffset.nextOffset (); -// ByteBuffer payload = messageAndOffset.message ().payload (); -// -// byte[] bytes = new byte [payload.limit ()]; -// payload.get ( bytes ); -// System.out.println ( String.valueOf ( messageAndOffset.offset () ) + ": " + new String ( bytes, "UTF-8" ) ); -// numRead++; -// remainingAllowedReads--; -// } -// -// if ( numRead == 0 ) -// { -// try -// { -// Thread.sleep ( 1000 ); -// } -// catch ( InterruptedException ie ) -// { -// } -// } -// } -// -// if ( consumer != null ) -// { -// consumer.close (); -// } -// } -// -// public static long getLastOffset ( SimpleConsumer consumer, String topic, -// int partition, long whichTime, String clientName ) -// { -// TopicAndPartition topicAndPartition = new TopicAndPartition ( topic, -// partition ); -// Map requestInfo = new HashMap (); -// requestInfo.put ( topicAndPartition, new PartitionOffsetRequestInfo ( -// whichTime, 1 ) ); -// kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest ( -// requestInfo, kafka.api.OffsetRequest.CurrentVersion (), clientName ); -// OffsetResponse response = consumer.getOffsetsBefore ( request ); -// -// if ( response.hasError () ) -// { -// System.out.println ( "Error fetching data Offset Data the Broker. Reason: " -// + response.errorCode ( topic, partition ) ); -// return 0; -// } -// -// final long[] offsets = response.offsets ( topic, partition ); -// return offsets[0]; -// } -// -// /** -// * Find a new leader for a topic/partition, including a pause for the coordinator to -// * find a new leader, as needed. -// * -// * @param oldLeader -// * @param topic -// * @param partition -// * @return -// * @throws IOException -// */ -// private hostPort findNewLeader ( hostPort oldLeader, String topic, int partition ) throws IOException -// { -// try -// { -// int attemptsLeft = 3; -// boolean haveSlept = false; -// -// while ( attemptsLeft-- > 0 ) -// { -// System.out.println ( "" + attemptsLeft + " attempts Left" ); // FIXME: make sure it's 3 attempts! -// -// // ask the brokers for a leader -// final hostPort newLeader = findLeader ( fReplicaBrokers, topic, partition ); -// if ( newLeader != null ) -// { -// // we can use this leader if it's different (i.e. a new leader has been elected) -// // or it's the same leader, but we waited to allow ZK to get a new one, and -// // the original recovered -// if ( !oldLeader.equals ( newLeader ) || haveSlept ) -// { -// return newLeader; -// } -// } -// -// // sleep -// haveSlept = true; -// Thread.sleep ( 1000 ); -// } -// } -// catch ( InterruptedException x ) -// { -// // just give up -// } -// -// System.out.println ( "Unable to find new leader after Broker failure. Exiting" ); -// throw new IOException ( "Unable to find new leader after Broker failure. 
Exiting" ); -// } -// -// /** -// * Given one or more seed brokers, find the leader for a given topic/partition -// * @param seeds -// * @param topic -// * @param partition -// * @return partition metadata, or null -// */ -// private hostPort findLeader ( List seeds, String topic, int partition ) -// { -// final List topics = new ArrayList (); -// topics.add ( topic ); -// -// for ( hostPort seed : seeds ) -// { -// final SimpleConsumer consumer = new SimpleConsumer ( seed.fHost, seed.fPort, 100000, 64 * 1024, "leaderLookup" ); -// final TopicMetadataRequest req = new TopicMetadataRequest ( topics ); -// final TopicMetadataResponse resp = consumer.send ( req ); -// consumer.close (); -// -// final List metaData = resp.topicsMetadata (); -// for ( TopicMetadata item : metaData ) -// { -// for ( PartitionMetadata part : item.partitionsMetadata () ) -// { -// if ( part.partitionId () == partition ) -// { -// // found our partition. load the details, then return it -// fReplicaBrokers.clear (); -// for ( kafka.cluster.Broker replica : part.replicas () ) -// { -// fReplicaBrokers.add ( new hostPort ( replica.host (), replica.port () ) ); -// } -// return new hostPort ( part.leader () ); -// } -// } -// } -// } -// -// return null; -// } -// -// private static class hostPort -// { -// public hostPort ( String host, int port ) { fHost = host; fPort = port; } -// -// public hostPort ( Broker leader ) -// { -// fHost = leader.host (); -// fPort = leader.port (); -// } -// -// -// public final String fHost; -// public final int fPort; -// -// @Override -// public int hashCode () -// { -// final int prime = 31; -// int result = 1; -// result = prime * result -// + ( ( fHost == null ) ? 0 : fHost.hashCode () ); -// result = prime * result + fPort; -// return result; -// } -// -// @Override -// public boolean equals ( Object obj ) -// { -// if ( this == obj ) -// return true; -// if ( obj == null ) -// return false; -// if ( getClass () != obj.getClass () ) -// return false; -// hostPort other = (hostPort) obj; -// if ( fHost == null ) -// { -// if ( other.fHost != null ) -// return false; -// } -// else if ( !fHost.equals ( other.fHost ) ) -// return false; -// if ( fPort != other.fPort ) -// return false; -// return true; -// } -// } -// -// private List fReplicaBrokers; -} diff --git a/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java new file mode 100644 index 0000000..69abb24 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/DMaaPMetricsSenderTest.java @@ -0,0 +1,131 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.mr.apiServer.metrics.cambria; + + +import static org.junit.Assert.assertTrue; + +import java.io.File; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.mr.apiServer.metrics.cambria.DMaaPMetricsSender; + +public class DMaaPMetricsSenderTest { + + @Before + public void setUp() throws Exception { + ClassLoader classLoader = getClass().getClassLoader(); + AJSCPropertiesMap.refresh(new File(classLoader.getResource("MsgRtrApi.properties").getFile())); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSendPeriodically() { + + DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic"); + try { + sender.sendPeriodically(null, null, "testTopic"); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (NoClassDefFoundError e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testSendPeriodically2() { + + DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic"); + try { + sender.sendPeriodically(null, null, "url", "testTopic", 2); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testSend() { + + DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic"); + try { + sender.send(); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testRun() { + + DMaaPMetricsSender sender = new DMaaPMetricsSender(null, "url", "testTopic"); + try { + sender.run(); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/JUnitTestSuite.java new file mode 100644 index 0000000..4bef234 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/JUnitTestSuite.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.mr.apiServer.metrics.cambria; + +import junit.framework.TestSuite; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DMaaPMetricsSenderTest.class}) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/TestRunner.java b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/TestRunner.java new file mode 100644 index 0000000..2eb7c1c --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/apiServer/metrics/cambria/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.mr.apiServer.metrics.cambria; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiExceptionTest.java b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiExceptionTest.java new file mode 100644 index 0000000..77a387e --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiExceptionTest.java @@ -0,0 +1,74 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.mr.cambria; + +import static org.junit.Assert.*; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class CambriaApiExceptionTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetErrRes() { + + int status = 1; + String msg = "helloWorld"; + CambriaApiException cambria = new CambriaApiException(status, msg); + + cambria.getErrRes(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testSetErrRes() { + + int status = 1; + String msg = "helloWorld"; + CambriaApiException cambria = new CambriaApiException(status, msg); + + cambria.setErrRes(new ErrorResponse(200, 0, "OK")); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiTestCase.java b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiTestCase.java new file mode 100644 index 0000000..01e45c9 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiTestCase.java @@ -0,0 +1,51 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.cambria; + +import java.util.HashMap; +import java.util.Map; + +import junit.framework.TestCase; + +import org.junit.Ignore; + +@Ignore +public class CambriaApiTestCase extends TestCase { + + @Override + protected void setUp() throws Exception { + final Map<String, String> argMap = new HashMap<String, String>(); + + argMap.put("broker.type", "memory"); + argMap.put("accounts.dao.class", "com.att.nsa.fe3c.dao.memory.MemoryDAOFactory"); + argMap.put("topic.dao.class", "com.att.nsa.fe3c.dao.memory.MemoryDAOFactory"); + + //CambriaApiServer.start(argMap); + System.out.println("setUp() complete"); + } + + public void tearDown() throws Exception { + System.out.println("tearDown() started"); + //CambriaApiServer.stop(); + System.out.println("tearDown() complete"); + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiVersionInfoTest.java b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiVersionInfoTest.java new file mode 100644 index 0000000..cf89f52 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/CambriaApiVersionInfoTest.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.cambria; + +import junit.framework.TestCase; + +import org.junit.Test; + +import com.att.nsa.apiServer.util.NsaTestClock; + +public class CambriaRateLimiterTest +{ + @Test + public void testRateLimiter () + { + /*final NsaTestClock clock = new NsaTestClock(1, false); + + final String topic = "topic"; + final String consumerGroup = "group"; + final String clientId = "id"; + + final int window = 5; + + // rate limit: 1 empty call/min avg over 5 minutes, with 10ms delay + final CambriaRateLimiter rater = new CambriaRateLimiter ( 1.0, window, 10 ); + try + { + // prime with a call to start rate window + rater.onCall ( topic, consumerGroup, clientId ); + rater.onSend ( topic, consumerGroup, clientId, 1 ); + clock.addMs ( 1000*60*window ); + + // rate should now be 0, with a good window + for ( int i=0; i<4; i++ ) + { + clock.addMs ( 1000*15 ); + rater.onCall ( topic, consumerGroup, clientId ); + rater.onSend ( topic, consumerGroup, clientId, 0 ); + } + // rate is now 0.8 = 4 calls in last 5 minutes = 4/5 = 0.8 + + clock.addMs ( 1000*15 ); + rater.onCall ( topic, consumerGroup, clientId ); + rater.onSend ( topic, consumerGroup, clientId, 0 ); + // rate = 1.0 = 5 calls in last 5 mins + + clock.addMs ( 1000 ); + rater.onCall ( topic, consumerGroup, clientId ); + rater.onSend ( topic, consumerGroup, clientId, 0 ); + // rate = 1.2 = 6 calls in last 5 mins, should fire + + fail ( "Should have thrown rate limit exception." ); + } + catch ( CambriaApiException x ) + { + // good + }*/ + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/JUnitTestSuite.java new file mode 100644 index 0000000..d9e3483 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/JUnitTestSuite.java @@ -0,0 +1,42 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.mr.cambria; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ CambriaApiExceptionTest.class, CambriaApiVersionInfoTest.class, CambriaApiTestCase.class, CambriaRateLimiterTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/TestRunner.java new file mode 100644 index 0000000..f276888 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.mr.cambria; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/CuratorFrameworkImpl.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/CuratorFrameworkImpl.java new file mode 100644 index 0000000..2f57abf --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/CuratorFrameworkImpl.java @@ -0,0 +1,278 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.kafka; + +import java.util.concurrent.TimeUnit; + +import org.apache.curator.CuratorZookeeperClient; +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.WatcherRemoveCuratorFramework; +import org.apache.curator.framework.api.CreateBuilder; +import org.apache.curator.framework.api.CuratorListener; +import org.apache.curator.framework.api.DeleteBuilder; +import org.apache.curator.framework.api.ExistsBuilder; +import org.apache.curator.framework.api.GetACLBuilder; +import org.apache.curator.framework.api.GetChildrenBuilder; +import org.apache.curator.framework.api.GetConfigBuilder; +import org.apache.curator.framework.api.GetDataBuilder; +import org.apache.curator.framework.api.ReconfigBuilder; +import org.apache.curator.framework.api.RemoveWatchesBuilder; +import org.apache.curator.framework.api.SetACLBuilder; +import org.apache.curator.framework.api.SetDataBuilder; +import org.apache.curator.framework.api.SyncBuilder; +import org.apache.curator.framework.api.UnhandledErrorListener; +import org.apache.curator.framework.api.transaction.CuratorMultiTransaction; +import org.apache.curator.framework.api.transaction.CuratorTransaction; +import org.apache.curator.framework.api.transaction.TransactionOp; +import org.apache.curator.framework.imps.CuratorFrameworkState; +import org.apache.curator.framework.listen.Listenable; +import org.apache.curator.framework.schema.SchemaSet; +import org.apache.curator.framework.state.ConnectionStateErrorPolicy; +import org.apache.curator.framework.state.ConnectionStateListener; +import org.apache.curator.utils.EnsurePath; +import org.apache.zookeeper.Watcher; +import org.apache.zookeeper.server.quorum.flexible.QuorumVerifier; + +public class CuratorFrameworkImpl implements CuratorFramework { + + @Override + public void blockUntilConnected() throws InterruptedException { + // TODO Auto-generated method stub + + } + + @Override + public boolean blockUntilConnected(int arg0, TimeUnit arg1) throws InterruptedException { + // TODO Auto-generated method stub + return false; + } + + @Override + public ExistsBuilder checkExists() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void clearWatcherReferences(Watcher arg0) { + // TODO Auto-generated method stub + + } + + @Override + public void close() { + // TODO Auto-generated method stub + + } + + @Override + public CreateBuilder create() { + // TODO Auto-generated method stub + return null; + } + + @Override + public DeleteBuilder delete() { + // TODO Auto-generated method stub + return null; + } + + @Override + public GetACLBuilder getACL() { + // TODO Auto-generated method stub + return null; + } + + @Override + public GetChildrenBuilder getChildren() { + // TODO Auto-generated 
method stub + return null; + } + + @Override + public Listenable<ConnectionStateListener> getConnectionStateListenable() { + // TODO Auto-generated method stub + return null; + } + + @Override + public Listenable<CuratorListener> getCuratorListenable() { + // TODO Auto-generated method stub + return null; + } + + @Override + public GetDataBuilder getData() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getNamespace() { + // TODO Auto-generated method stub + return null; + } + + @Override + public CuratorFrameworkState getState() { + // TODO Auto-generated method stub + return null; + } + + @Override + public Listenable<UnhandledErrorListener> getUnhandledErrorListenable() { + // TODO Auto-generated method stub + return null; + } + + @Override + public CuratorZookeeperClient getZookeeperClient() { + // TODO Auto-generated method stub + return null; + } + + @Override + public CuratorTransaction inTransaction() { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean isStarted() { + // TODO Auto-generated method stub + return false; + } + + @Override + public EnsurePath newNamespaceAwareEnsurePath(String arg0) { + // TODO Auto-generated method stub + return null; + } + + @Override + public CuratorFramework nonNamespaceView() { + // TODO Auto-generated method stub + return null; + } + + @Override + public SetACLBuilder setACL() { + // TODO Auto-generated method stub + return null; + } + + @Override + public SetDataBuilder setData() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void start() { + // TODO Auto-generated method stub + + } + + @Override + public SyncBuilder sync() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void sync(String arg0, Object arg1) { + // TODO Auto-generated method stub + + } + + @Override + public CuratorFramework usingNamespace(String arg0) { + // TODO Auto-generated method stub + return null; + } + + @Override + public ReconfigBuilder reconfig() { + // TODO Auto-generated method stub + return null; + } + + @Override + public GetConfigBuilder getConfig() { + // TODO Auto-generated method stub + return null; + } + + @Override + public CuratorMultiTransaction transaction() { + // TODO Auto-generated method stub + return null; + } + + @Override + public TransactionOp transactionOp() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void createContainers(String path) throws Exception { + // TODO Auto-generated method stub + + } + + @Override + public RemoveWatchesBuilder watches() { + // TODO Auto-generated method stub + return null; + } + + @Override + public WatcherRemoveCuratorFramework newWatcherRemoveCuratorFramework() { + // TODO Auto-generated method stub + return null; + } + + @Override + public ConnectionStateErrorPolicy getConnectionStateErrorPolicy() { + // TODO Auto-generated method stub + return null; + } + + @Override + public QuorumVerifier getCurrentConfig() { + // TODO Auto-generated method stub + return null; + } + + @Override + public SchemaSet getSchemaSet() { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean isZk34CompatibilityMode() { + // TODO Auto-generated method stub + return false; + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/JUnitTestSuite.java new file mode 100644 index 0000000..5904de5 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/JUnitTestSuite.java @@ -0,0 +1,42
@@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.kafka; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ KafkaConsumerCacheTest.class, KafkaPublisherTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaConsumerCacheTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaConsumerCacheTest.java new file mode 100644 index 0000000..83866cf --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaConsumerCacheTest.java @@ -0,0 +1,256 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.kafka; + +import static org.junit.Assert.*; + +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.curator.framework.CuratorFramework; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.modules.junit4.PowerMockRunner; +import org.powermock.core.classloader.annotations.PrepareForTest; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; + +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.backends.kafka.Kafka011Consumer; +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache; +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({ AJSCPropertiesMap.class }) +public class KafkaConsumerCacheTest { + private KafkaConsumerCache kafkaConsumerCache =null; + @Mock + private ConcurrentHashMap fConsumers; + @Mock + private MetricsSet fMetrics; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); + + } + + @After + public void tearDown() throws Exception { + } + + + @Test + public void testSweep() { + kafkaConsumerCache = new KafkaConsumerCache(); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kSetting_TouchEveryMs")).thenReturn("100"); + kafkaConsumerCache.sweep(); + + } + + + // DOES NOT WORK + @Test + public void testStartCache() { + + /* + * KafkaConsumerCache kafka = null; + * + * try { kafka = new KafkaConsumerCache("123", null); + * + * } catch (NoClassDefFoundError e) { try { kafka.startCache("DMAAP", + * null); } catch (NullPointerException e1) { // TODO Auto-generated + * catch block assertTrue(true); } catch (KafkaConsumerCacheException + * e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } + */ + + + new CuratorFrameworkImpl(); + new MetricsSetImpl(); + KafkaConsumerCache kafka=null; + try { + kafka = new KafkaConsumerCache(); + kafka.setfApiId("1"); + kafka.startCache("DMAAP", null); + } catch (NoClassDefFoundError e) { + + } catch (KafkaConsumerCacheException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testGetCuratorFramework() { + + CuratorFramework curator = new CuratorFrameworkImpl(); + new MetricsSetImpl(); + try { + + } catch (NoClassDefFoundError e) { + + KafkaConsumerCache.getCuratorFramework(curator); + } + + } + + /* + * @Test public void testStopCache() { + * + * KafkaConsumerCache kafka = null; new CuratorFrameworkImpl(); new + * MetricsSetImpl(); try { kafka = new KafkaConsumerCache("123", null); + * kafka.stopCache(); } catch (NoClassDefFoundError e) { + * + * } + * + * } + */ + + @Test + public void testGetConsumerFor() { + + KafkaConsumerCache kafka = null; + + try { + kafka = new KafkaConsumerCache(); + kafka.getConsumerFor("testTopic", "CG1", "23"); + } catch (NoClassDefFoundError e) { + + } catch (KafkaConsumerCacheException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testPutConsumerFor() { + + Kafka011Consumer consumer = null; + 
KafkaConsumerCache kafka = null; + + try { + kafka = new KafkaConsumerCache(); + + } catch (NoClassDefFoundError e) { + try { + kafka.putConsumerFor("testTopic", "CG1", "23", consumer); + } catch (NullPointerException e1) { + // TODO Auto-generated catch block + assertTrue(true); + } catch (KafkaConsumerCacheException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + } + + } + + @Test + public void testGetConsumers() { + + KafkaConsumerCache kafka = null; + + try { + kafka = new KafkaConsumerCache(); + + } catch (NoClassDefFoundError e) { + try { + kafka.getConsumers(); + } catch (NullPointerException e1) { + // TODO Auto-generated catch block + assertTrue(true); + } + } + + } + + @Test + public void testDropAllConsumers() { + + KafkaConsumerCache kafka = null; + try { + kafka = new KafkaConsumerCache(); + + } catch (NoClassDefFoundError e) { + try { + kafka.dropAllConsumers(); + } catch (NullPointerException e1) { + // TODO Auto-generated catch block + assertTrue(true); + } + } + + } + + @Test + public void testSignalOwnership() { + + KafkaConsumerCache kafka = null; + + try { + kafka = new KafkaConsumerCache(); + // kafka.signalOwnership("testTopic", "CG1", "23"); + } catch (NoClassDefFoundError e) { + try { + kafka.signalOwnership("testTopic", "CG1", "23"); + } catch (KafkaConsumerCacheException e1) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e1) { + // TODO Auto-generated catch block + // assertTrue(true); + e1.printStackTrace(); + } + + } + + // assertTrue(true); + } + + @Test + public void testDropConsumer() { + + KafkaConsumerCache kafka = null; + + try { + kafka = new KafkaConsumerCache(); + // kafka.dropConsumer("testTopic", "CG1", "23"); + } catch (NoClassDefFoundError e) { + try { + kafka.dropConsumer("testTopic", "CG1", "23"); + } catch (NullPointerException e1) { + // TODO Auto-generated catch block + assertTrue(true); + } + } + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java new file mode 100644 index 0000000..982fbf2 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java @@ -0,0 +1,153 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.kafka; + +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; + +import kafka.common.FailedToSendMessageException; +import kafka.producer.KeyedMessage; + +public class KafkaPublisherTest { + + + + /*@Before + public void setUp() throws Exception { + ClassLoader classLoader = getClass().getClassLoader(); + AJSCPropertiesMap.refresh(new File(classLoader.getResource("MsgRtrApi.properties").getFile())); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSendMessages() { + + String topic = "testTopic"; + + KafkaPublisher kafka = null; + try { + kafka = new KafkaPublisher(null); + + } catch (missingReqdSetting e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NoClassDefFoundError e) { + try { + kafka.sendMessage(topic, null); + } catch (NullPointerException e1) { + // TODO Auto-generated catch block + assertTrue(true); + } catch (FailedToSendMessageException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + } catch (FailedToSendMessageException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testSendBatchMessage() { + + String topic = "testTopic"; + + KafkaPublisher kafka = null; + ArrayList<KeyedMessage<String, String>> kms = null; + try { + kafka = new KafkaPublisher(null); + + } catch (missingReqdSetting e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NoClassDefFoundError e) { + try { + kafka.sendBatchMessage(topic, kms); + } catch (NullPointerException e1) { + // TODO Auto-generated catch block + assertTrue(true); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + } catch (FailedToSendMessageException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void sendMessages() { + + String topic = "testTopic"; + + List<message> msgs = null; + + KafkaPublisher kafka = null; + //ArrayList<KeyedMessage<String, String>> kms = null; + try { + kafka = new KafkaPublisher(null); + + } catch (missingReqdSetting e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NoClassDefFoundError e) { + try { + kafka.sendMessages(topic, msgs); + } catch (NullPointerException e1) { + // TODO Auto-generated catch block + assertTrue(true); + } catch (FailedToSendMessageException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + } catch (FailedToSendMessageException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + }*/ + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/MetricsSetImpl.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/MetricsSetImpl.java new file mode 100644 index 0000000..ccb432e --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/MetricsSetImpl.java @@ -0,0 +1,123 @@ +/*- + *
============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.kafka; + +import java.util.List; +import java.util.Map; + +import org.json.JSONObject; + +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import com.att.nsa.metrics.CdmMeasuredItem; + +public class MetricsSetImpl implements MetricsSet { + + @Override + public List getEntries() { + // TODO Auto-generated method stub + return null; + } + + @Override + public CdmMeasuredItem getItem(String arg0) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Map getItems() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void putItem(String arg0, CdmMeasuredItem arg1) { + // TODO Auto-generated method stub + + } + + @Override + public void removeItem(String arg0) { + // TODO Auto-generated method stub + + } + + @Override + public int size() { + // TODO Auto-generated method stub + return 0; + } + + @Override + public JSONObject toJson() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void setupCambriaSender() { + // TODO Auto-generated method stub + + } + + @Override + public void onRouteComplete(String name, long durationMs) { + // TODO Auto-generated method stub + + } + + @Override + public void publishTick(int amount) { + // TODO Auto-generated method stub + + } + + @Override + public void consumeTick(int amount) { + // TODO Auto-generated method stub + + } + + @Override + public void onKafkaConsumerCacheMiss() { + // TODO Auto-generated method stub + + } + + @Override + public void onKafkaConsumerCacheHit() { + // TODO Auto-generated method stub + + } + + @Override + public void onKafkaConsumerClaimed() { + // TODO Auto-generated method stub + + } + + @Override + public void onKafkaConsumerTimeout() { + // TODO Auto-generated method stub + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/TestRunner.java new file mode 100644 index 0000000..8f70091 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.kafka; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/JUnitTestSuite.java new file mode 100644 index 0000000..d577ae8 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/JUnitTestSuite.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.memory; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ MemoryConsumerFactoryTest.class, MemoryMetaBrokerTest.class, MemoryQueueTest.class, + MemoryQueuePublisherTest.class, MessageLoggerTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryConsumerFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryConsumerFactoryTest.java new file mode 100644 index 0000000..a0bbaa6 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryConsumerFactoryTest.java @@ -0,0 +1,83 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.memory; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.backends.memory.MemoryConsumerFactory; + +public class MemoryConsumerFactoryTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetConsumerFor() { + MemoryConsumerFactory factory = new MemoryConsumerFactory(null); + + + String topic = "testTopic"; + String consumerGroupId = "CG1"; + String clientId = "C1"; + String remoteHost="remoteHost"; + int timeoutMs = 1000; + factory.getConsumerFor(topic, consumerGroupId, clientId, timeoutMs,remoteHost); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testDropCache() { + MemoryConsumerFactory factory = new MemoryConsumerFactory(null); + + factory.dropCache(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGetConsumers() { + MemoryConsumerFactory factory = new MemoryConsumerFactory(null); + + factory.getConsumers(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryMetaBrokerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryMetaBrokerTest.java new file mode 100644 index 0000000..60803f3 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryMetaBrokerTest.java @@ -0,0 +1,92 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.memory; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.backends.memory.MemoryMetaBroker; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; + +public class MemoryMetaBrokerTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetAllTopics() { + MemoryMetaBroker broker = new MemoryMetaBroker(null, null); + + broker.getAllTopics(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGeTopic() { + MemoryMetaBroker broker = new MemoryMetaBroker(null, null); + + broker.getTopic("testTopic"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testCreateTopic() { + + //uncommenting this gives a Null Pointer Exception + + MemoryMetaBroker broker = new MemoryMetaBroker(null, null); + + int timeoutMs = 1000; + try { + broker.createTopic("testTopic","topic for testing", "ABCD123", 1,3, true); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueuePublisherTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueuePublisherTest.java new file mode 100644 index 0000000..efb308c --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueuePublisherTest.java @@ -0,0 +1,102 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.memory; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueuePublisher; + + +public class MemoryQueuePublisherTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSendBatchMessage() { + MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null); + + try { + publisher.sendBatchMessageNew("testTopic", null); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testSendMessage() { + MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null); + + try { + publisher.sendMessage("testTopic", null); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testSendMessages() { + MemoryQueuePublisher publisher = new MemoryQueuePublisher(null, null); + + + try { + publisher.sendMessages("testTopic", null); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueueTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueueTest.java new file mode 100644 index 0000000..126ecbb --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MemoryQueueTest.java @@ -0,0 +1,95 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.memory; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueue; + + +public class MemoryQueueTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testCreateTopic() { + MemoryQueue queue = new MemoryQueue(); + + queue.createTopic("testTopic"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + @Test + public void testRemoveTopic() { + MemoryQueue queue = new MemoryQueue(); + + queue.removeTopic("testTopic"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testPut() { + MemoryQueue queue = new MemoryQueue(); + + try { + queue.put("testTopic", null); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGet() { + MemoryQueue queue = new MemoryQueue(); + + queue.get("testTopic", "consumer"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MessageLoggerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MessageLoggerTest.java new file mode 100644 index 0000000..bc2025a --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/MessageLoggerTest.java @@ -0,0 +1,104 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.memory; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.backends.memory.MessageLogger; + + +public class MessageLoggerTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSendMessage() { + MessageLogger dropper = new MessageLogger(); + + try { + dropper.sendMessage("testTopic", null); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testSendMessages() { + MessageLogger dropper = new MessageLogger(); + + try { + dropper.sendMessages("testTopic", null); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testSendBatchMessage() { + MessageLogger dropper = new MessageLogger(); + + try { + dropper.sendBatchMessageNew("testTopic", null); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} + + + + diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/TestRunner.java new file mode 100644 index 0000000..c6af86e --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/memory/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.backends.memory; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest.java new file mode 100644 index 0000000..2a79e92 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest.java @@ -0,0 +1,58 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import com.att.nsa.metrics.CdmMetricsRegistry; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ApiKeyBeanTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetEmail() { + + ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); + + bean.getEmail(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest2.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest2.java new file mode 100644 index 0000000..8f77807 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest2.java @@ -0,0 +1,58 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import com.att.nsa.metrics.CdmMetricsRegistry; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ApiKeyBeanTest2 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSetEmail() { + + ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); + + bean.setEmail("rs857c@att.com"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest3.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest3.java new file mode 100644 index 0000000..108b5fb --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest3.java @@ -0,0 +1,58 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import com.att.nsa.metrics.CdmMetricsRegistry; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ApiKeyBeanTest3 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetDescription() { + + ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); + + bean.getDescription(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest4.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest4.java new file mode 100644 index 0000000..cae2c94 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest4.java @@ -0,0 +1,58 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import com.att.nsa.metrics.CdmMetricsRegistry; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ApiKeyBeanTest4 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSetDescription() { + + ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); + + bean.setDescription("new testing description"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest5.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest5.java new file mode 100644 index 0000000..3e095c0 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest5.java @@ -0,0 +1,58 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import com.att.nsa.metrics.CdmMetricsRegistry; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ApiKeyBeanTest5 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetSharedSecret() { + + ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); + + bean.getSharedSecret(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest6.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest6.java new file mode 100644 index 0000000..871e2e7 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/ApiKeyBeanTest6.java @@ -0,0 +1,58 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import com.att.nsa.metrics.CdmMetricsRegistry; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ApiKeyBeanTest6 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetKey() { + + ApiKeyBean bean = new ApiKeyBean("hs647a@att.com", "testing bean"); + + bean.getKey(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPCambriaLimiterTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPCambriaLimiterTest.java new file mode 100644 index 0000000..853d770 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPCambriaLimiterTest.java @@ -0,0 +1,83 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter; + +public class DMaaPCambriaLimiterTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetSleepMsForRate() { + + + double value = 3; + DMaaPCambriaLimiter.getSleepMsForRate(value); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testOnCall() { + + DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(1,2, 3); + try { + limiter.onCall("testTopic", "ConsumerGroup1", "client2","remoteHost"); + } catch (CambriaApiException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testOnSend() { + + DMaaPCambriaLimiter limiter = new DMaaPCambriaLimiter(3,3, 3); + limiter.onSend("testTopic", "consumerGroup1", "client1", 100); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest.java new file mode 100644 index 0000000..9a67673 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest.java @@ -0,0 +1,53 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; + +public class DMaaPContextTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetBatchID() { + + DMaaPContext.getBatchID(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest2.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest2.java new file mode 100644 index 0000000..1366911 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest2.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; + +public class DMaaPContextTest2 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetRequest() { + + DMaaPContext context = new DMaaPContext(); + + context.getRequest(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest3.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest3.java new file mode 100644 index 0000000..0c00db0 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest3.java @@ -0,0 +1,57 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; + +public class DMaaPContextTest3 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetResponse() { + + DMaaPContext context = new DMaaPContext(); + + context.getResponse(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest4.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest4.java new file mode 100644 index 0000000..23f1ea2 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest4.java @@ -0,0 +1,60 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.springframework.mock.web.MockHttpServletRequest; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; + +public class DMaaPContextTest4 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetSession() { + + DMaaPContext context = new DMaaPContext(); + MockHttpServletRequest request = new MockHttpServletRequest(); + context.setRequest(request); + + context.getSession(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest5.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest5.java new file mode 100644 index 0000000..4ec03fe --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest5.java @@ -0,0 +1,57 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; + +public class DMaaPContextTest5 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetConfigReader() { + + DMaaPContext context = new DMaaPContext(); + + context.getConfigReader(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest6.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest6.java new file mode 100644 index 0000000..982cffb --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPContextTest6.java @@ -0,0 +1,57 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; + +public class DMaaPContextTest6 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetConsumerRequestTime() { + + DMaaPContext context = new DMaaPContext(); + + context.getConsumerRequestTime(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaMetaBrokerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaMetaBrokerTest.java new file mode 100644 index 0000000..35f3064 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/DMaaPKafkaMetaBrokerTest.java @@ -0,0 +1,252 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.assertTrue; + +import org.I0Itec.zkclient.ZkClient; +import org.I0Itec.zkclient.exception.ZkNoNodeException; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import static org.mockito.Matchers.any; + +import java.util.Properties; + +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.apache.kafka.clients.admin.AdminClient; + +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.metabroker.Broker1.TopicExistsException; +import com.att.nsa.configs.ConfigDb; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.configs.ConfigPath; + + +@RunWith(PowerMockRunner.class) +@PrepareForTest({ AdminClient.class}) +public class DMaaPKafkaMetaBrokerTest { + + @InjectMocks + private DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker; + @Mock + private ZkClient fZk; + @Mock + private AdminClient fKafkaAdminClient; + @Mock + private AdminClient client; + @Mock + private ConfigDb configDb; + @Mock + ConfigPath fBaseTopicData; + @Mock + private ZkClient zkClient; + @Mock + Topic mockTopic; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + PowerMockito.mockStatic(AdminClient.class); + //PowerMockito.when(AdminClient.create (any(Properties.class) )).thenReturn(fKafkaAdminClient); + + //PowerMockito.mockStatic(AdminUtils.class); + PowerMockito.when(configDb.parse("/topics")).thenReturn(fBaseTopicData); + + + } + + @Test + public void testGetAlltopics() { + try { + dMaaPKafkaMetaBroker.getAllTopics(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testcreateTopic() { + try { + dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true); + } catch (CambriaApiException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + + @Test + public void testcreateTopic_wrongPartition() { + try { + + dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 0, 1, true); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testcreateTopic_wrongReplica() { + try { + + dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 0, true); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testcreateTopic_error1() { + try { + dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true); + } catch (CambriaApiException e) { + assertTrue(true); + } catch
(TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + + } + + @Test + public void testcreateTopic_error2() { + try { + dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + @Test + public void testcreateTopic_error3() { + try { + dMaaPKafkaMetaBroker.createTopic("testtopic", "testtopic", "admin", 1, 1, true); + } catch (CambriaApiException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + assertTrue(true); + + } catch (Exception e) { + e.printStackTrace(); + } + + } + + @Test + public void testDeleteTopic() { + try { + dMaaPKafkaMetaBroker.deleteTopic("testtopic"); + } catch (CambriaApiException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testDeleteTopic_error1() { + try { + dMaaPKafkaMetaBroker.deleteTopic("testtopic"); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + + } + + @Test + public void testDeleteTopic_error2() { + try { + dMaaPKafkaMetaBroker.deleteTopic("testtopic"); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + + } + + @Test + public void testDeleteTopic_error3() { + try { + dMaaPKafkaMetaBroker.deleteTopic("testtopic"); + } catch (CambriaApiException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + assertTrue(true); + } catch (Exception e) { + e.printStackTrace(); + } + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/JUnitTestSuite.java new file mode 100644 index 0000000..1589290 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/JUnitTestSuite.java @@ -0,0 +1,49 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ ApiKeyBeanTest.class, ApiKeyBeanTest2.class, ApiKeyBeanTest3.class, ApiKeyBeanTest4.class, ApiKeyBeanTest5.class, ApiKeyBeanTest6.class, + DMaaPCambriaLimiterTest.class, DMaaPContextTest.class, DMaaPContextTest2.class, + DMaaPContextTest3.class,DMaaPContextTest4.class,DMaaPContextTest5.class,DMaaPContextTest6.class, + LogDetailsTest.class, LogDetailsTest2.class,LogDetailsTest3.class,LogDetailsTest4.class,LogDetailsTest5.class,LogDetailsTest6.class, + LogDetailsTest7.class,LogDetailsTest8.class,LogDetailsTest9.class,LogDetailsTest10.class,LogDetailsTest11.class,LogDetailsTest12.class, + LogDetailsTest13.class,LogDetailsTest14.class,LogDetailsTest15.class,LogDetailsTest16.class, TopicBeanTest.class,TopicBeanTest2.class,TopicBeanTest3.class, + TopicBeanTest4.class,TopicBeanTest5.class,TopicBeanTest6.class,TopicBeanTest7.class,TopicBeanTest8.class,TopicBeanTest9.class,TopicBeanTest10.class,}) + +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest.java new file mode 100644 index 0000000..8d83821 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest.java @@ -0,0 +1,70 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetPublisherId() { + + LogDetails details = new LogDetails(); + + details.getPublisherId(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGetPublisherLogDetails(){ + + LogDetails details = new LogDetails(); + details.setTotalMessageCount(1); + details.setConsumeTimestamp("02-27-2018"); + details.setSubscriberGroupId("1"); + details.setSubscriberId("1"); + assertEquals(details.getTotalMessageCount(),1); + assertEquals(details.getConsumeTimestamp(),"02-27-2018"); + assertEquals(details.getSubscriberId(),"1"); + assertEquals(details.getSubscriberGroupId(),"1"); + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest10.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest10.java new file mode 100644 index 0000000..c80062f --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest10.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest10 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetPublishTimestamp() { + + LogDetails details = new LogDetails(); + + details.getPublishTimestamp(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest11.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest11.java new file mode 100644 index 0000000..83b91cb --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest11.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest11 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetMessageLengthInBytes() { + + LogDetails details = new LogDetails(); + + details.getMessageLengthInBytes(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest12.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest12.java new file mode 100644 index 0000000..214876a --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest12.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest12 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetConsumeTimestamp() { + + LogDetails details = new LogDetails(); + + details.getConsumeTimestamp(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest13.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest13.java new file mode 100644 index 0000000..d3f6f23 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest13.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest13 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetTotalMessageCount() { + + LogDetails details = new LogDetails(); + + details.getTotalMessageCount(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest14.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest14.java new file mode 100644 index 0000000..b3dd6b8 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest14.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. 
All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest14 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testIsTransactionEnabled() { + + LogDetails details = new LogDetails(); + + details.isTransactionEnabled(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest15.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest15.java new file mode 100644 index 0000000..d3da5a0 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest15.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest15 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetTransactionIdTs() { + + LogDetails details = new LogDetails(); + + details.getTransactionIdTs(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest16.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest16.java new file mode 100644 index 0000000..95cee27 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest16.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest16 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetServerIp() { + + LogDetails details = new LogDetails(); + + details.getServerIp(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest17.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest17.java new file mode 100644 index 0000000..39104be --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest17.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest17 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetPublisherLogDetails() { + + LogDetails details = new LogDetails(); + + details.getPublisherLogDetails(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest18.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest18.java new file mode 100644 index 0000000..031b4d9 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest18.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest18 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetPublisherLogDetails() { + + LogDetails details = new LogDetails(); + + details.getPublisherLogDetails(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest2.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest2.java new file mode 100644 index 0000000..7b951d4 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest2.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest2 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetTransactionId() { + + LogDetails details = new LogDetails(); + + details.getTransactionId(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest3.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest3.java new file mode 100644 index 0000000..fbb0ced --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest3.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest3 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetTopicId() { + + LogDetails details = new LogDetails(); + + details.getTopicId(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest4.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest4.java new file mode 100644 index 0000000..baee017 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest4.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest4 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetSubscriberGroupId() { + + LogDetails details = new LogDetails(); + + details.getSubscriberGroupId(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest5.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest5.java new file mode 100644 index 0000000..36136e4 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest5.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest5 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetSubscriberId() { + + LogDetails details = new LogDetails(); + + details.getSubscriberId(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest6.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest6.java new file mode 100644 index 0000000..ca583f9 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest6.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest6 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetPublisherIp() { + + LogDetails details = new LogDetails(); + + details.getPublisherIp(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest7.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest7.java new file mode 100644 index 0000000..da03335 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest7.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest7 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetMessageBatchId() { + + LogDetails details = new LogDetails(); + + details.getMessageBatchId(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest8.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest8.java new file mode 100644 index 0000000..d67bee6 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest8.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest8 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetMessageTimestamp() { + + LogDetails details = new LogDetails(); + + details.getMessageTimestamp(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest9.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest9.java new file mode 100644 index 0000000..afd1913 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/LogDetailsTest9.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class LogDetailsTest9 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetMessageSequence() { + + LogDetails details = new LogDetails(); + + details.getMessageSequence(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TestRunner.java new file mode 100644 index 0000000..f65bdfa --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest.java new file mode 100644 index 0000000..844fc08 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetTopicName() { + + TopicBean bean = new TopicBean(); + + bean.getTopicName(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest10.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest10.java new file mode 100644 index 0000000..e8e343d --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest10.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest10 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSetTransactionEnabled() { + + TopicBean bean = new TopicBean(); + bean.setTransactionEnabled(true); + + /* String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True"));*/ + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest2.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest2.java new file mode 100644 index 0000000..789df84 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest2.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest2 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSetTopicName() { + + TopicBean bean = new TopicBean(); + bean.setTopicName("testTopic"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest3.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest3.java new file mode 100644 index 0000000..9d2819d --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest3.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest3 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetPartitionCount() { + + TopicBean bean = new TopicBean(); + bean.getPartitionCount(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest4.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest4.java new file mode 100644 index 0000000..15dd5e5 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest4.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest4 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSetPartitionCount() { + + TopicBean bean = new TopicBean(); + bean.setPartitionCount(8); + + /* String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True"));*/ + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest5.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest5.java new file mode 100644 index 0000000..85ec6f9 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest5.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest5 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetReplicationCount() { + + TopicBean bean = new TopicBean(); + bean.getReplicationCount(); + + /* String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True"));*/ + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest6.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest6.java new file mode 100644 index 0000000..819431a --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest6.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest6 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSetReplicationCount() { + + TopicBean bean = new TopicBean(); + bean.setReplicationCount(3); + + /* String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True"));*/ + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest7.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest7.java new file mode 100644 index 0000000..0a90aa6 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest7.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest7 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testIsTransactionEnabled() { + + TopicBean bean = new TopicBean(); + bean.isTransactionEnabled(); + + /* String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True"));*/ + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest8.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest8.java new file mode 100644 index 0000000..8ecc561 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest8.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest8 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetTopicDescription() { + + TopicBean bean = new TopicBean(); + bean.getTopicDescription(); + + /* String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True"));*/ + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest9.java b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest9.java new file mode 100644 index 0000000..85c566c --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/beans/TopicBeanTest9.java @@ -0,0 +1,55 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.beans; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.TopicBean; + +public class TopicBeanTest9 { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSetTopicDescription() { + + TopicBean bean = new TopicBean(); + bean.setTopicDescription("testing topic"); + + /* String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True"));*/ + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java new file mode 100644 index 0000000..892d969 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java @@ -0,0 +1,169 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ + + package org.onap.dmaap.mr.cambria.embed; + +import java.io.File; +import java.util.Arrays; +import java.util.Map; +import java.util.Properties; + +import org.apache.commons.io.FileUtils; +import org.apache.curator.framework.CuratorFramework; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.backends.kafka.KafkaPublisher; +import org.onap.dmaap.dmf.mr.backends.memory.MemoryMetaBroker; +import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueue; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.CreateTopicsResult; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.common.KafkaFuture; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaConsumerFactory; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.beans.DMaaPMetricsSet; +import org.onap.dmaap.dmf.mr.beans.DMaaPZkClient; +import org.onap.dmaap.dmf.mr.beans.DMaaPZkConfigDb; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPCuratorFactory; +import org.onap.dmaap.dmf.mr.utils.PropertyReader; +import com.att.nsa.security.db.BaseNsaApiDbImpl; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; +import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory; + + +public class EmbedConfigurationReader { + private static final String DEFAULT_KAFKA_LOG_DIR = "/kafka_embedded"; + public static final String TEST_TOPIC = "testTopic"; + private static final int BROKER_ID = 0; + private static final int BROKER_PORT = 5000; + private static final String LOCALHOST_BROKER = String.format("localhost:%d", BROKER_PORT); + + private static final String DEFAULT_ZOOKEEPER_LOG_DIR = "/zookeeper"; + private static final int ZOOKEEPER_PORT = 2000; + private static final String ZOOKEEPER_HOST = String.format("localhost:%d", ZOOKEEPER_PORT); + + private static final String groupId = "groupID"; + String dir; + private AdminClient fKafkaAdminClient; + KafkaLocal kafkaLocal; + + public void setUp() throws Exception { + + ClassLoader classLoader = getClass().getClassLoader(); + AJSCPropertiesMap.refresh(new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile())); + + Properties kafkaProperties; + Properties zkProperties; + + try { + //load properties + dir = new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()).getParent(); + kafkaProperties = getKafkaProperties(dir + DEFAULT_KAFKA_LOG_DIR, BROKER_PORT, BROKER_ID); + zkProperties = getZookeeperProperties(ZOOKEEPER_PORT,dir + DEFAULT_ZOOKEEPER_LOG_DIR); + + //start kafkaLocalServer + kafkaLocal = new KafkaLocal(kafkaProperties, zkProperties); + + Map map = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop); + map.put(CambriaConstants.kSetting_ZkConfigDbServers, ZOOKEEPER_HOST); + map.put("kafka.client.zookeeper", ZOOKEEPER_HOST); + map.put("kafka.metadata.broker.list", LOCALHOST_BROKER); + + DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader()); + + final Properties props = new Properties (); + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092" ); + props.put("sasl.jaas.config", 
"org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret'"); + props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT"); + props.put("sasl.mechanism", "PLAIN"); + fKafkaAdminClient = AdminClient.create ( props ); + + // if(!AdminUtils.topicExists(dMaaPZkClient, TEST_TOPIC)) + // AdminUtils.createTopic(dMaaPZkClient, TEST_TOPIC, 3, 1, new Properties()); + final NewTopic topicRequest = new NewTopic ( TEST_TOPIC, 3, new Integer(1).shortValue () ); + fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) ); + Thread.sleep(5000); + } catch (Exception e){ + e.printStackTrace(System.out); + } + } + + private static Properties getKafkaProperties(String logDir, int port, int brokerId) { + Properties properties = new Properties(); + properties.put("port", port + ""); + properties.put("broker.id", brokerId + ""); + properties.put("log.dir", logDir); + properties.put("zookeeper.connect", ZOOKEEPER_HOST); + properties.put("default.replication.factor", "1"); + properties.put("delete.topic.enable", "true"); + properties.put("consumer.timeout.ms", -1); + return properties; + } + + private static Properties getZookeeperProperties(int port, String zookeeperDir) { + Properties properties = new Properties(); + properties.put("clientPort", port + ""); + properties.put("dataDir", zookeeperDir); + return properties; + } + + public void tearDown() throws Exception { + DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader()); + if(fKafkaAdminClient!=null) + fKafkaAdminClient.deleteTopics(Arrays.asList(TEST_TOPIC)); + //AdminUtils.deleteTopic(dMaaPZkClient, TEST_TOPIC); + //dMaaPZkClient.delete(dir + DEFAULT_KAFKA_LOG_DIR); + //dMaaPZkClient.delete(dir + DEFAULT_ZOOKEEPER_LOG_DIR); + kafkaLocal.stop(); + FileUtils.cleanDirectory(new File(dir + DEFAULT_KAFKA_LOG_DIR)); + } + + + public ConfigurationReader buildConfigurationReader() throws Exception { + + setUp(); + + PropertyReader propertyReader = new PropertyReader(); + DMaaPMetricsSet dMaaPMetricsSet = new DMaaPMetricsSet(propertyReader); + DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(propertyReader); + DMaaPZkConfigDb dMaaPZkConfigDb = new DMaaPZkConfigDb(dMaaPZkClient, propertyReader); + CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader()); + DMaaPKafkaConsumerFactory dMaaPKafkaConsumerFactory = new DMaaPKafkaConsumerFactory(dMaaPMetricsSet, curatorFramework,null); + MemoryQueue memoryQueue = new MemoryQueue(); + MemoryMetaBroker memoryMetaBroker = new MemoryMetaBroker(memoryQueue, dMaaPZkConfigDb); + BaseNsaApiDbImpl baseNsaApiDbImpl = new BaseNsaApiDbImpl<>(dMaaPZkConfigDb, new NsaSimpleApiKeyFactory()); + DMaaPAuthenticator dMaaPAuthenticator = new DMaaPAuthenticatorImpl<>(baseNsaApiDbImpl); + KafkaPublisher kafkaPublisher = new KafkaPublisher(propertyReader); + DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker = new DMaaPKafkaMetaBroker(propertyReader, dMaaPZkClient, dMaaPZkConfigDb); + + return new ConfigurationReader(propertyReader, + dMaaPMetricsSet, dMaaPZkClient, dMaaPZkConfigDb, kafkaPublisher, + curatorFramework, dMaaPKafkaConsumerFactory, dMaaPKafkaMetaBroker, + memoryQueue, memoryMetaBroker, baseNsaApiDbImpl, dMaaPAuthenticator); + + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java new file mode 100644 index 0000000..7cdd6b1 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/embed/KafkaLocal.java @@ -0,0 +1,58 @@ +/*- + * 
============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.embed; + +import java.io.IOException; +import java.util.Properties; + +import kafka.server.KafkaConfig; +import kafka.server.KafkaServerStartable; + + +public class KafkaLocal { + + public KafkaServerStartable kafka; + public ZooKeeperLocal zookeeper; + + public KafkaLocal(Properties kafkaProperties, Properties zkProperties) throws IOException, InterruptedException{ + KafkaConfig kafkaConfig = new KafkaConfig(kafkaProperties); + + //start local zookeeper + System.out.println("starting local zookeeper..."); + zookeeper = new ZooKeeperLocal(zkProperties); + System.out.println("done"); + + //start local kafka broker + kafka = new KafkaServerStartable(kafkaConfig); + System.out.println("starting local kafka broker..."); + kafka.startup(); + System.out.println("done"); + } + + + public void stop(){ + //stop kafka broker + System.out.println("stopping kafka..."); + kafka.shutdown(); + System.out.println("done"); + } + +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java new file mode 100644 index 0000000..57d87c7 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/embed/ZooKeeperLocal.java @@ -0,0 +1,59 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.embed; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Properties; + +import org.apache.zookeeper.server.ServerConfig; +import org.apache.zookeeper.server.ZooKeeperServerMain; +import org.apache.zookeeper.server.quorum.QuorumPeerConfig; + +public class ZooKeeperLocal { + + ZooKeeperServerMain zooKeeperServer; + + public ZooKeeperLocal(Properties zkProperties) throws FileNotFoundException, IOException{ + QuorumPeerConfig quorumConfiguration = new QuorumPeerConfig(); + try { + quorumConfiguration.parseProperties(zkProperties); + } catch(Exception e) { + throw new RuntimeException(e); + } + + zooKeeperServer = new ZooKeeperServerMain(); + final ServerConfig configuration = new ServerConfig(); + configuration.readFrom(quorumConfiguration); + + + new Thread() { + public void run() { + try { + zooKeeperServer.runFromConfig(configuration); + } catch (IOException e) { + System.out.println("ZooKeeper Failed"); + e.printStackTrace(System.err); + } + } + }.start(); + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPCambriaExceptionMapperTest.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPCambriaExceptionMapperTest.java new file mode 100644 index 0000000..2a764f2 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPCambriaExceptionMapperTest.java @@ -0,0 +1,60 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.exception; + +import static org.junit.Assert.*; +import org.onap.dmaap.dmf.mr.exception.DMaaPCambriaExceptionMapper; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import org.onap.dmaap.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class DMaaPCambriaExceptionMapperTest { + + @Before + public void setUp() throws Exception { + DMaaPCambriaExceptionMapper exception = new DMaaPCambriaExceptionMapper(); + } + + @After + public void tearDown() throws Exception { + + } + + + @Test + public void testToResponse() { + + DMaaPCambriaExceptionMapper mapper = new DMaaPCambriaExceptionMapper(); + + try { + mapper.toResponse(null); + } catch (NullPointerException e) { + assertTrue(true); + } + + + + } + +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPErrorMessagesTest.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPErrorMessagesTest.java new file mode 100644 index 0000000..977f8ab --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPErrorMessagesTest.java @@ -0,0 +1,372 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.exception; + +import static org.junit.Assert.*; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import org.onap.dmaap.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class DMaaPErrorMessagesTest { + + @Before + public void setUp() throws Exception { + + } + + @After + public void tearDown() throws Exception { + + } + + + @Test + public void testGetMsgSizeExceeds() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getMsgSizeExceeds(); + assertTrue(true); + + } + + @Test + public void testSetMsgSizeExceeds() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setMsgSizeExceeds("200"); + assertTrue(true); + + } + + @Test + public void testGetNotFound() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getNotFound(); + assertTrue(true); + + } + + @Test + public void testSetNotFound() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setNotFound("not found"); + assertTrue(true); + + } + + @Test + public void testGetServerUnav() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getServerUnav(); + assertTrue(true); + + } + + @Test + public void testSetServerUnav() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setServerUnav("server1"); + assertTrue(true); + + } + + @Test + public void testGetMethodNotAllowed() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getMethodNotAllowed(); + assertTrue(true); + + } + + @Test + public void testSetMethodNotAllowed() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setMethodNotAllowed("server2"); + assertTrue(true); + + } + + + @Test + public void testGetBadRequest() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getBadRequest(); + assertTrue(true); + + } + + @Test + public void testSetBadRequest() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setBadRequest("badRequest"); + assertTrue(true); + + } + + @Test + public void testGetNwTimeout() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getNwTimeout(); + assertTrue(true); + + } + + @Test + public void testSetNwTimeout() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setNwTimeout("12:00:00"); + assertTrue(true); + + } + + @Test + public void testGetNotPermitted1() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getNotPermitted1(); + assertTrue(true); + + } + + @Test + public void testSetNotPermitted1() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setNotPermitted1("not permitted"); + assertTrue(true); + + } + + @Test + public void testGetNotPermitted2() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getNotPermitted2(); + assertTrue(true); + + } + + @Test + public void testSetNotPermitted2() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setNotPermitted2("not permitted2"); + assertTrue(true); + + } + + @Test + public void testGetTopicsfailure() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getTopicsfailure(); + assertTrue(true); + + } + + @Test + public void testSetTopicsfailure() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setTopicsfailure("failure"); + assertTrue(true); + + } + + @Test + public void testGetTopicDetailsFail() { + + DMaaPErrorMessages msg = 
new DMaaPErrorMessages(); + msg.getTopicDetailsFail(); + assertTrue(true); + + } + + @Test + public void testSetTopicDetailsFail() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setTopicDetailsFail("topic details fail"); + assertTrue(true); + + } + + @Test + public void testGetCreateTopicFail() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getCreateTopicFail(); + assertTrue(true); + + } + + @Test + public void testSetCreateTopicFail() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setCreateTopicFail("topic details fail"); + assertTrue(true); + + } + + @Test + public void testGetIncorrectJson() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getIncorrectJson(); + assertTrue(true); + + } + + @Test + public void testSetIncorrectJson() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setIncorrectJson("incorrect Json"); + assertTrue(true); + + } + + @Test + public void testGetDeleteTopicFail() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getDeleteTopicFail(); + assertTrue(true); + + } + + @Test + public void testSetDeleteTopicFail() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setDeleteTopicFail("delete tpic fail"); + assertTrue(true); + + } + + @Test + public void testGetConsumeMsgError() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getConsumeMsgError(); + assertTrue(true); + + } + + @Test + public void testSetConsumeMsgError() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setConsumeMsgError("consume message error"); + assertTrue(true); + + } + + + @Test + public void testGetPublishMsgError() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getPublishMsgError(); + assertTrue(true); + + } + + @Test + public void testSetPublishMsgError() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setPublishMsgError("publish message error"); + assertTrue(true); + + } + + @Test + public void testGetPublishMsgCount() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getPublishMsgCount(); + assertTrue(true); + + } + + @Test + public void testSetPublishMsgCount() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setPublishMsgCount("200"); + assertTrue(true); + + } + + @Test + public void testGetAuthFailure() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getAuthFailure(); + assertTrue(true); + + } + + @Test + public void testSetAuthFailure() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setAuthFailure("auth failure"); + assertTrue(true); + + } + + @Test + public void testGetTopicNotExist() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.getTopicNotExist(); + assertTrue(true); + + } + + @Test + public void testSetTopicNotExist() { + + DMaaPErrorMessages msg = new DMaaPErrorMessages(); + msg.setTopicNotExist("toopic doesn't exist"); + assertTrue(true); + + } + + +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPWebExceptionMapperTest.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPWebExceptionMapperTest.java new file mode 100644 index 0000000..64d1c7c --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/exception/DMaaPWebExceptionMapperTest.java @@ -0,0 +1,60 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual 
Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.exception; + +import static org.junit.Assert.*; +import org.onap.dmaap.dmf.mr.exception.DMaaPWebExceptionMapper; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import org.onap.dmaap.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class DMaaPWebExceptionMapperTest { + + @Before + public void setUp() throws Exception { + + } + + @After + public void tearDown() throws Exception { + + } + + + @Test + public void testToResponse() { + + DMaaPWebExceptionMapper msg = new DMaaPWebExceptionMapper(); + + try { + msg.toResponse(null); + } catch (Exception e) { + assertTrue(true); + } + + + + } + +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/ErrorResponseTest.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/ErrorResponseTest.java new file mode 100644 index 0000000..66242dc --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/exception/ErrorResponseTest.java @@ -0,0 +1,146 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.exception; + +import static org.junit.Assert.*; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import org.onap.dmaap.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class ErrorResponseTest { + + @Before + public void setUp() throws Exception { + + } + + @After + public void tearDown() throws Exception { + + } + + + @Test + public void testGetHttpStatusCode() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.getHttpStatusCode(); + assertTrue(true); + + + } + + @Test + public void tesSGetHttpStatusCode() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.setHttpStatusCode(200); + assertTrue(true); + + + } + + @Test + public void testGetMrErrorCode() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.getMrErrorCode(); + assertTrue(true); + + + } + + @Test + public void testSetMrErrorCode() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.setMrErrorCode(500); + assertTrue(true); + + + } + + @Test + public void testGetErrorMessage() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.getErrorMessage(); + assertTrue(true); + + + } + + @Test + public void testSetErrorMessage() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.setErrorMessage("no error"); + assertTrue(true); + + + } + + @Test + public void testToString() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.toString(); + assertTrue(true); + + + } + + @Test + public void testGetErrMapperStr1() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.setHelpURL("/help"); + assertTrue(true); + + + } + + @Test + public void testGetErrMapperStr() { + + ErrorResponse resp = new ErrorResponse(200, 500, "no error"); + + resp.getHelpURL(); + assertTrue(true); + + + } + + + +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/JUnitTestSuite.java new file mode 100644 index 0000000..e7aedac --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/exception/JUnitTestSuite.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.exception; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DMaaPCambriaExceptionMapperTest.class, + DMaaPErrorMessagesTest.class, DMaaPWebExceptionMapperTest.class, ErrorResponseTest.class}) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/exception/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/exception/TestRunner.java new file mode 100644 index 0000000..cd5a2ff --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/exception/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.exception; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/listener/CambriaServletContextListenerTest.java b/src/test/java/org/onap/dmaap/mr/cambria/listener/CambriaServletContextListenerTest.java new file mode 100644 index 0000000..8405617 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/listener/CambriaServletContextListenerTest.java @@ -0,0 +1,79 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + + package org.onap.dmaap.mr.cambria.listener; + +import static org.junit.Assert.*; + +import javax.servlet.ServletContextEvent; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.listener.CambriaServletContextListener; + +public class CambriaServletContextListenerTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testContextDestroyed() { + CambriaServletContextListener listener = new CambriaServletContextListener(); + try { + listener.contextDestroyed(null); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testContextInitialized() { + CambriaServletContextListener listener = new CambriaServletContextListener(); + + try { + listener.contextInitialized(null); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + + +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/cambria/listener/DME2EndPointLoaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/listener/DME2EndPointLoaderTest.java new file mode 100644 index 0000000..2ea58e2 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/listener/DME2EndPointLoaderTest.java @@ -0,0 +1,78 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.listener; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.listener.DME2EndPointLoader; + +public class DME2EndPointLoaderTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testPublishEndPoints() { + DME2EndPointLoader loader = DME2EndPointLoader.getInstance(); + + + try { + loader.publishEndPoints(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testUnPublishEndPoints() { + DME2EndPointLoader loader = DME2EndPointLoader.getInstance(); + + + try { + loader.unPublishEndPoints(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/listener/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/listener/JUnitTestSuite.java new file mode 100644 index 0000000..38efa3b --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/listener/JUnitTestSuite.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + + package org.onap.dmaap.mr.cambria.listener; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DME2EndPointLoaderTest.class, CambriaServletContextListenerTest.class }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/listener/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/listener/TestRunner.java new file mode 100644 index 0000000..636c7b4 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/listener/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.listener; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImpl.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImpl.java new file mode 100644 index 0000000..94fdb6b --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImpl.java @@ -0,0 +1,71 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metabroker; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.metabroker.Broker; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +public class BrokerImpl implements Broker { + + @Override + public List getAllTopics() throws ConfigDbException { + // TODO Auto-generated method stub + Topic top = new TopicImplem(); + + List list = new ArrayList(); + + for (int i = 0; i < 5; i++) { + top = new TopicImplem(); + list.add(top); + + } + + return null; + + } + + @Override + public Topic getTopic(String topic) throws ConfigDbException { + // TODO Auto-generated method stub + return new TopicImplem(); + } + + @Override + public Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas, + boolean transactionEnabled) throws TopicExistsException, CambriaApiException { + // TODO Auto-generated method stub + return new TopicImplem(topic, description, ownerApiKey, transactionEnabled); + } + + @Override + public void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException { + // TODO Auto-generated method stub + Topic top = new TopicImplem(); + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImplTest.java new file mode 100644 index 0000000..0f72eea --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/BrokerImplTest.java @@ -0,0 +1,109 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + + package org.onap.dmaap.mr.cambria.metabroker; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +public class BrokerImplTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetOwners() { + + try { + new BrokerImpl().getAllTopics(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + assertTrue(true); + } + + @Test + public void testGetTopic() { + + try { + new BrokerImpl().getTopic("topicName"); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + assertTrue(true); + + } + + @Test + public void testCreateTopic() { + + try { + new BrokerImpl().createTopic("topicName", "testing topic", "owner123", 3, 3, true); + + } catch (CambriaApiException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + assertTrue(true); + + } + + @Test + public void testDeleteTopic() { + + try { + new BrokerImpl().deleteTopic("topicName"); + } catch (CambriaApiException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + assertTrue(true); + + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/JUnitTestSuite.java new file mode 100644 index 0000000..4224aae --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/JUnitTestSuite.java @@ -0,0 +1,42 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metabroker; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ BrokerImplTest.class, TopicImplemTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TestRunner.java new file mode 100644 index 0000000..072c9e3 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metabroker; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplTest.java new file mode 100644 index 0000000..fabe91c --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplTest.java @@ -0,0 +1,25 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metabroker; + +public class TopicImplTest { + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplem.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplem.java new file mode 100644 index 0000000..02cf9b3 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplem.java @@ -0,0 +1,140 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metabroker; + +import java.util.Set; + +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.NsaAcl; +import com.att.nsa.security.NsaApiKey; + +public class TopicImplem implements Topic { + private String name, owner, description; + boolean isTransactionEnabled; + private Set set = null; + private NsaAcl readerAcl, writerAcl; + + public TopicImplem() { + name = getName(); + owner = getOwner(); + description = getDescription(); + isTransactionEnabled = true; + readerAcl = getReaderAcl(); + writerAcl = getWriterAcl(); + } + + public TopicImplem(String topic, String description, String ownerApiKey, boolean transactionEnabled) { + + this.name = topic; + this.owner = ownerApiKey; + this.description = description; + isTransactionEnabled = transactionEnabled; + + + } + @Override + public Set getOwners() { + // TODO Auto-generated method stub + for (int i = 0; i < 5; i++) { + set.add("string" + (i + 1)); + } + return set; + } + + @Override + public String getName() { + // TODO Auto-generated method stub + return "testTopic"; + } + + @Override + public String getOwner() { + // TODO Auto-generated method stub + return "owner"; + } + + @Override + public String getDescription() { + // TODO Auto-generated method stub + return "topic for testing purposes"; + } + + @Override + public boolean isTransactionEnabled() { + // TODO Auto-generated method stub + return true; + } + + @Override + public NsaAcl getReaderAcl() { + // TODO Auto-generated method stub + return new NsaAcl(); + } + + @Override + public NsaAcl getWriterAcl() { + // TODO Auto-generated method stub + return new NsaAcl(); + } + + @Override + public void checkUserRead(NsaApiKey user) throws AccessDeniedException { + // TODO Auto-generated method stub + NsaApiKey u = user; + } + + @Override + public void checkUserWrite(NsaApiKey user) throws AccessDeniedException { + // TODO Auto-generated method stub + + NsaApiKey u = user; + } + + @Override + public void permitWritesFromUser(String publisherId, NsaApiKey asUser) + throws AccessDeniedException, ConfigDbException { + // TODO Auto-generated method stub + String id = publisherId; + + } + + @Override + public void denyWritesFromUser(String publisherId, NsaApiKey asUser) + throws AccessDeniedException, ConfigDbException { + // TODO Auto-generated method stub + String id = publisherId; + + } + + @Override + public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException, ConfigDbException { + // TODO Auto-generated method stub + String id = consumerId; + } + + @Override + public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException, ConfigDbException { + // TODO Auto-generated method stub + String id = consumerId; + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplemTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplemTest.java new file mode 100644 index 0000000..279c8bb --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metabroker/TopicImplemTest.java @@ -0,0 +1,176 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metabroker; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +public class TopicImplemTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + + @Test + public void testGetOwners() { + + assertNotNull(new TopicImplem().getOwner()); + + } + + @Test + public void testGetName() { + + assertNotNull(new TopicImplem().getName()); + + } + + @Test + public void testGetOwner() { + + assertNotNull(new TopicImplem().getOwner()); + + } + + @Test + public void testGetDescription() { + + assertNotNull(new TopicImplem().getDescription()); + + } + + @Test + public void testIsTransactionEnabled() { + + assertTrue(new TopicImplem().isTransactionEnabled()); + + } + + @Test + public void testGetReaderAcl() { + new TopicImplem().getReaderAcl(); + assertTrue(true); + + } + + @Test + public void testGetWriterAcl() { + new TopicImplem().getReaderAcl(); + assertTrue(true); + + } + + + @Test + public void testCheckUserRead() { + try { + new TopicImplem().checkUserRead(null); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testCheckUserWrite() { + try { + new TopicImplem().checkUserWrite(null); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testPermitWritesFromUser() { + try { + new TopicImplem().permitWritesFromUser("publisherId", null); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testDenyWritesFromUser() { + try { + new TopicImplem().denyWritesFromUser("publisherId", null); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testPermitReadsByUser() { + try { + new TopicImplem().permitReadsByUser("consumerId", null); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testDenyReadsByUser() { + try { + new TopicImplem().denyReadsByUser("consumerId", null); + } catch (AccessDeniedException e) { + // TODO 
Auto-generated catch block + e.printStackTrace(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/CambriaPublisherUtilityTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/CambriaPublisherUtilityTest.java new file mode 100644 index 0000000..a73e77b --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/CambriaPublisherUtilityTest.java @@ -0,0 +1,95 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaPublisherUtility; + +public class CambriaPublisherUtilityTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testEscape() { + + CambriaPublisherUtility utility = new CambriaPublisherUtility(); + + utility.escape("testTopic"); + assertTrue(true); + + } + + @Test + public void testMakeUrl() { + + CambriaPublisherUtility utility = new CambriaPublisherUtility(); + + utility.makeUrl("testTopic"); + assertTrue(true); + + } + + @Test + public void testMakeConsumerUrl() { + + CambriaPublisherUtility utility = new CambriaPublisherUtility(); + + utility.makeConsumerUrl("testTopic", "CG1", "23"); + assertTrue(true); + + } + + @Test + public void testCreateHostsList() { + + CambriaPublisherUtility utility = new CambriaPublisherUtility(); + + try { + utility.createHostsList(null); + } catch (NullPointerException e) { + assertTrue(true); + } + + + } + + @Test + public void testHostForString() { + + CambriaPublisherUtility utility = new CambriaPublisherUtility(); + + utility.hostForString("hello"); + assertTrue(true); + + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java new file mode 100644 index 0000000..51334e8 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/DMaaPCambriaClientFactoryTest.java @@ -0,0 +1,171 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher; + +import static org.junit.Assert.*; + +import java.util.ArrayList; +import java.util.Collection; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.listener.DME2EndPointLoader; +import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaConsumer; +import org.onap.dmaap.dmf.mr.metrics.publisher.CambriaPublisherUtility; +import org.onap.dmaap.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory; + +public class DMaaPCambriaClientFactoryTest { + + private Collection hostSet; + + private String[] hostSetArray; + @Before + public void setUp() throws Exception { + hostSet = new ArrayList(); + + hostSetArray = new String[10]; + + for (int i = 0; i < 10; i++) { + hostSet.add("host" + (i+1)); + hostSetArray[i] = "host" + (i+1); + } + + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testCreateConsumer() { + + + + DMaaPCambriaClientFactory.createConsumer("hostList", "testTopic"); + assertTrue(true); + + } + + @Test + public void testCreateConsumer2() { + + + try { + DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic"); + } catch (NullPointerException e) { + assertTrue(true); + } + + + } + + @Test + public void testCreateConsumer3() { + + DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "filter"); + assertTrue(true); + + } + + @Test + public void testCreateConsumer4() { + DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23"); + assertTrue(true); + + } + + @Test + public void testCreateConsumer5() { + + DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23", 100, 20); + assertTrue(true); + + } + + @Test + public void testCreateConsumer6() { + + + DMaaPCambriaClientFactory.createConsumer("hostList", "testTopic", "CG1", "23", 100, 20, "filter", "apikey", "apisecret"); + assertTrue(true); + + } + + @Test + public void testCreateConsumer7() { + + DMaaPCambriaClientFactory.createConsumer(hostSet, "testTopic", "CG1", "23", 100, 20, "filter", "apikey", "apisecret"); + assertTrue(true); + + } + + @Test + public void testCreateSimplePublisher() { + + DMaaPCambriaClientFactory.createSimplePublisher("hostList", "testTopic"); + assertTrue(true); + + } + + @Test + public void testCreateBatchingPublisher() { + + DMaaPCambriaClientFactory.createBatchingPublisher("hostList", "testTopic", 100, 50); + assertTrue(true); + + } + + @Test + public void testCreateBatchingPublisher2() { + + DMaaPCambriaClientFactory.createBatchingPublisher("hostList", "testTopic", 100, 50, true); + assertTrue(true); + + } + + @Test + public void testCreateBatchingPublisher3() { + + DMaaPCambriaClientFactory.createBatchingPublisher(hostSetArray, "testTopic", 100, 50, true); + assertTrue(true); + + } + + @Test + 
public void testCreateBatchingPublisher4() { + + DMaaPCambriaClientFactory.createBatchingPublisher(hostSet, "testTopic", 100, 50, true); + assertTrue(true); + + } + + @Test + public void $testInject() { + + DMaaPCambriaClientFactory factory = new DMaaPCambriaClientFactory(); + factory.$testInject(null); + assertTrue(true); + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/JUnitTestSuite.java new file mode 100644 index 0000000..87b6e8b --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/JUnitTestSuite.java @@ -0,0 +1,42 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DMaaPCambriaClientFactoryTest.class, CambriaPublisherUtilityTest.class}) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/TestRunner.java new file mode 100644 index 0000000..b7e997f --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/CambriaBaseClientTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/CambriaBaseClientTest.java new file mode 100644 index 0000000..d3deffe --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/CambriaBaseClientTest.java @@ -0,0 +1,97 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher.impl; + +import static org.junit.Assert.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; + +import org.onap.dmaap.dmf.mr.metrics.publisher.impl.CambriaBaseClient; +import com.att.eelf.configuration.EELFLogger; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class CambriaBaseClientTest { + + private CambriaBaseClient client = null; + @Before + public void setUp() throws Exception { + + Collection hosts = new ArrayList(); + + for (int i = 0; i < 5; i++) { + hosts.add("host"+(i+1)); + } + + + client = new CambriaBaseClient(hosts, "client1"); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testClose() { + client.close(); + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGetLog() { + client.getLog(); + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testLogTo() { + client.logTo(null); + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + public JSONArray getJSONArray() { + + String[] data = {"stringone", "stringtwo"}; + JSONArray array = new JSONArray(Arrays.asList(data)); + + return array; + } + + @Test + public void testJsonArrayToSet() { + client.jsonArrayToSet(getJSONArray()); + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/ClockTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/ClockTest.java new file mode 100644 index 0000000..241c5f6 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/ClockTest.java @@ -0,0 +1,84 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher.impl; + +import static org.junit.Assert.*; + + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.metrics.publisher.impl.Clock; + +public class ClockTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetIt() { + + Clock.getIt(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testNow() { + + Clock.now(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testNowImpl() { + + Clock clock = new Clock(); + clock.nowImpl(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testRegister() { + + Clock clock = new Clock(); + Clock.register(clock); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java new file mode 100644 index 0000000..c5fc5b6 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImplTest.java @@ -0,0 +1,94 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + +package org.onap.dmaap.mr.cambria.metrics.publisher.impl; + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl; + +public class DMaaPCambriaConsumerImplTest { + + private DMaaPCambriaConsumerImpl consumer = null; + @Before + public void setUp() throws Exception { + + Collection hosts = new ArrayList(); + + for (int i = 0; i < 5; i++) { + hosts.add("host"+(i+1)); + } + consumer = new DMaaPCambriaConsumerImpl(hosts, "testTopic", "consumerGroup1", "1", 2000, 200, "hi", + "9AMFFNIZpusO54oG","6BY86UQcio2LJdgyU7Cwg5oQ"); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testStringToList() { + + List response = DMaaPCambriaConsumerImpl.stringToList("Hello world, this is a test string"); + assertNotNull(response); + + + } + + @Test + public void testFetch() { + + Iterable response = null; + boolean flag = true; + try { + response = consumer.fetch(200, 20); + } catch (IOException e) { + flag = false; + // TODO Auto-generated catch block + e.printStackTrace(); + } + if(flag) { + assertNotNull(response); + } else { + assertTrue(true); + } + + } + + + @Test + public void testCreateUrlPath() { + + String response = consumer.createUrlPath(200, 20); + assertNotNull(response); + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java new file mode 100644 index 0000000..bd1f221 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisherTest.java @@ -0,0 +1,87 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher.impl; + +import static org.junit.Assert.*; + +import java.util.ArrayList; +import java.util.Collection; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher; + + + +public class DMaaPCambriaSimplerBatchPublisherTest { + + private DMaaPCambriaSimplerBatchPublisher publisher = null; + @Before + public void setUp() throws Exception { + + Collection hosts = new ArrayList(); + + for (int i = 0; i < 5; i++) { + hosts.add("host"+(i+1)); + } + + publisher = new DMaaPCambriaSimplerBatchPublisher.Builder().againstUrls(hosts).onTopic("testTopic") + .batchTo(200, 100).compress(true).build(); + + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSend() { + + publisher.send("hello", "test message"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testClose() { + + publisher.close(); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + @Test + public void testGetPendingMEssageCount() { + + publisher.getPendingMessageCount(); + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/JUnitTestSuite.java new file mode 100644 index 0000000..2b72f70 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/JUnitTestSuite.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher.impl; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DMaaPCambriaSimplerBatchPublisherTest.class, ClockTest.class, + CambriaBaseClientTest.class, DMaaPCambriaConsumerImplTest.class}) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/TestRunner.java new file mode 100644 index 0000000..895e93f --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/metrics/publisher/impl/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.metrics.publisher.impl; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaEventSetTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaEventSetTest.java new file mode 100644 index 0000000..bf4060e --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaEventSetTest.java @@ -0,0 +1,76 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources; + +import static org.junit.Assert.*; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet; +import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream; + + +public class CambriaEventSetTest { + + private CambriaOutboundEventStream coes = null; + + @Before + public void setUp() throws Exception { + + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testNext() { + CambriaEventSet event = null; + String str = "contains text to be converted to InputStream"; + + InputStream stream = new ByteArrayInputStream(str.getBytes()); + try { + event = new CambriaEventSet("application/cambria", stream, true, "hi"); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + try { + event.next(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + assertTrue(true); + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaOutboundEventStreamTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaOutboundEventStreamTest.java new file mode 100644 index 0000000..970bc60 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/CambriaOutboundEventStreamTest.java @@ -0,0 +1,106 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream; + +public class CambriaOutboundEventStreamTest { + + private CambriaOutboundEventStream coes = null; + + @Before + public void setUp() throws Exception { + coes = new CambriaOutboundEventStream.Builder(null).timeout(10).limit(1).filter(CambriaConstants.kNoFilter) + .pretty(false).withMeta(true).build(); + DMaaPContext ctx = new DMaaPContext(); + //ctx.set... + coes.setDmaapContext(ctx); + coes.setTopic(null); + coes.setTransEnabled(true); + coes.setTopicStyle(true); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetSentCount() { + int sentCount = coes.getSentCount(); + assertTrue("Doesn't match, got " + sentCount, sentCount==0);; + } + + @Test + public void testWrite() { + //fail("Not yet implemented"); + } + + @Test + public void testForEachMessage() { + //fail("Not yet implemented"); + } + + @Test + public void testGetDmaapContext() { + DMaaPContext ctx = coes.getDmaapContext(); + + assertNotNull(ctx); + } + + @Test + public void testSetDmaapContext() { + DMaaPContext ctx = new DMaaPContext(); + coes.setDmaapContext(ctx); + assertTrue(ctx.equals(coes.getDmaapContext())); + } + + @Test + public void testGetTopic() { + coes.getTopic(); + assertTrue(true); + } + + @Test + public void testSetTopic() { + //fail("Not yet implemented"); + } + + @Test + public void testSetTopicStyle() { + coes.setTopicStyle(true); + assertTrue(true); + } + + @Test + public void testSetTransEnabled() { + coes.setTransEnabled(true); + assertTrue(true); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/JUnitTestSuite.java new file mode 100644 index 0000000..d35105a --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/JUnitTestSuite.java @@ -0,0 +1,42 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ CambriaEventSetTest.class,CambriaOutboundEventStreamTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/TestRunner.java new file mode 100644 index 0000000..92b94d5 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java new file mode 100644 index 0000000..78197bd --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaJsonStreamReaderTest.java @@ -0,0 +1,72 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources.streamReaders; + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.InputStream; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaJsonStreamReader; + +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; + +public class CambriaJsonStreamReaderTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + + @Test + public void testNext() { + + CambriaJsonStreamReader test = null; + + String source = "{'name': 'tester', 'id': '2'}"; + InputStream stream = null; + try { + stream = IOUtils.toInputStream(source, "UTF-8"); + test = new CambriaJsonStreamReader(stream,"hello"); + test.next(); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } catch (CambriaApiException e1) { + e1.printStackTrace(); + } + + assertTrue(true); + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java new file mode 100644 index 0000000..1b5f405 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaRawStreamReaderTest.java @@ -0,0 +1,72 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources.streamReaders; + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.InputStream; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaRawStreamReader; + +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.CambriaApiException; + +public class CambriaRawStreamReaderTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testNext() { + + CambriaRawStreamReader test = null; + message msg = null; + + String source = "{'name': 'tester', 'id': '2'}"; + InputStream stream = null; + try { + stream = IOUtils.toInputStream(source, "UTF-8"); + test = new CambriaRawStreamReader(stream,"hello"); + msg = test.next(); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } catch (CambriaApiException e1) { + e1.printStackTrace(); + } + + assertNotNull(msg); + + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaStreamReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaStreamReaderTest.java new file mode 100644 index 0000000..5a58fd8 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaStreamReaderTest.java @@ -0,0 +1,72 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources.streamReaders; + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.InputStream; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaStreamReader; + +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.CambriaApiException; + +public class CambriaStreamReaderTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testNext() { + + CambriaStreamReader test = null; + + + String source = "{'name': 'tester', 'id': '2', 'message': 'hello'}"; + InputStream stream = null; + try { + stream = IOUtils.toInputStream(source, "UTF-8"); + test = new CambriaStreamReader(stream); + test.next(); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } catch (CambriaApiException e1) { + e1.printStackTrace(); + } + + assertTrue(true); + + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java new file mode 100644 index 0000000..2b60b95 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/CambriaTextStreamReaderTest.java @@ -0,0 +1,71 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources.streamReaders; + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.InputStream; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.resources.streamReaders.CambriaStreamReader; + +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.CambriaApiException; + +public class CambriaTextStreamReaderTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testNext() { + + CambriaStreamReader test = null; + + String source = "{'name': 'tester', 'id': '2', 'message': 'hello'}"; + InputStream stream = null; + try { + stream = IOUtils.toInputStream(source, "UTF-8"); + test = new CambriaStreamReader(stream); + test.next(); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } catch (CambriaApiException e1) { + e1.printStackTrace(); + } + + assertTrue(true); + + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/JUnitTestSuite.java new file mode 100644 index 0000000..a01fa6c --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/JUnitTestSuite.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources.streamReaders; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ CambriaJsonStreamReaderTest.class,CambriaRawStreamReaderTest.class, + CambriaStreamReaderTest.class, CambriaTextStreamReaderTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/TestRunner.java new file mode 100644 index 0000000..5da7245 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/resources/streamReaders/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.resources.streamReaders; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAAFAuthenticatorImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAAFAuthenticatorImplTest.java new file mode 100644 index 0000000..7019a2b --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAAFAuthenticatorImplTest.java @@ -0,0 +1,83 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.security;
+
+import static org.junit.Assert.*;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
+
+
+
+public class DMaaPAAFAuthenticatorImplTest {
+
+	private MockHttpServletRequest request = null;
+	@Before
+	public void setUp() throws Exception {
+		//creating servlet object
+		request = new MockHttpServletRequest();
+		request.setServerName("www.example.com");
+		request.setRequestURI("/foo");
+		request.setQueryString("param1=value1&param");
+		String url = request.getRequestURL() + "?" + request.getQueryString();
+
+
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void testAafAuthentication() {
+
+		DMaaPAAFAuthenticatorImpl authenticator = new DMaaPAAFAuthenticatorImpl();
+		authenticator.aafAuthentication(request, "admin");
+		assertTrue(true);
+
+	}
+
+
+
+	/*@Test
+	public void testAafPermissionString() {
+
+		DMaaPAAFAuthenticatorImpl authenticator = new DMaaPAAFAuthenticatorImpl();
+		try {
+			authenticator.aafPermissionString("testTopic", "admin");
+		} catch (CambriaApiException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+
+		assertTrue(true);
+
+	}*/
+
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAuthenticatorImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAuthenticatorImplTest.java
new file mode 100644
index 0000000..4ec5c38
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/mr/cambria/security/DMaaPAuthenticatorImplTest.java
@@ -0,0 +1,129 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.security;
+
+import static org.junit.Assert.*;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+
+
+
+public class DMaaPAuthenticatorImplTest {
+
+	private MockHttpServletRequest request = null;
+	@Before
+	public void setUp() throws Exception {
+		//creating servlet object
+		request = new MockHttpServletRequest();
+		request.setServerName("www.example.com");
+		request.setRequestURI("/foo");
+		request.setQueryString("param1=value1&param");
+		String url = request.getRequestURL() + "?" + request.getQueryString();
+
+
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	/*@Test
+	public void testAuthenticate() {
+
+		DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null);
+
+		DMaaPContext ctx = new DMaaPContext();
+		authenticator.authenticate(ctx);
+		assertTrue(true);
+
+	}*/
+
+
+
+	/*@Test
+	public void testAafPermissionString() {
+
+		DMaaPAAFAuthenticatorImpl authenticator = new DMaaPAAFAuthenticatorImpl();
+		try {
+			authenticator.aafPermissionString("testTopic", "admin");
+		} catch (CambriaApiException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+
+		assertTrue(true);
+
+	}*/
+
+	@Test
+	public void testIsAuthentic() {
+
+		DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null);
+
+		authenticator.isAuthentic(request);
+		assertTrue(true);
+
+	}
+
+	@Test
+	public void testQualify() {
+
+		DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null);
+
+		authenticator.qualify(request);
+		assertTrue(true);
+
+	}
+
+	@Test
+	public void testAddAuthenticator() {
+
+		DMaaPAuthenticatorImpl authenticator = new DMaaPAuthenticatorImpl(null);
+		DMaaPAuthenticator authenticator2 = new DMaaPAuthenticatorImpl(null);
+
+		authenticator.addAuthenticator(authenticator2);
+		assertTrue(true);
+
+	}
+
+	/*@Test
+	public void testGetAuthenticatedUser() {
+
+
+		DMaaPContext ctx = new DMaaPContext();
+		DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+		assertTrue(true);
+
+	}
+	*/
+
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/security/JUnitTestSuite.java
new file mode 100644
index 0000000..60ae849
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/mr/cambria/security/JUnitTestSuite.java
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.security; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DMaaPAAFAuthenticatorImplTest.class,DMaaPAuthenticatorImplTest.class, +}) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/security/TestRunner.java new file mode 100644 index 0000000..e294538 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/security/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.security;
+
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+import org.apache.log4j.Logger;
+
+public class TestRunner {
+	private static final Logger LOGGER = Logger.getLogger(TestRunner.class);
+
+	public static void main(String[] args) {
+		// TODO Auto-generated method stub
+		Result result = JUnitCore.runClasses(JUnitTestSuite.class);
+		for (Failure failure : result.getFailures()) {
+			LOGGER.info(failure.toString());
+
+		}
+		LOGGER.info(result.wasSuccessful());
+	}
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java
new file mode 100644
index 0000000..7bb26d0
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPMechIdAuthenticatorTest.java
@@ -0,0 +1,107 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.security.impl;
+
+import static org.junit.Assert.*;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.security.impl.DMaaPMechIdAuthenticator;
+
+
+
+public class DMaaPMechIdAuthenticatorTest {
+
+	private MockHttpServletRequest request = null;
+	@Before
+	public void setUp() throws Exception {
+		//creating servlet object
+		request = new MockHttpServletRequest();
+		request.setServerName("www.example.com");
+		request.setRequestURI("/foo");
+		request.setQueryString("param1=value1&param");
+		String url = request.getRequestURL() + "?" + request.getQueryString();
+
+
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+
+	@Test
+	public void testQualify() {
+
+		DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
+
+		authenticator.qualify(request);
+		assertTrue(true);
+
+	}
+
+	@Test
+	public void testAuthenticate() {
+
+		DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
+
+		DMaaPContext ctx = new DMaaPContext();
+		authenticator.authenticate(ctx);
+		assertTrue(true);
+
+	}
+
+	@Test
+	public void testIsAuthentic() {
+		DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
+
+		authenticator.isAuthentic(request);
+		assertTrue(true);
+
+	}
+
+
+
+	@Test
+	public void testAddAuthenticator() {
+
+		DMaaPMechIdAuthenticator authenticator = new DMaaPMechIdAuthenticator();
+		DMaaPMechIdAuthenticator authenticator2 = new DMaaPMechIdAuthenticator();
+
+		authenticator.addAuthenticator(authenticator2);
+		assertTrue(true);
+
+	}
+
+
+
+
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java
new file mode 100644
index 0000000..24e43f5
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/DMaaPOriginalUebAuthenticatorTest.java
@@ -0,0 +1,117 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+ package org.onap.dmaap.mr.cambria.security.impl;
+
+import static org.junit.Assert.*;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.mock.web.MockHttpServletRequest;
+
+import org.onap.dmaap.dmf.mr.CambriaApiException;
+import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator;
+import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
+import org.onap.dmaap.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
+
+
+
+public class DMaaPOriginalUebAuthenticatorTest {
+
+	private MockHttpServletRequest request = null;
+	@Before
+	public void setUp() throws Exception {
+		//creating servlet object
+		request = new MockHttpServletRequest();
+		request.setServerName("www.example.com");
+		request.setRequestURI("/foo");
+		request.setQueryString("param1=value1&param");
+		String url = request.getRequestURL() + "?"
+ request.getQueryString(); + + + } + + @After + public void tearDown() throws Exception { + } + + + @Test + public void testQualify() { + + DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100); + + authenticator.qualify(request); + assertTrue(true); + + } + + @Test + public void testAuthenticate() { + + DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100); + + DMaaPContext ctx = new DMaaPContext(); + authenticator.authenticate(ctx); + assertTrue(true); + + } + + @Test + public void testAuthenticate2() { + + DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100); + DMaaPContext ctx = new DMaaPContext(); + authenticator.authenticate("google.com", "xHMDwk25kwkkyi26JH","Dec 16, 2016", "Dec/16/2016","123"); + + + } + + @Test + public void testIsAuthentic() { + DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100); + + authenticator.isAuthentic(request); + assertTrue(true); + + } + + + + @Test + public void testAddAuthenticator() { + + DMaaPOriginalUebAuthenticator authenticator = new DMaaPOriginalUebAuthenticator(null, 100); + DMaaPOriginalUebAuthenticator authenticator2 = new DMaaPOriginalUebAuthenticator(null, 100); + + authenticator.addAuthenticator(authenticator2); + assertTrue(true); + + } + + + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/impl/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/JUnitTestSuite.java new file mode 100644 index 0000000..3a54ac5 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/JUnitTestSuite.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.security.impl; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DMaaPMechIdAuthenticatorTest.class, DMaaPOriginalUebAuthenticatorTest.class, +}) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/security/impl/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/TestRunner.java new file mode 100644 index 0000000..2699acd --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/security/impl/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.security.impl; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/AdminServiceImplemTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/AdminServiceImplemTest.java new file mode 100644 index 0000000..1895ac6 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/AdminServiceImplemTest.java @@ -0,0 +1,182 @@ +/*- +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.impl.AdminServiceImpl; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.limits.Blacklist; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class }) +public class AdminServiceImplemTest { + + @InjectMocks + AdminServiceImpl adminServiceImpl; + + @Mock + DMaaPContext dmaapContext; + @Mock + ConsumerFactory factory; + + @Mock + ConfigurationReader configReader; + @Mock + Blacklist Blacklist; + + @Before + public void setUp() throws Exception { + + MockitoAnnotations.initMocks(this); + PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); + NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); + + PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader); + PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory); + PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist); + + PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + } + + @After + public void tearDown() throws Exception { + } + + // ISSUES WITH AUTHENTICATION + @Test + public void testShowConsumerCache() { + + try { + adminServiceImpl.showConsumerCache(dmaapContext); + } catch (IOException | AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testDropConsumerCache() { + + try { + adminServiceImpl.dropConsumerCache(dmaapContext); + } catch (IOException | AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGetBlacklist() { + + 
try { + adminServiceImpl.getBlacklist(dmaapContext); + } catch (IOException | AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testAddToBlacklist() { + + try { + adminServiceImpl.addToBlacklist(dmaapContext, "120.120.120.120"); + } catch (IOException | AccessDeniedException | ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testRemoveFromBlacklist() { + + try { + adminServiceImpl.removeFromBlacklist(dmaapContext, "120.120.120.120"); + } catch (IOException | AccessDeniedException | ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ApiKeysServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ApiKeysServiceImplTest.java new file mode 100644 index 0000000..a0ad709 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ApiKeysServiceImplTest.java @@ -0,0 +1,331 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.beans.ApiKeyBean; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.impl.ApiKeysServiceImpl; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.Emailer; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.limits.Blacklist; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.NsaApiDb.KeyExistsException; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class }) +public class ApiKeysServiceImplTest { + + @InjectMocks + ApiKeysServiceImpl service; + + @Mock + DMaaPContext dmaapContext; + @Mock + ConsumerFactory factory; + + @Mock + ConfigurationReader configReader; + @Mock + Blacklist Blacklist; + @Mock + Emailer emailer; + + @Before + public void setUp() throws Exception { + + MockitoAnnotations.initMocks(this); + PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); + NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); + + PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader); + PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory); + PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist); + + PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb); + PowerMockito.when(configReader.getSystemEmailer()).thenReturn(emailer); + PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + + } + + @After + public void tearDown() throws Exception { + } + + + @Test + public void testGetAllApiKeys() { + + service = new ApiKeysServiceImpl(); + try { + service.getAllApiKeys(dmaapContext); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testGetApiKey() { + + ApiKeysServiceImpl service = new ApiKeysServiceImpl(); + try { + service.getApiKey(dmaapContext, "testkey"); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + } + assertTrue(true); + + 
} + + @Test + public void testGetApiKey_error() { + + ApiKeysServiceImpl service = new ApiKeysServiceImpl(); + try { + service.getApiKey(dmaapContext, "k35Hdw6Sde"); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + } + + @Test + public void testCreateApiKey() { + + ApiKeysServiceImpl service = new ApiKeysServiceImpl(); + try { + service.createApiKey(dmaapContext, new ApiKeyBean("test@onap.com", "testing apikey bean")); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (KeyExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch(NoClassDefFoundError e) { + + } + assertTrue(true); + } + + @Test + public void testUpdateApiKey() { + + ApiKeysServiceImpl service = new ApiKeysServiceImpl(); + try { + + service.updateApiKey(dmaapContext, "admin", new ApiKeyBean("test@onapt.com", "testing apikey bean")); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + @Test + public void testUpdateApiKey_error() { + + ApiKeysServiceImpl service = new ApiKeysServiceImpl(); + try { + + service.updateApiKey(dmaapContext, null, new ApiKeyBean("test@onapt.com", "testing apikey bean")); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + assertTrue(true); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testDeleteApiKey() { + + ApiKeysServiceImpl service = new ApiKeysServiceImpl(); + try { + + service.deleteApiKey(dmaapContext, null); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testDeleteApiKey_error() { + + ApiKeysServiceImpl service = new ApiKeysServiceImpl(); + try { + + service.deleteApiKey(dmaapContext, "admin"); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + NsaApiDb fApiKeyDb= new NsaApiDb() { + + + Set keys = 
new HashSet<>(Arrays.asList("testkey","admin")); + + + @Override + public NsaSimpleApiKey createApiKey(String arg0, String arg1) + throws com.att.nsa.security.db.NsaApiDb.KeyExistsException, ConfigDbException { + // TODO Auto-generated method stub + return new NsaSimpleApiKey(arg0, arg1); + } + + @Override + public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean deleteApiKey(String arg0) throws ConfigDbException { + // TODO Auto-generated method stub + return false; + } + + @Override + public Map loadAllKeyRecords() throws ConfigDbException { + // TODO Auto-generated method stub + return null; + } + + @Override + public Set loadAllKeys() throws ConfigDbException { + // TODO Auto-generated method stub + + return keys ; + } + + @Override + public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException { + if(!keys.contains(arg0)){ + return null; + } + return new NsaSimpleApiKey(arg0, "password"); + } + + @Override + public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { + // TODO Auto-generated method stub + + } + }; +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/BaseTransactionDbImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/BaseTransactionDbImplTest.java new file mode 100644 index 0000000..257202c --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/BaseTransactionDbImplTest.java @@ -0,0 +1,158 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.junit.Assert.*; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.service.impl.BaseTransactionDbImpl; +import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObjDB.KeyExistsException; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class BaseTransactionDbImplTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testCreateTransactionObj() { + + + try { + + BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null); + service.createTransactionObj("transition"); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (KeyExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testSaveTransactionObj() { + + + try { + + BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null); + service.saveTransactionObj(null); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testLoadTransactionObj() { + + try { + + BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null); + service.loadTransactionObj("34"); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + @Test + public void testLoadAllTransactionObjs() { + + try { + + BaseTransactionDbImpl service = new BaseTransactionDbImpl(null, null); + service.loadAllTransactionObjs(); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + + +} diff --git 
a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/EventsServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/EventsServiceImplTest.java new file mode 100644 index 0000000..1e677d8 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/EventsServiceImplTest.java @@ -0,0 +1,312 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.mockito.Mockito.when; +import static org.mockito.Matchers.anyString; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.ConcurrentModificationException; +import java.util.Map; +import java.util.Properties; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +import com.att.ajsc.beans.PropertiesMapBean; +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticator; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.service.impl.EventsServiceImpl; +import org.onap.dmaap.dmf.mr.utils.PropertyReader; 
+import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue; +import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.limits.Blacklist; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +import kafka.admin.AdminUtils; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({ DMaaPAuthenticatorImpl.class, AJSCPropertiesMap.class }) +public class EventsServiceImplTest { + + private InputStream iStream = null; + DMaaPContext dMaapContext = new DMaaPContext(); + EventsServiceImpl service = new EventsServiceImpl(); + DMaaPErrorMessages pErrorMessages = new DMaaPErrorMessages(); + @Mock + ConfigurationReader configurationReader; + @Mock + Blacklist blacklist; + @Mock + DMaaPAuthenticator dmaaPAuthenticator; + @Mock + DMaaPAAFAuthenticator dmaapAAFauthenticator; + @Mock + NsaApiKey user; + @Mock + NsaSimpleApiKey nsaSimpleApiKey; + @Mock + DMaaPKafkaMetaBroker dmaapKafkaMetaBroker; + @Mock + Topic createdTopic; + @Mock + ConsumerFactory factory; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); + String source = "source of my InputStream"; + iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); + + MockHttpServletRequest request = new MockHttpServletRequest(); + MockHttpServletResponse response = new MockHttpServletResponse(); + dMaapContext.setRequest(request); + dMaapContext.setResponse(response); + when(blacklist.contains(anyString())).thenReturn(false); + when(configurationReader.getfIpBlackList()).thenReturn(blacklist); + dMaapContext.setConfigReader(configurationReader); + + service.setErrorMessages(pErrorMessages); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout")).thenReturn("100"); + + AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout"); + + } + + @Test(expected = CambriaApiException.class) + public void testGetEvents() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException { + when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); + when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + service.getEvents(dMaapContext, "testTopic", "CG1", "23"); + } + + @Test(expected = CambriaApiException.class) + public void testGetEventsBlackListErr() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException { + when(blacklist.contains(anyString())).thenReturn(true); + when(configurationReader.getfIpBlackList()).thenReturn(blacklist); + dMaapContext.setConfigReader(configurationReader); + service.getEvents(dMaapContext, "testTopic", "CG1", "23"); + } + + @Test(expected = CambriaApiException.class) + public void testGetEventsNoTopicError() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException { + 
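+ // Scenario note (descriptive comment, not in the original patch): the meta broker below is stubbed to return null for "testTopic", so getEvents should end in the CambriaApiException expected by the annotation above.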
when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); + when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(null); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + service.getEvents(dMaapContext, "testTopic", "CG1", "23"); + } + + @Test(expected = CambriaApiException.class) + public void testGetEventsuserNull() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException { + when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(null); + when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + MockHttpServletRequest mockRequest = new MockHttpServletRequest(); + mockRequest.addHeader("Authorization", "passed"); + dMaapContext.setRequest(mockRequest); + dMaapContext.getRequest().getHeader("Authorization"); + service.getEvents(dMaapContext, "testTopic", "CG1", "23"); + } + + @Test(expected = CambriaApiException.class) + public void testGetEventsExcp2() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException { + when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); + when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + when(configurationReader.getfRateLimiter()).thenThrow(new ConcurrentModificationException("Error occurred")); + service.getEvents(dMaapContext, "testTopic", "CG1", "23"); + } + + @Test(expected = CambriaApiException.class) + public void testPushEvents() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, + invalidSettingValue, loadException { + + // AdminUtils.createTopic(configurationReader.getZk(), "testTopic", 10, + // 1, new Properties()); + + configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); + + when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); + when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + + service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); + + service.getEvents(dMaapContext, "testTopic", "CG1", "23"); + + /* + * String trueValue = "True"; + * assertTrue(trueValue.equalsIgnoreCase("True")); + */ + + } + + @Test(expected = CambriaApiException.class) + public void testPushEventsBlackListedIp() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, 
UnavailableException, IOException, missingReqdSetting, + invalidSettingValue, loadException { + + // AdminUtils.createTopic(configurationReader.getZk(), "testTopic", 10, + // 1, new Properties()); + when(blacklist.contains(anyString())).thenReturn(true); + when(configurationReader.getfIpBlackList()).thenReturn(blacklist); + configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); + when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); + when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + + service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); + + } + + @Test(expected = NullPointerException.class) + public void testPushEventsNoUser() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, + invalidSettingValue, loadException { + + configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); + + when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(null); + when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + MockHttpServletRequest mockRequest = new MockHttpServletRequest(); + mockRequest.addHeader("Authorization", "passed"); + mockRequest.addHeader("Authorization", "passed"); + dMaapContext.setRequest(mockRequest); + dMaapContext.getRequest().getHeader("Authorization"); + service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); + + } + + @Test(expected = CambriaApiException.class) + public void testPushEventsWtTransaction() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, + invalidSettingValue, loadException { + + configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); + + when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); + when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "transidUEBtopicreqd")).thenReturn("true"); + + service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); + + } + + @Test(expected = CambriaApiException.class) + public void testPushEventsWtTransactionError() throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException, + TopicExistsException, AccessDeniedException, UnavailableException, IOException, missingReqdSetting, + invalidSettingValue, loadException { + + configurationReader.setfRateLimiter(new DMaaPCambriaLimiter(new PropertyReader())); + + when(dmaaPAuthenticator.authenticate(dMaapContext)).thenReturn(nsaSimpleApiKey); + 
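+ // Descriptive comment (not in the original patch): the stubs below enable the transaction-id path (transidUEBtopicreqd=true) and make getfPublisher() throw, so pushEvents should fail with the expected CambriaApiException.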
when(configurationReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(configurationReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(createdTopic); + PowerMockito.when(configurationReader.getfConsumerFactory()).thenReturn(factory); + when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "transidUEBtopicreqd")).thenReturn("true"); + when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "event.batch.length")).thenReturn("0"); + when(configurationReader.getfPublisher()).thenThrow(new ConcurrentModificationException("Error occurred")); + + service.pushEvents(dMaapContext, "testTopic", iStream, "3", "12:00:00"); + + } + + @Test + public void testIsTransEnabled1() { + + when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "transidUEBtopicreqd")).thenReturn("true"); + assertTrue(service.isTransEnabled()); + + } + @Test + public void testIsTransEnabled2() { + + when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "transidUEBtopicreqd")).thenReturn("false"); + assertFalse(service.isTransEnabled()); + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/JUnitTestSuite.java new file mode 100644 index 0000000..c437fe4 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/JUnitTestSuite.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import junit.framework.TestSuite; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ UIServiceImplTest.class, AdminServiceImplemTest.class, ApiKeysServiceImplTest.class, + ShowConsumerCacheTest.class,TopicServiceImplTest.class, TransactionServiceImplTest.class, MMServiceImplTest.class, + BaseTransactionDbImplTest.class, MetricsServiceImplTest.class,EventsServiceImplTest.class}) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MMServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MMServiceImplTest.java new file mode 100644 index 0000000..20a8cad --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MMServiceImplTest.java @@ -0,0 +1,382 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.junit.Assert.*; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; +import java.util.ConcurrentModificationException; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import com.att.ajsc.beans.PropertiesMapBean; +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.impl.MMServiceImpl; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.Emailer; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.limits.Blacklist; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import static org.mockito.Matchers.anyString; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class, PropertiesMapBean.class, + AJSCPropertiesMap.class }) +public class MMServiceImplTest { + + @InjectMocks + MMServiceImpl service; + + @Mock + DMaaPContext dmaapContext; + @Mock + ConsumerFactory factory; + @Mock + private DMaaPErrorMessages errorMessages; + @Mock + ConfigurationReader configReader; + @Mock + Blacklist Blacklist; + @Mock + Emailer emailer; + @Mock + DMaaPKafkaMetaBroker dmaapKafkaMetaBroker; + @Mock + Topic metatopic; + + @Before + public void setUp() throws Exception { + + MockitoAnnotations.initMocks(this); + PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); + NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); + + PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader); + PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory); + PowerMockito.when(configReader.getfIpBlackList()).thenReturn(Blacklist); + + PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb); + PowerMockito.when(configReader.getSystemEmailer()).thenReturn(emailer); + PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + + MockHttpServletRequest request = new 
MockHttpServletRequest(); + MockHttpServletResponse response = new MockHttpServletResponse(); + PowerMockito.when(dmaapContext.getRequest()).thenReturn(request); + PowerMockito.when(dmaapContext.getResponse()).thenReturn(response); + + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(PropertiesMapBean.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout")).thenReturn("1000"); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty")).thenReturn("true"); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta")).thenReturn("true"); + PowerMockito.when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testSubscribe_Blacklistip() { + + try { + PowerMockito.when(Blacklist.contains("127.0.0.1")).thenReturn(true); + service.subscribe(dmaapContext, "testTopic", "CG1", "23"); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (UnavailableException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testSubscribe_NullTopic() { + + try { + PowerMockito.when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(null); + service.subscribe(dmaapContext, "testTopic", "CG1", "23"); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (UnavailableException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test(expected = CambriaApiException.class) + public void testSubscribe_NullTopic_Error() throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, + CambriaApiException, IOException { + + PowerMockito.when(configReader.getfMetrics()).thenThrow(new ConcurrentModificationException("Error occurred")); + PowerMockito.when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(metatopic); + service.subscribe(dmaapContext, "testTopic", "CG1", "23"); + } + + @Test + public void testPushEvents_wttransaction() { + + String source = "source of my InputStream"; + + try { + InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); + service.pushEvents(dmaapContext, "msgrtr.apinode.metrics.dmaap", iStream, "3", "12:00:00"); + + } catch (org.json.JSONException e) { + // TODO 
Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (missingReqdSetting e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + @Test(expected = CambriaApiException.class) + public void testPushEvents_wttransaction_error() throws Exception { + + String source = "source of my InputStream"; + + InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(PropertiesMapBean.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "event.batch.length")).thenReturn("-5"); + PowerMockito.when(configReader.getfPublisher()).thenThrow(new ConcurrentModificationException("Error occurred")); + service.pushEvents(dmaapContext, "msgrtr.apinode.metrics.dmaap1", iStream, "3", "12:00:00"); + + } + + @Test + public void testPushEvents() { + + String source = "source of my InputStream"; + + try { + InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); + service.pushEvents(dmaapContext, "testTopic", iStream, "3", "12:00:00"); + + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (missingReqdSetting e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Test + public void testPushEvents_blacklistip() { + + String source = "source of my InputStream"; + + try { + PowerMockito.when(Blacklist.contains("127.0.0.1")).thenReturn(true); + InputStream iStream = new ByteArrayInputStream(source.getBytes("UTF-8")); + service.pushEvents(dmaapContext, "testTopic", iStream, "3", "12:00:00"); + + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } catch (CambriaApiException e) { + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (TopicExistsException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (AccessDeniedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (missingReqdSetting e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + NsaApiDb fApiKeyDb = new NsaApiDb() { + + Set keys = 
new HashSet<>(Arrays.asList("testkey", "admin")); + + @Override + public NsaSimpleApiKey createApiKey(String arg0, String arg1) + throws com.att.nsa.security.db.NsaApiDb.KeyExistsException, ConfigDbException { + // TODO Auto-generated method stub + return new NsaSimpleApiKey(arg0, arg1); + } + + @Override + public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean deleteApiKey(String arg0) throws ConfigDbException { + // TODO Auto-generated method stub + return false; + } + + @Override + public Map loadAllKeyRecords() throws ConfigDbException { + // TODO Auto-generated method stub + return null; + } + + @Override + public Set loadAllKeys() throws ConfigDbException { + // TODO Auto-generated method stub + + return keys; + } + + @Override + public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException { + if (!keys.contains(arg0)) { + return null; + } + return new NsaSimpleApiKey(arg0, "password"); + } + + @Override + public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException { + // TODO Auto-generated method stub + + } + }; + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MessageTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MessageTest.java new file mode 100644 index 0000000..e946b66 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MessageTest.java @@ -0,0 +1,64 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.beans.LogDetails; + +public class MessageTest implements message { + + @Override + public String getKey() { + // TODO Auto-generated method stub + return "123"; + } + + @Override + public String getMessage() { + // TODO Auto-generated method stub + return "Msg"; + } + + @Override + public void setLogDetails(LogDetails logDetails) { + // TODO Auto-generated method stub + + } + + @Override + public LogDetails getLogDetails() { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean isTransactionEnabled() { + // TODO Auto-generated method stub + return false; + } + + @Override + public void setTransactionEnabled(boolean transactionEnabled) { + // TODO Auto-generated method stub + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MetricsServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MetricsServiceImplTest.java new file mode 100644 index 0000000..51ee4d4 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/MetricsServiceImplTest.java @@ -0,0 +1,95 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.service.impl.MetricsServiceImpl; + +public class MetricsServiceImplTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGet() { + + MetricsServiceImpl service = new MetricsServiceImpl(); + try { + service.get(new DMaaPContext()); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + @Test + public void testGetMetricByName() { + + MetricsServiceImpl service = new MetricsServiceImpl(); + try { + service.getMetricByName(new DMaaPContext(), "uptime"); + } catch (org.json.JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + //e.printStackTrace(); + assertTrue(true); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (CambriaApiException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ShowConsumerCacheTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ShowConsumerCacheTest.java new file mode 100644 index 0000000..5d058b9 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/ShowConsumerCacheTest.java @@ -0,0 +1,149 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; +import static org.junit.Assert.assertEquals; +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyInt; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.mockito.runners.MockitoJUnitRunner; + +import com.att.aft.dme2.internal.jettison.json.JSONArray; +import com.att.aft.dme2.internal.jettison.json.JSONException; +import com.att.aft.dme2.internal.jettison.json.JSONObject; + +import com.att.ajsc.beans.PropertiesMapBean; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.TopicBean; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +import jline.internal.Log; +import kafka.consumer.Consumer; + + + +//@RunWith(MockitoJUnitRunner.class) +//@RunWith(PowerMockRunner.class) +//@PrepareForTest(PropertiesMapBean.class) +public class ShowConsumerCacheTest { +/* +@InjectMocks +TopicServiceImpl topicService; + +@Mock +private DMaaPErrorMessages errorMessages; + +@Mock +DMaaPContext dmaapContext; + +@Mock +ConfigurationReader configReader; + + +@Mock +JSONObject consumers; + +@Mock +JSONObject consumerObject; + +@Mock +JSONArray jsonConsumersList; + +@Mock +DMaaPAuthenticator dmaaPAuthenticator; + +@Mock +NsaApiKey user; + +@Mock +NsaSimpleApiKey nsaSimpleApiKey; + +@Mock +HttpServletRequest httpServReq; + + +@Before +public void setUp(){ +MockitoAnnotations.initMocks(this); +} + + +//@Test(expected = DMaaPAccessDeniedException.class) +@Test +public void testShowConsmerCache()throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException, JSONException{ +Assert.assertNotNull(topicService); + +String myName = "Brian"; +Object created = null; +Object accessed = null; +Object log = null; +Object info = null; + +when(consumerObject.put("name", myName)).thenReturn(consumerObject); +when(consumerObject.put("created", created)).thenReturn(consumerObject); 
+when(consumerObject.put("accessed", accessed)).thenReturn(consumerObject); +when(consumerObject.put("accessed", Consumer.class)).thenReturn(consumerObject); +when(jsonConsumersList.put(consumerObject)).thenReturn(null); + +when(consumers.put("consumers", jsonConsumersList)).thenReturn(consumerObject); + + + +}*/ + + +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TestRunner.java new file mode 100644 index 0000000..277950a --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TopicServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TopicServiceImplTest.java new file mode 100644 index 0000000..e5d3233 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TopicServiceImplTest.java @@ -0,0 +1,766 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyInt; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import com.att.ajsc.beans.PropertiesMapBean; +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.beans.TopicBean; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticator; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticator; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.impl.TopicServiceImpl; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.NsaAcl; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +//@RunWith(MockitoJUnitRunner.class) +@RunWith(PowerMockRunner.class) +@PrepareForTest({ PropertiesMapBean.class, DMaaPAuthenticatorImpl.class,AJSCPropertiesMap.class,DMaaPResponseBuilder.class }) +public class TopicServiceImplTest { + + TopicServiceImpl topicService; + + @Mock + private DMaaPErrorMessages errorMessages; + + @Mock + DMaaPContext dmaapContext; + + @Mock + ConfigurationReader configReader; + + @Mock + ServletOutputStream oStream; + + @Mock + DMaaPAuthenticator dmaaPAuthenticator; + + @Mock + DMaaPAAFAuthenticator dmaapAAFauthenticator; + @Mock + NsaApiKey user; + + @Mock + NsaSimpleApiKey nsaSimpleApiKey; + + @Mock + HttpServletRequest httpServReq; + + @Mock + HttpServletResponse httpServRes; + + @Mock + DMaaPKafkaMetaBroker dmaapKafkaMetaBroker; + + @Mock + Topic createdTopic; + + @Mock + NsaAcl nsaAcl; + + @Mock + JSONObject jsonObj; + + @Mock + JSONArray jsonArray; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + topicService = new TopicServiceImpl(); + topicService.setErrorMessages(errorMessages); + NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); + PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); + 
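+ // Descriptive comment (not in the original patch): the static mock above lets the next line stub DMaaPAuthenticatorImpl.getAuthenticatedUser(...) for every test in this class.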
PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); + } + + @Test(expected = NullPointerException.class) + public void testCreateTopicWithEnforcedName() + throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException { + + Assert.assertNotNull(topicService); + PowerMockito.mockStatic(PropertiesMapBean.class); + + when(PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, "enforced.topic.name.AAF")) + .thenReturn("enfTopicName"); + + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(nsaSimpleApiKey); + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + topicService.createTopic(dmaapContext, topicBean); + } + + @Test + public void testCreateTopicWithTopicNameNotEnforced() + throws DMaaPAccessDeniedException, CambriaApiException, ConfigDbException,IOException,TopicExistsException, org.onap.dmaap.dmf.mr.metabroker.Broker1.TopicExistsException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(PropertiesMapBean.class); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + + when(PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, "enforced.topic.name.AAF")) + .thenReturn("enfTopicName"); + + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(nsaSimpleApiKey); + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + + when(nsaAcl.isActive()).thenReturn(true); + when(nsaAcl.getUsers()).thenReturn(new HashSet<>(Arrays.asList("user1,user2".split(",")))); + + when(createdTopic.getName()).thenReturn("topicName"); + when(createdTopic.getOwner()).thenReturn("Owner"); + when(createdTopic.getDescription()).thenReturn("Description"); + when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); + when(createdTopic.getWriterAcl()).thenReturn(nsaAcl); + + when(dmaapKafkaMetaBroker.createTopic(anyString(), anyString(), anyString(), anyInt(), anyInt(), anyBoolean())) + .thenReturn(createdTopic); + + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("NotEnforcedTopicName"); + + topicService.createTopic(dmaapContext, topicBean); + + verify(dmaapKafkaMetaBroker, times(1)).createTopic(anyString(), anyString(), anyString(), anyInt(), anyInt(), + anyBoolean()); + } + + @Test(expected = NullPointerException.class) + public void testCreateTopicNoUserInContextAndNoAuthHeader() + throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(PropertiesMapBean.class); + + when(PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, "enforced.topic.name.AAF")) + .thenReturn("enfTopicName"); + + when(httpServReq.getHeader("Authorization")).thenReturn(null); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + 
when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + topicService.createTopic(dmaapContext, topicBean); + } + + @Test(expected = NullPointerException.class) + public void testCreateTopicNoUserInContextAndAuthHeaderAndPermitted() + throws DMaaPAccessDeniedException, CambriaApiException, IOException, TopicExistsException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(PropertiesMapBean.class); + + when(PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, "enforced.topic.name.AAF")) + .thenReturn("enfTopicName"); + + when(httpServReq.getHeader("Authorization")).thenReturn("Authorization"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + + // when(dmaapAAFauthenticator.aafAuthentication(httpServReq, + // anyString())).thenReturn(false); + + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + topicService.createTopic(dmaapContext, topicBean); + } + + @Test(expected = TopicExistsException.class) + public void testGetTopics_null_topic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException { + + Assert.assertNotNull(topicService); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(null); + + topicService.getTopic(dmaapContext, "topicName"); + } + + @Test + public void testGetTopics_NonNull_topic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException { + + Assert.assertNotNull(topicService); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + + when(dmaapKafkaMetaBroker.getTopic(anyString())).thenReturn(createdTopic); + + when(createdTopic.getName()).thenReturn("topicName"); + when(createdTopic.getDescription()).thenReturn("topicDescription"); + when(createdTopic.getOwners()).thenReturn(new HashSet<>(Arrays.asList("user1,user2".split(",")))); + + when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); + when(createdTopic.getWriterAcl()).thenReturn(nsaAcl); + + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(httpServRes.getOutputStream()).thenReturn(oStream); + + topicService.getTopic(dmaapContext, "topicName"); + } + + @Test(expected = TopicExistsException.class) + public void testGetPublishersByTopicName_nullTopic() throws DMaaPAccessDeniedException, 
CambriaApiException, + IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null); + + topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name"); + + } + + @Test + public void testGetPublishersByTopicName_nonNullTopic() throws DMaaPAccessDeniedException, CambriaApiException, + IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic); + when(createdTopic.getWriterAcl()).thenReturn(nsaAcl); + topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name"); + } + + @Test(expected = TopicExistsException.class) + public void testGetConsumersByTopicName_nullTopic() throws DMaaPAccessDeniedException, CambriaApiException, + IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null); + + topicService.getConsumersByTopicName(dmaapContext, "topicNamespace.name"); + + } + + @Test + public void testGetConsumersByTopicName_nonNullTopic() throws DMaaPAccessDeniedException, CambriaApiException, + IOException, TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic); + + when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); + + topicService.getConsumersByTopicName(dmaapContext, "topicNamespace.name"); + } + + @Test + public void testGetPublishersByTopicName() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + + 
PowerMockito.mockStatic(AJSCPropertiesMap.class); + + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("topicFactoryAAF"); + + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(createdTopic); + + when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); + + topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name"); + } + + @Test(expected=TopicExistsException.class) + public void testGetPublishersByTopicNameError() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + + PowerMockito.mockStatic(AJSCPropertiesMap.class); + + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("topicFactoryAAF"); + + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(httpServReq.getMethod()).thenReturn("HEAD"); + + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.name")).thenReturn(null); + + when(createdTopic.getReaderAcl()).thenReturn(nsaAcl); + + topicService.getPublishersByTopicName(dmaapContext, "topicNamespace.name"); + } + + @Test + public void testdeleteTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.deleteTopic(dmaapContext, "topicNamespace.topic"); + } + + @Test(expected=TopicExistsException.class) + public void testdeleteTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + 
when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.deleteTopic(dmaapContext, "topicNamespace.topic"); + } + + /*@Test(expected=DMaaPAccessDeniedException.class) + public void testdeleteTopic_authHeader() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(null); + topicService.deleteTopic(dmaapContext, "topicNamespace.topic"); + }*/ + + @Test + public void testPermitConsumerForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.permitConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin"); + } + + @Test(expected=TopicExistsException.class) + public void testPermitConsumerForTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, 
AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.permitConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin"); + } + + @Test + public void testdenyConsumerForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.denyConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin"); + } + + @Test(expected=TopicExistsException.class) + public void testdenyConsumerForTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.denyConsumerForTopic(dmaapContext, "topicNamespace.topic", "admin"); + } + + + 
@Test + public void testPermitPublisherForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.permitPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin"); + } + + @Test(expected=TopicExistsException.class) + public void testPermitPublisherForTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.permitPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin"); + } + + @Test + public void testDenyPublisherForTopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + when(dmaapContext.getResponse()).thenReturn(httpServRes); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + 
when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.denyPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin");; + } + + @Test(expected=TopicExistsException.class) + public void testDenyPublisherForTopic_nulltopic() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.denyPublisherForTopic(dmaapContext, "topicNamespace.topic", "admin");; + } + + @Test + public void testGetAllTopics() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.getAllTopics(dmaapContext); + } + + @Test + public void testGetTopics() throws DMaaPAccessDeniedException, CambriaApiException, IOException, + TopicExistsException, JSONException, ConfigDbException, AccessDeniedException { + + Assert.assertNotNull(topicService); + + // PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.mockStatic(AJSCPropertiesMap.class); + PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "msgRtr.topicfactory.aaf")) + .thenReturn("hello"); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + when(dmaaPAuthenticator.authenticate(dmaapContext)).thenReturn(null); + when(httpServReq.getHeader("AppName")).thenReturn("MyApp"); + when(httpServReq.getHeader("Authorization")).thenReturn("Admin"); + 
when(dmaapContext.getRequest()).thenReturn(httpServReq); + + when(configReader.getfSecurityManager()).thenReturn(dmaaPAuthenticator); + when(dmaapContext.getConfigReader()).thenReturn(configReader); + when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(createdTopic); + TopicBean topicBean = new TopicBean(); + topicBean.setTopicName("enfTopicNamePlusExtra"); + + topicService.getTopics(dmaapContext); + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TransactionServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TransactionServiceImplTest.java new file mode 100644 index 0000000..3a14212 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/TransactionServiceImplTest.java @@ -0,0 +1,97 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import com.att.aft.dme2.internal.jettison.json.JSONException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.service.impl.TransactionServiceImpl; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class TransactionServiceImplTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testCheckTransaction() { + + TransactionServiceImpl service = new TransactionServiceImpl(); + service.checkTransaction(new TransactionObj("23", 1100, 1000, 10)); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGetAllTransactionObjs() { + + TransactionServiceImpl service = new TransactionServiceImpl(); + try { + service.getAllTransactionObjs(new DMaaPContext()); + } catch (ConfigDbException | IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGetTransactionObj() { + + TransactionServiceImpl service = new TransactionServiceImpl(); + try { + service.getTransactionObj(new DMaaPContext(), "23"); + } catch (ConfigDbException | IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch 
(JSONException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/service/impl/UIServiceImplTest.java b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/UIServiceImplTest.java new file mode 100644 index 0000000..2fbbec1 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/service/impl/UIServiceImplTest.java @@ -0,0 +1,302 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.service.impl; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.service.impl.UIServiceImpl; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({ DMaaPAuthenticatorImpl.class, DMaaPResponseBuilder.class }) +public class UIServiceImplTest { + + @InjectMocks + UIServiceImpl service; + + @Mock + DMaaPContext dmaapContext; + @Mock + ConsumerFactory factory; + + @Mock + ConfigurationReader configReader; + + @Mock + DMaaPKafkaMetaBroker dmaapKafkaMetaBroker; + + @Mock + Topic metatopic; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); + PowerMockito.mockStatic(DMaaPAuthenticatorImpl.class); + NsaSimpleApiKey user = new NsaSimpleApiKey("admin", "password"); + + PowerMockito.when(dmaapContext.getConfigReader()).thenReturn(configReader); + 
PowerMockito.when(configReader.getfConsumerFactory()).thenReturn(factory); + + PowerMockito.when(configReader.getfApiKeyDb()).thenReturn(fApiKeyDb); + PowerMockito.when(DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext)).thenReturn(user); + PowerMockito.mockStatic(DMaaPResponseBuilder.class); + PowerMockito.when(configReader.getfMetaBroker()).thenReturn(dmaapKafkaMetaBroker); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testHello() { + + try { + service.hello(dmaapContext); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + @Test + public void testGetApiKeysTable() { + + try { + service.getApiKeysTable(dmaapContext); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testGetApiKey() { + + try { + service.getApiKey(dmaapContext, "admin"); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + } + + @Test + public void testGetApiKey_invalidkey() { + + try { + service.getApiKey(dmaapContext, "k56HmWT72J"); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + assertTrue(true); + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + // TODO Auto-generated catch block + assertTrue(true); + } + + } + + @Test + public void testGetTopicsTable() { + + try { + List topics = new ArrayList(); + topics.add(metatopic); + when(dmaapKafkaMetaBroker.getAllTopics()).thenReturn(topics); + service.getTopicsTable(dmaapContext); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + + } + + @Test + public void testGetTopic() { + + try { + when(dmaapKafkaMetaBroker.getTopic("testTopic")).thenReturn(metatopic); + service.getTopic(dmaapContext, "testTopic"); + } catch (NullPointerException e) { + // TODO Auto-generated catch block + // e.printStackTrace(); + + } catch (ConfigDbException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + assertTrue(true); + } + + @Test + public void testGetTopic_nulltopic() { + + try { + when(dmaapKafkaMetaBroker.getTopic("topicNamespace.topic")).thenReturn(null); + service.getTopic(dmaapContext, "testTopic"); + } catch (NullPointerException e) { + // 
TODO Auto-generated catch block
+			// e.printStackTrace();
+		} catch (ConfigDbException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (Exception e) {
+			// TODO Auto-generated catch block
+			assertTrue(true);
+		}
+
+	}
+
+	NsaApiDb<NsaSimpleApiKey> fApiKeyDb = new NsaApiDb<NsaSimpleApiKey>() {
+
+		Set<String> keys = new HashSet<>(Arrays.asList("testkey", "admin"));
+
+		@Override
+		public NsaSimpleApiKey createApiKey(String arg0, String arg1)
+				throws com.att.nsa.security.db.NsaApiDb.KeyExistsException, ConfigDbException {
+			// TODO Auto-generated method stub
+			return new NsaSimpleApiKey(arg0, arg1);
+		}
+
+		@Override
+		public boolean deleteApiKey(NsaSimpleApiKey arg0) throws ConfigDbException {
+			// TODO Auto-generated method stub
+			return false;
+		}
+
+		@Override
+		public boolean deleteApiKey(String arg0) throws ConfigDbException {
+			// TODO Auto-generated method stub
+			return false;
+		}
+
+		@Override
+		public Map<String, NsaSimpleApiKey> loadAllKeyRecords() throws ConfigDbException {
+			Map<String, NsaSimpleApiKey> map = new HashMap<>();
+			map.put("testkey", new NsaSimpleApiKey("testkey", "password"));
+			map.put("admin", new NsaSimpleApiKey("admin", "password"));
+
+			return map;
+		}
+
+		@Override
+		public Set<String> loadAllKeys() throws ConfigDbException {
+			// TODO Auto-generated method stub
+
+			return keys;
+		}
+
+		@Override
+		public NsaSimpleApiKey loadApiKey(String arg0) throws ConfigDbException {
+			if (!keys.contains(arg0)) {
+				return null;
+			}
+			return new NsaSimpleApiKey(arg0, "password");
+		}
+
+		@Override
+		public void saveApiKey(NsaSimpleApiKey arg0) throws ConfigDbException {
+			// TODO Auto-generated method stub
+
+		}
+	};
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/JUnitTestSuite.java
new file mode 100644
index 0000000..eded211
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/mr/cambria/transaction/JUnitTestSuite.java
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.transaction; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ TransactionObjTest.class, TrnRequestTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TestRunner.java new file mode 100644 index 0000000..b2ba612 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.transaction; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/TransactionObjTest.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TransactionObjTest.java new file mode 100644 index 0000000..36cdf92 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TransactionObjTest.java @@ -0,0 +1,175 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.transaction; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; + +public class TransactionObjTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testAsJsonObject() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + + try { + obj.asJsonObject(); + + } catch(NullPointerException e) { + assertTrue(true); + } + + } + + @Test + public void testGetId() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.getId(); + assertTrue(true); + + } + + @Test + public void testSetId() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.setId("23"); + assertTrue(true); + + } + + @Test + public void testGetCreateTime() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.getCreateTime(); + assertTrue(true); + + } + + @Test + public void testSetCreateTime() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.setCreateTime("12:00:00"); + assertTrue(true); + + } + + @Test + public void testSerialize() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.serialize(); + assertTrue(true); + + } + + @Test + public void testGetTotalMessageCount() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.getTotalMessageCount(); + assertTrue(true); + + } + + @Test + public void testSetTotalMessageCount() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.setTotalMessageCount(200); + assertTrue(true); + + } + + @Test + public void testGetSuccessMessageCount() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.getSuccessMessageCount(); + assertTrue(true); + + } + + @Test + public void testSetSuccessMessageCount() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.setSuccessMessageCount(198); + assertTrue(true); + + } + + @Test + public void testGetFailureMessageCount() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.getFailureMessageCount(); + assertTrue(true); + + } + + @Test + public void testSetFailureMessageCount() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.setFailureMessageCount(2); + assertTrue(true); + + } + + @Test + public void testGetfData() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.getfData(); + assertTrue(true); + + } + + @Test + public void testSetfData() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.setfData(null); + assertTrue(true); + + } + + @Test + public void testGetTrnRequest() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.getTrnRequest(); + assertTrue(true); + + } + + @Test + public void testSetTrnRequest() { + TransactionObj obj = new TransactionObj("23", 100, 98, 2); + obj.setTrnRequest(null); + assertTrue(true); + + } + + +} diff --git 
a/src/test/java/org/onap/dmaap/mr/cambria/transaction/TrnRequestTest.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TrnRequestTest.java new file mode 100644 index 0000000..284815e --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/transaction/TrnRequestTest.java @@ -0,0 +1,187 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.transaction; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import org.onap.dmaap.dmf.mr.transaction.TrnRequest; + +public class TrnRequestTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetId() { + TrnRequest req = new TrnRequest(); + + req.getId(); + assertTrue(true); + + } + + @Test + public void testSetId() { + TrnRequest req = new TrnRequest(); + + req.setId("23"); + assertTrue(true); + + } + + @Test + public void testGetRequestCreate() { + TrnRequest req = new TrnRequest(); + + req.getRequestCreate(); + assertTrue(true); + + } + + @Test + public void testSetRequestCreate() { + TrnRequest req = new TrnRequest(); + + req.setRequestCreate("createRequest"); + assertTrue(true); + + } + + @Test + public void testGetRequestHost() { + TrnRequest req = new TrnRequest(); + + req.getRequestHost(); + assertTrue(true); + + } + + @Test + public void testSetRequestHost() { + TrnRequest req = new TrnRequest(); + + req.setRequestHost("requestHost"); + assertTrue(true); + + } + + @Test + public void testGetServerHost() { + TrnRequest req = new TrnRequest(); + + req.getServerHost(); + assertTrue(true); + + } + + @Test + public void testSetServerHost() { + TrnRequest req = new TrnRequest(); + + req.setServerHost("requestHost"); + assertTrue(true); + + } + + @Test + public void testGetMessageProceed() { + TrnRequest req = new TrnRequest(); + + req.getMessageProceed(); + assertTrue(true); + + } + + @Test + public void testSetMessageProceed() { + TrnRequest req = new TrnRequest(); + + req.setMessageProceed("messageProceed"); + assertTrue(true); + + } + + @Test + public void testGetTotalMessage() { + TrnRequest req = new TrnRequest(); + + req.getTotalMessage(); + assertTrue(true); + + } + + @Test + public void testSetTotalMessage() { + TrnRequest req = new TrnRequest(); + + req.setTotalMessage("200"); + assertTrue(true); + + } + + + @Test + public void testGetClientType() { + TrnRequest req = new TrnRequest(); + + req.getClientType(); + assertTrue(true); + + } 
+ + @Test + public void testSetClientType() { + TrnRequest req = new TrnRequest(); + + req.setClientType("admin"); + assertTrue(true); + + } + + @Test + public void testGetUrl() { + TrnRequest req = new TrnRequest(); + + req.getUrl(); + assertTrue(true); + + } + + @Test + public void testSetUrl() { + TrnRequest req = new TrnRequest(); + + req.setUrl("http://google.com"); + assertTrue(true); + + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java new file mode 100644 index 0000000..9feccce --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/DMaaPSimpleTransactionFactoryTest.java @@ -0,0 +1,67 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.transaction.impl; + +import static org.junit.Assert.*; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.onap.dmaap.dmf.mr.transaction.impl.DMaaPSimpleTransactionFactory; + +public class DMaaPSimpleTransactionFactoryTest { + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testMakeNewTransactionObj() { + + DMaaPSimpleTransactionFactory factory = new DMaaPSimpleTransactionFactory(); + + factory.makeNewTransactionObj("{'transactionId': '123', 'totalMessageCount': '200', " + + "'successMessageCount': '200', 'failureMessageCount': '0'}"); + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + + } + + @Test + public void testMakeNewTransactionId() { + + DMaaPSimpleTransactionFactory factory = new DMaaPSimpleTransactionFactory(); + factory.makeNewTransactionId("123"); + + String trueValue = "True"; + assertTrue(trueValue.equalsIgnoreCase("True")); + + } + + + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/JUnitTestSuite.java new file mode 100644 index 0000000..a70cf18 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/JUnitTestSuite.java @@ -0,0 +1,42 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.transaction.impl; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DMaaPSimpleTransactionFactoryTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/TestRunner.java b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/TestRunner.java new file mode 100644 index 0000000..e889ec7 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/transaction/impl/TestRunner.java @@ -0,0 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.transaction.impl; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/ConfigurationReaderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/ConfigurationReaderTest.java new file mode 100644 index 0000000..9a02c38 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/utils/ConfigurationReaderTest.java @@ -0,0 +1,56 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.utils; + +import static org.junit.Assert.assertNotNull; + +import org.junit.After; +import org.junit.Test; + +import org.onap.dmaap.mr.cambria.embed.EmbedConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; + +public class ConfigurationReaderTest { + + EmbedConfigurationReader embedConfigurationReader = new EmbedConfigurationReader(); + + @After + public void tearDown() throws Exception { + embedConfigurationReader.tearDown(); + } + + @Test + public void testConfigurationReader() throws Exception { + + ConfigurationReader configurationReader = embedConfigurationReader.buildConfigurationReader(); + + assertNotNull(configurationReader); + assertNotNull(configurationReader.getfApiKeyDb()); + assertNotNull(configurationReader.getfConfigDb()); + assertNotNull(configurationReader.getfConsumerFactory()); + assertNotNull(configurationReader.getfIpBlackList()); + assertNotNull(configurationReader.getfMetaBroker()); + assertNotNull(configurationReader.getfMetrics()); + assertNotNull(configurationReader.getfPublisher()); + assertNotNull(configurationReader.getfSecurityManager()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPCuratorFactoryTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPCuratorFactoryTest.java new file mode 100644 index 0000000..ff78d7f --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPCuratorFactoryTest.java @@ -0,0 +1,70 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.utils; + +import static org.junit.Assert.*; + +import java.io.File; +import java.util.HashMap; +import java.util.Map; + +import org.apache.curator.framework.CuratorFramework; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.utils.DMaaPCuratorFactory; +import org.onap.dmaap.dmf.mr.utils.PropertyReader; +import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException; +import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile; +import com.att.nsa.drumlin.till.nv.impl.nvReadableTable; + +public class DMaaPCuratorFactoryTest { + + @Before + public void setUp() throws Exception { + ClassLoader classLoader = getClass().getClassLoader(); + AJSCPropertiesMap.refresh(new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile())); + + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testgetCurator() throws loadException { + CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader()); + assertNotNull(curatorFramework); + + Map map = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop); + map.remove(CambriaConstants.kSetting_ZkConfigDbServers); + map.remove(CambriaConstants.kSetting_ZkSessionTimeoutMs); + + + + curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader()); + assertNotNull(curatorFramework); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPResponseBuilderTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPResponseBuilderTest.java new file mode 100644 index 0000000..aaabb7d --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/utils/DMaaPResponseBuilderTest.java @@ -0,0 +1,141 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.utils; + +import static org.junit.Assert.*; + +import java.io.IOException; + +import org.json.JSONException; +import org.json.JSONObject; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; + +public class DMaaPResponseBuilderTest { + + DMaaPContext dMaapContext; + MockHttpServletRequest request; + MockHttpServletResponse response; + + @Before + public void setUp() throws Exception { + + dMaapContext = new DMaaPContext(); + request = new MockHttpServletRequest(); + response = new MockHttpServletResponse(); + dMaapContext.setRequest(request); + dMaapContext.setResponse(response); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testsetNoCacheHeadings(){ + DMaaPResponseBuilder.setNoCacheHeadings(dMaapContext); + assertEquals("no-cache", response.getHeader("Pragma")); + } + + @Test + public void testrespondOk() throws JSONException, IOException{ + JSONObject jsonObject = new JSONObject(); + jsonObject.put("Name", "Test"); + + DMaaPResponseBuilder.respondOk(dMaapContext, jsonObject); + assertEquals("application/json", response.getContentType()); + assertEquals(200, response.getStatus()); + + request.setMethod("HEAD"); + + DMaaPResponseBuilder.respondOk(dMaapContext, jsonObject); + assertEquals("application/json", response.getContentType()); + assertEquals(200, response.getStatus()); + } + + @Test + public void testrespondOkNoContent(){ + DMaaPResponseBuilder.respondOkNoContent(dMaapContext); + assertEquals(204, response.getStatus()); + } + + @Test + public void testrespondOkWithHtml(){ + DMaaPResponseBuilder.respondOkWithHtml(dMaapContext, ""); + + assertEquals("text/html", response.getContentType()); + assertEquals(200, response.getStatus()); + } + + @Test + public void testrespondWithError(){ + DMaaPResponseBuilder.respondWithError(dMaapContext, 500, "InternalServerError"); + assertEquals(500, response.getStatus()); + } + + @Test + public void testrespondWithJsonError(){ + JSONObject o = new JSONObject(); + o.put("status", 500); + o.put("message", "InternalServerError"); + DMaaPResponseBuilder.respondWithError(dMaapContext, 500, o); + assertEquals(500, response.getStatus()); + } + + @Test + public void testrespondWithErrorInJson(){ + DMaaPResponseBuilder.respondWithErrorInJson(dMaapContext, 500, "InternalServerError"); + + assertEquals("application/json", response.getContentType()); + assertEquals(500, response.getStatus()); + } + + @Test + public void testsendErrorAndBody(){ + DMaaPResponseBuilder.sendErrorAndBody(dMaapContext, 500, "InternalServerError", "text/html"); + + assertEquals("text/html", response.getContentType()); + assertEquals(500, response.getStatus()); + + request.setMethod("HEAD"); + + DMaaPResponseBuilder.sendErrorAndBody(dMaapContext, 500, "InternalServerError", "text/html"); + + assertEquals("text/html", response.getContentType()); + assertEquals(500, response.getStatus()); + + } + + @Test + public void testgetStreamForBinaryResponse() throws IOException{ + DMaaPResponseBuilder.getStreamForBinaryResponse(dMaapContext); + + assertEquals("application/octet-stream", response.getContentType()); + assertEquals(200, response.getStatus()); + } + +} diff --git 
a/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java new file mode 100644 index 0000000..b4645a3 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java @@ -0,0 +1,125 @@ +/******************************************************************************* +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + package org.onap.dmaap.mr.cambria.utils; + +import static org.junit.Assert.*; + +import java.security.Principal; +import java.text.SimpleDateFormat; +import java.util.Date; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.http.auth.BasicUserPrincipal; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.springframework.mock.web.MockHttpServletRequest; + +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.utils.Utils; + +public class UtilsTest { + + private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS"; + + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testGetFormattedDate() { + Date now = new Date(); + String dateStr = Utils.getFormattedDate(now); + SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT); + String expectedStr = sdf.format(now); + assertNotNull(dateStr); + assertTrue("Formatted date does not match - expected [" + expectedStr + + "] received [" + dateStr + "]", + dateStr.equalsIgnoreCase(expectedStr)); + } + + @Test + public void testgetUserApiKey(){ + MockHttpServletRequest request = new MockHttpServletRequest(); + request.addHeader(Utils.CAMBRIA_AUTH_HEADER, "User:Password"); + assertEquals("User", Utils.getUserApiKey(request)); + + MockHttpServletRequest request2 = new MockHttpServletRequest(); + Principal principal = new BasicUserPrincipal("User@Test"); + request2.setUserPrincipal(principal); + request2.addHeader("Authorization", "test"); + assertEquals("User", Utils.getUserApiKey(request2)); + + MockHttpServletRequest request3 = new MockHttpServletRequest(); + assertNull(Utils.getUserApiKey(request3)); + } + + @Test + public void testgetFromattedBatchSequenceId(){ + Long x = new Long(1234); + String str = Utils.getFromattedBatchSequenceId(x); + assertEquals("001234", str); + } + + @Test + public void testmessageLengthInBytes(){ + String str = "TestString"; + long length = Utils.messageLengthInBytes(str); + assertEquals(10, length); + assertEquals(0, Utils.messageLengthInBytes(null)); + } + + @Test + public void testgetResponseTransactionId(){ + 
String transactionId = "test123::sampleResponseMessage"; + assertEquals("test123",Utils.getResponseTransactionId(transactionId)); + assertNull(Utils.getResponseTransactionId(null)); + assertNull(Utils.getResponseTransactionId("")); + } + + @Test + public void testgetSleepMsForRate(){ + long x = Utils.getSleepMsForRate(1024.124); + assertEquals(1000, x); + assertEquals(0, Utils.getSleepMsForRate(-1)); + } + + @Test + public void testgetRemoteAddress(){ + DMaaPContext dMaapContext = new DMaaPContext(); + MockHttpServletRequest request = new MockHttpServletRequest(); + + dMaapContext.setRequest(request); + + assertEquals(request.getRemoteAddr(), Utils.getRemoteAddress(dMaapContext)); + + request.addHeader("X-Forwarded-For", "XForward"); + assertEquals("XForward", Utils.getRemoteAddress(dMaapContext)); + + + } +} diff --git a/src/test/java/org/onap/dmaap/mr/filter/ContentLengthFilterTest.java b/src/test/java/org/onap/dmaap/mr/filter/ContentLengthFilterTest.java new file mode 100644 index 0000000..9d329e5 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/filter/ContentLengthFilterTest.java @@ -0,0 +1,88 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP Policy Engine + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.filter;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.onap.dmaap.mr.filter.ContentLengthFilter;
+
+public class ContentLengthFilterTest {
+
+	@Before
+	public void setUp() throws Exception {
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void testDestroy() {
+		ContentLengthFilter filter = new ContentLengthFilter();
+
+		filter.destroy();
+
+		assertTrue(true);
+	}
+
+
+	@Test
+	public void testFilter() {
+		ContentLengthFilter filter = new ContentLengthFilter();
+
+		try {
+			filter.doFilter(null, null, null);
+		} catch (IOException | ServletException e) {
+			// an unexpected checked exception should fail the test
+			fail("Unexpected exception: " + e);
+		} catch (NullPointerException e) {
+			// expected: the filter cannot process a null request, response and chain
+			assertTrue(true);
+		}
+
+
+	}
+
+	@Test
+	public void testInit() {
+		ContentLengthFilter filter = new ContentLengthFilter();
+
+		try {
+			filter.init(null);
+		} catch (ServletException e) {
+			// an unexpected ServletException should fail the test
+			fail("Unexpected exception: " + e);
+		} catch (NullPointerException e) {
+			// expected: the filter cannot be initialized with a null FilterConfig
+			assertTrue(true);
+		}
+
+	}
+
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/filter/DefaultLengthTest.java b/src/test/java/org/onap/dmaap/mr/filter/DefaultLengthTest.java
new file mode 100644
index 0000000..56b8d05
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/mr/filter/DefaultLengthTest.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.filter;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import javax.servlet.ServletException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+
+public class DefaultLengthTest {
+
+	@Before
+	public void setUp() throws Exception {
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void testGetDefaultLength() {
+		DefaultLength length = new DefaultLength();
+
+		length.setDefaultLength("10");
+
+		assertEquals("10", length.getDefaultLength());
+	}
+
+	@Test
+	public void testSetDefaultLength() {
+		DefaultLength length = new DefaultLength();
+
+		length.setDefaultLength("23");
+
+		assertEquals("23", length.getDefaultLength());
+	}
+
+
+
+
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/mr/filter/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/filter/JUnitTestSuite.java
new file mode 100644
index 0000000..a534560
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/mr/filter/JUnitTestSuite.java
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.mr.filter;
+
+import junit.framework.TestSuite;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+import org.junit.runners.Suite.SuiteClasses;
+import org.apache.log4j.Logger;
+
+@RunWith(Suite.class)
+@SuiteClasses({ ContentLengthFilterTest.class, DefaultLengthTest.class,})
+public class JUnitTestSuite {
+	private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class);
+
+	public static void main(String[] args) {
+		LOGGER.info("Running the test suite");
+
+		TestSuite tstSuite = new TestSuite();
+		LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
+	}
+
+}
diff --git a/src/test/java/org/onap/dmaap/mr/filter/TestRunner.java b/src/test/java/org/onap/dmaap/mr/filter/TestRunner.java
new file mode 100644
index 0000000..ec78329
--- /dev/null
+++ b/src/test/java/org/onap/dmaap/mr/filter/TestRunner.java
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * ONAP Policy Engine
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.mr.filter; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/ApiKeyBean.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/ApiKeyBean.java new file mode 100644 index 0000000..7ecc203 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/ApiKeyBean.java @@ -0,0 +1,72 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + +import java.io.Serializable; + +public class ApiKeyBean implements Serializable { + + /*private static final long serialVersionUID = -8219849086890567740L; + + // private static final String KEY_CHARS = + // "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + + + private String email; + private String description; + + public ApiKeyBean() { + super(); + } + + public ApiKeyBean(String email, String description) { + super(); + this.email = email; + this.description = description; + } + + public String getEmail() { + return email; + } + + public void setEmail(String email) { + this.email = email; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + /* + * public String getKey() { return generateKey(16); } + * + * public String getSharedSecret() { return generateKey(24); } + * + * private static String generateKey ( int length ) { return + * uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); } + */ + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapPubSubTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapPubSubTest.java new file mode 100644 index 0000000..a026573 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapPubSubTest.java @@ -0,0 +1,138 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + +import java.io.InputStream; +import java.util.Scanner; + +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.Entity; +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.Response; + +import junit.framework.TestCase; + +import org.json.JSONObject; +import org.apache.http.HttpStatus; +import org.apache.log4j.Logger; + +import com.att.nsa.drumlin.till.data.sha1HmacSigner; + +public class DMaapPubSubTest { +/* private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class); + Client client = ClientBuilder.newClient(); + String url = LoadPropertyFile.getPropertyFileData().getProperty("url"); + WebTarget target = client.target(url); + String topicapikey; + String topicsecretKey; + String serverCalculatedSignature; + String date = LoadPropertyFile.getPropertyFileData().getProperty("date"); + // changes by islam + String topic_name = LoadPropertyFile.getPropertyFileData().getProperty("topicName"); + DmaapApiKeyTest keyInstance = new DmaapApiKeyTest(); + + + public void testProduceMessage() { + LOGGER.info("test case publish message"); + // DMaapTopicTest topicCreation = new DMaapTopicTest(); + DmaapApiKeyTest keyInstance = new DmaapApiKeyTest(); + // creating topic + createTopic(topic_name); + + target = client.target(url); + target = target.path("/events/"); + target = target.path(topic_name); + Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).post(Entity.json("{message:producing first message}")); + keyInstance.assertStatus(response2); + LOGGER.info("successfully published message"); + } + + public void testConsumeMessage() { + LOGGER.info("test case subscribing message"); + createTopic(topic_name); + target = client.target(url); + target = target.path("/events/"); + target = target.path(topic_name); + target = target.path("consumGrp"); + target = target.path(topicapikey); + Response response = target.request().get(); + keyInstance.assertStatus(response); + LOGGER.info("successfully consumed messages"); + InputStream is = (InputStream) response.getEntity(); + Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + String data = s.next(); + s.close(); + LOGGER.info("Consumed Message data: " + data); + } + + public void createTopic(String name) { + if (!topicExist(name)) { + TopicBean topicbean = new TopicBean(); + topicbean.setDescription("creating topic"); + topicbean.setPartitionCount(1); + topicbean.setReplicationCount(1); + topicbean.setTopicName(name); + topicbean.setTransactionEnabled(true); + target = client.target(url); + target = target.path("/topics/create"); + JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "topic creation")); + topicapikey = (String) jsonObj.get("key"); + topicsecretKey = (String) jsonObj.get("secret"); + serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey); + Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).post(Entity.json(topicbean)); + keyInstance.assertStatus(response); + } + } + + public boolean topicExist(String topicName) { + target = target.path("/topics/" + topicName); + InputStream is, issecret; + Response response = target.request().get(); + if (response.getStatus() == HttpStatus.SC_OK) { + is = (InputStream) response.getEntity(); 
+ Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + JSONObject dataObj = new JSONObject(s.next()); + s.close(); + // get owner of a topic + topicapikey = (String) dataObj.get("owner"); + target = client.target(url); + target = target.path("/apiKeys/"); + target = target.path(topicapikey); + Response response2 = target.request().get(); + issecret = (InputStream) response2.getEntity(); + Scanner st = new Scanner(issecret); + st.useDelimiter("\\A"); + JSONObject dataObj1 = new JSONObject(st.next()); + st.close(); + // get secret key of this topic// + topicsecretKey = (String) dataObj1.get("secret"); + serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey); + return true; + } else + return false; + }*/ +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapTopicTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapTopicTest.java new file mode 100644 index 0000000..ec1a63a --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/DMaapTopicTest.java @@ -0,0 +1,267 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + +import java.io.InputStream; +import java.util.Properties; +import java.util.Scanner; + +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.Entity; +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.Response; + +import junit.framework.TestCase; + +import org.apache.http.HttpStatus; +import org.json.JSONObject; +import org.apache.log4j.Logger; + +import com.att.nsa.drumlin.till.data.sha1HmacSigner; + +public class DMaapTopicTest { + /*private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class); + Client client = ClientBuilder.newClient(); + String topicapikey, topicsecretKey, serverCalculatedSignature; + Properties prop = LoadPropertyFile.getPropertyFileData(); + String topicName = prop.getProperty("topicName"); + String url = prop.getProperty("url"); + String date = prop.getProperty("date"); + WebTarget target = client.target(url); + DmaapApiKeyTest keyInstance = new DmaapApiKeyTest(); + + + public void createTopic(String name) { + if (!topicExist(name)) { + TopicBean topicbean = new TopicBean(); + topicbean.setDescription("creating topic"); + topicbean.setPartitionCount(1); + topicbean.setReplicationCount(1); + topicbean.setTopicName(name); + topicbean.setTransactionEnabled(true); + target = client.target(url); + target = target.path("/topics/create"); + JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("nm254w@att.com", "topic creation")); + topicapikey = (String) jsonObj.get("key"); + topicsecretKey = (String) jsonObj.get("secret"); + serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey); + Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).post(Entity.json(topicbean)); + keyInstance.assertStatus(response); + } + + } + + public boolean topicExist(String topicName) { + target = target.path("/topics/" + topicName); + InputStream is, issecret; + Response response = target.request().get(); + if (response.getStatus() == HttpStatus.SC_OK) { + is = (InputStream) response.getEntity(); + Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + JSONObject dataObj = new JSONObject(s.next()); + s.close(); + // get owner of a topic + topicapikey = (String) dataObj.get("owner"); + target = client.target(url); + target = target.path("/apiKeys/"); + target = target.path(topicapikey); + Response response2 = target.request().get(); + issecret = (InputStream) response2.getEntity(); + Scanner st = new Scanner(issecret); + st.useDelimiter("\\A"); + JSONObject dataObj1 = new JSONObject(st.next()); + st.close(); + // get secret key of this topic// + topicsecretKey = (String) dataObj1.get("secret"); + serverCalculatedSignature = sha1HmacSigner.sign(date, topicsecretKey); + return true; + } else + return false; + } + + public void testCreateTopic() { + LOGGER.info("test case create topic"); + createTopic(topicName); + LOGGER.info("Returning after create topic"); + } + + public void testOneTopic() { + LOGGER.info("test case get specific topic name " + topicName); + createTopic(topicName); + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + Response response = target.request().get(); + LOGGER.info("Successfully returned after fetching topic" + topicName); + keyInstance.assertStatus(response); + InputStream is = (InputStream) response.getEntity(); 
+ Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + JSONObject dataObj = new JSONObject(s.next()); + LOGGER.info("Details of " + topicName + " : " + dataObj.toString()); + s.close(); + } + + public void testdeleteTopic() { + LOGGER.info("test case delete topic name " + topicName); + createTopic(topicName); + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).delete(); + keyInstance.assertStatus(response); + LOGGER.info("Successfully returned after deleting topic" + topicName); + } + + public void testAllTopic() { + LOGGER.info("test case fetch all topic"); + target = client.target(url); + target = target.path("/topics"); + Response response = target.request().get(); + keyInstance.assertStatus(response); + LOGGER.info("successfully returned after fetching all the topic"); + InputStream is = (InputStream) response.getEntity(); + Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + JSONObject dataObj = new JSONObject(s.next()); + s.close(); + LOGGER.info("List of all topics " + dataObj.toString()); + } + + public void testPublisherForTopic() { + LOGGER.info("test case get all publishers for topic: " + topicName); + // creating topic to check + createTopic(topicName); + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + target = target.path("/producers"); + // checking all producer for a particular topic + Response response = target.request().get(); + keyInstance.assertStatus(response); + LOGGER.info("Successfully returned after getting all the publishers" + topicName); + } + + public void testPermitPublisherForTopic() { + LOGGER.info("test case permit user for topic " + topicName); + JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to ")); + String userapikey = (String) jsonObj.get("key"); + createTopic(topicName); + // adding user to a topic// + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + target = target.path("/producers/"); + target = target.path(userapikey); + Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).put(Entity.json("")); + keyInstance.assertStatus(response); + LOGGER.info("successfully returned after permiting the user for topic " + topicName); + } + + public void testDenyPublisherForTopic() { + LOGGER.info("test case denying user for topic " + topicName); + JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to ")); + String userapikey = (String) jsonObj.get("key"); + createTopic(topicName); + // adding user to a topic// + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + target = target.path("/producers/"); + target = target.path(userapikey); + target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).put(Entity.json("")); + // deleting user who is just added// + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + target = target.path("/producers/"); + target = target.path(userapikey); + Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).delete(); + 
keyInstance.assertStatus(response2); + LOGGER.info("successfully returned after denying the user for topic " + topicName); + } + + public void testConsumerForTopic() { + LOGGER.info("test case get all consumers for topic: " + topicName); + // creating topic to check + createTopic(topicName); + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + target = target.path("/consumers"); + // checking all consumer for a particular topic + Response response = target.request().get(); + keyInstance.assertStatus(response); + LOGGER.info("Successfully returned after getting all the consumers" + topicName); + } + + public void testPermitConsumerForTopic() { + LOGGER.info("test case get all consumer for topic: " + topicName); + // creating user for adding to topic// + JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to ")); + String userapikey = (String) jsonObj.get("key"); + createTopic(topicName); + // adding user to a topic// + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + target = target.path("/consumers/"); + target = target.path(userapikey); + Response response = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).put(Entity.json("")); + keyInstance.assertStatus(response); + LOGGER.info("Successfully returned after getting all the consumers" + topicName); + } + + public void testDenyConsumerForTopic() { + LOGGER.info("test case denying consumer for topic " + topicName); + // creating user for adding and deleting from topic// + JSONObject jsonObj = keyInstance.returnKey(new ApiKeyBean("ai039a@att.com", "adding user to ")); + String userapikey = (String) jsonObj.get("key"); + createTopic(topicName); + // adding user to a topic// + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + target = target.path("/consumers/"); + target = target.path(userapikey); + target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).put(Entity.json("")); + // deleting user who is just added// + target = client.target(url); + target = target.path("/topics/"); + target = target.path(topicName); + target = target.path("/consumers/"); + target = target.path(userapikey); + Response response2 = target.request().header("X-CambriaAuth", topicapikey + ":" + serverCalculatedSignature) + .header("X-CambriaDate", date).delete(); + keyInstance.assertStatus(response2); + LOGGER.info("successfully returned after denying the consumer for topic " + topicName); + }*/ +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapAdminTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapAdminTest.java new file mode 100644 index 0000000..f8dc500 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapAdminTest.java @@ -0,0 +1,60 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + + + +public class DmaapAdminTest { + /*private static final Logger LOGGER = Logger.getLogger(DmaapAdminTest.class); + Client client = ClientBuilder.newClient(); + WebTarget target = client.target(LoadPropertyFile.getPropertyFileData().getProperty("url")); + + + public void assertStatus(Response response) { + assertTrue(response.getStatus() == HttpStatus.SC_OK); + } + + // 1.get consumer cache + public void testConsumerCache() { + LOGGER.info("test case consumer cache"); + target = target.path("/admin/consumerCache"); + Response response = target.request().get(); + assertStatus(response); + LOGGER.info("Successfully returned after fetching consumer cache"); + InputStream is = (InputStream) response.getEntity(); + Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + String data = s.next(); + s.close(); + LOGGER.info("Details of consumer cache :" + data); + } + + // 2.drop consumer cache + public void testDropConsumerCache() { + LOGGER.info("test case drop consumer cache"); + target = target.path("/admin/dropConsumerCache"); + Response response = target.request().post(Entity.json(null)); + assertStatus(response); + LOGGER.info("Successfully returned after dropping consumer cache"); + } +*/ +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapApiKeyTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapApiKeyTest.java new file mode 100644 index 0000000..f40025a --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapApiKeyTest.java @@ -0,0 +1,162 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + +import java.io.InputStream; +import java.util.Properties; +import java.util.Scanner; + +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.Entity; +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.Response; + +import junit.framework.TestCase; + +import org.apache.log4j.Logger; +import org.apache.http.HttpStatus; +import org.json.JSONObject; + +import com.att.nsa.drumlin.till.data.sha1HmacSigner; + +public class DmaapApiKeyTest { + /* + private static final Logger LOGGER = Logger.getLogger(DmaapApiKeyTest.class); + Client client = ClientBuilder.newClient(); + Properties prop = LoadPropertyFile.getPropertyFileData(); + String url = prop.getProperty("url"); + WebTarget target = client.target(url); + String date = prop.getProperty("date"); + + + public JSONObject returnKey(ApiKeyBean apikeybean) { + LOGGER.info("Call to return newly created key"); + target = client.target(url); + target = target.path("/apiKeys/create"); + Response response = target.request().post(Entity.json(apikeybean)); + assertStatus(response); + LOGGER.info("successfully created keys"); + InputStream is = (InputStream) response.getEntity(); + Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + JSONObject dataObj = new JSONObject(s.next()); + s.close(); + LOGGER.info("key details :" + dataObj.toString()); + return dataObj; + } + + // 1. create key + public void testCreateKey() { + LOGGER.info("test case create key"); + ApiKeyBean apiKeyBean = new ApiKeyBean("nm254w@att.com", "Creating Api Key."); + returnKey(apiKeyBean); + LOGGER.info("Successfully returned after creating key"); + } + + public void assertStatus(Response response) { + assertTrue(response.getStatus() == HttpStatus.SC_OK); + } + + // 2. get Allkey details + public void testAllKey() { + LOGGER.info("test case get all key"); + target = target.path("/apiKeys"); + Response response = target.request().get(); + assertStatus(response); + LOGGER.info("successfully returned after get all key"); + InputStream is = (InputStream) response.getEntity(); + Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + LOGGER.info("Details of key: " + s.next()); + s.close(); + + } + + // 3. get specific key + public void testSpecificKey() { + LOGGER.info("test case get specific key"); + String apiKey = ""; + ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key."); + + apiKey = (String) returnKey(apiKeyBean).get("key"); + target = client.target(url); + target = target.path("/apiKeys/"); + target = target.path(apiKey); + Response response = target.request().get(); + assertStatus(response); + LOGGER.info("successfully returned after fetching specific key"); + } + + // 4. 
update key + + public void testUpdateKey() { + LOGGER.info("test case update key"); + String apiKey = ""; + String secretKey = ""; + final String serverCalculatedSignature; + final String X_CambriaAuth; + final String X_CambriaDate; + JSONObject jsonObj; + + ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key for update"); + ApiKeyBean apiKeyBean1 = new ApiKeyBean("ai03911@att.com", "updating Api Key."); + jsonObj = returnKey(apiKeyBean); + apiKey = (String) jsonObj.get("key"); + secretKey = (String) jsonObj.get("secret"); + + serverCalculatedSignature = sha1HmacSigner.sign(date, secretKey); + X_CambriaAuth = apiKey + ":" + serverCalculatedSignature; + X_CambriaDate = date; + target = client.target(url); + target = target.path("/apiKeys/" + apiKey); + Response response1 = target.request().header("X-CambriaAuth", X_CambriaAuth) + .header("X-CambriaDate", X_CambriaDate).put(Entity.json(apiKeyBean1)); + assertStatus(response1); + LOGGER.info("successfully returned after updating key"); + } + + // 5. delete key + public void testDeleteKey() { + LOGGER.info("test case delete key"); + String apiKey = ""; + String secretKey = ""; + final String serverCalculatedSignature; + final String X_CambriaAuth; + final String X_CambriaDate; + JSONObject jsonObj; + ApiKeyBean apiKeyBean = new ApiKeyBean("ai039@att.com", "Creating Api Key."); + jsonObj = returnKey(apiKeyBean); + apiKey = (String) jsonObj.get("key"); + secretKey = (String) jsonObj.get("secret"); + serverCalculatedSignature = sha1HmacSigner.sign(date, secretKey); + X_CambriaAuth = apiKey + ":" + serverCalculatedSignature; + X_CambriaDate = date; + target = client.target(url); + target = target.path("/apiKeys/" + apiKey); + Response response2 = target.request().header("X-CambriaAuth", X_CambriaAuth) + .header("X-CambriaDate", X_CambriaDate).delete(); + assertStatus(response2); + LOGGER.info("successfully returned after deleting key"); + } +*/ +} \ No newline at end of file diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapMetricsTest.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapMetricsTest.java new file mode 100644 index 0000000..4521ad2 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/DmaapMetricsTest.java @@ -0,0 +1,77 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + +import java.io.InputStream; +import java.util.Scanner; + +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.Response; + +import junit.framework.TestCase; + +import org.apache.log4j.Logger; +import org.apache.http.HttpStatus; + +public class DmaapMetricsTest { + /*private static final Logger LOGGER = Logger.getLogger(DmaapMetricsTest.class); + Client client = ClientBuilder.newClient(); + WebTarget target = client.target(LoadPropertyFile.getPropertyFileData().getProperty("url")); + + public void assertStatus(Response response) { + assertTrue(response.getStatus() == HttpStatus.SC_OK); + } + + + // 1.get metrics + public void testMetrics() { + LOGGER.info("test case get all metrics"); + target = target.path("/metrics"); + Response response = target.request().get(); + assertStatus(response); + LOGGER.info("successfully returned after fetching all metrics"); + InputStream is = (InputStream) response.getEntity(); + Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + String data = s.next(); + s.close(); + LOGGER.info("DmaapMetricTest Test all metrics" + data); + } + + // 2.get metrics by name + public void testMetricsByName() { + LOGGER.info("test case get metrics by name"); + target = target.path("/metrics/startTime"); + Response response = target.request().get(); + assertStatus(response); + LOGGER.info("successfully returned after fetching specific metrics"); + InputStream is = (InputStream) response.getEntity(); + Scanner s = new Scanner(is); + s.useDelimiter("\\A"); + String data = s.next(); + s.close(); + LOGGER.info("DmaapMetricTest metrics by name" + data); + } +*/ +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/JUnitTestSuite.java new file mode 100644 index 0000000..3090773 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/JUnitTestSuite.java @@ -0,0 +1,44 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DMaapPubSubTest.class, DmaapApiKeyTest.class, DMaapTopicTest.class, DmaapMetricsTest.class, + DmaapAdminTest.class }) +public class JUnitTestSuite { + /*private static final Logger LOGGER = Logger.getLogger(DMaapTopicTest.class); + + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + }*/ + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/LoadPropertyFile.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/LoadPropertyFile.java new file mode 100644 index 0000000..46b88d0 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/LoadPropertyFile.java @@ -0,0 +1,48 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +import org.apache.log4j.Logger; + +public class LoadPropertyFile { + /*private static final Logger LOGGER = Logger + .getLogger(LoadPropertyFile.class); + + static public Properties getPropertyFileData() { + Properties prop = new Properties(); + LOGGER.info("loading the property file"); + + try { + InputStream inputStream = LoadPropertyFile.class.getClassLoader() + .getResourceAsStream("DMaaPUrl.properties"); + prop.load(inputStream); + LOGGER.info("successfully loaded the property file"); + } catch (IOException e) { + LOGGER.error("Error while retrieving API keys: " + e); + } + return prop; + }*/ +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/TestRunner.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/TestRunner.java new file mode 100644 index 0000000..7f7ca82 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/TestRunner.java @@ -0,0 +1,42 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dmaap; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + /*private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + + public static void main(String[] args) { + // TODO Auto-generated method stub + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + } + LOGGER.info(result.wasSuccessful()); + } +*/ +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dmaap/TopicBean.java b/src/test/java/org/onap/dmaap/mr/test/dmaap/TopicBean.java new file mode 100644 index 0000000..b1950e2 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dmaap/TopicBean.java @@ -0,0 +1,72 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +/** + * + */ +package org.onap.dmaap.mr.test.dmaap; + +import java.io.Serializable; + +public class TopicBean implements Serializable { + + /* + * private static final long serialVersionUID = -8620390377775457949L; + * private String topicName; private String description; + * + * + * private int partitionCount; private int replicationCount; private boolean + * transactionEnabled = false; + * + * public boolean isTransactionEnabled() { return transactionEnabled; } + * + * public void setTransactionEnabled(boolean transactionEnabled) { + * this.transactionEnabled = transactionEnabled; } + * + * public TopicBean() { super(); } + * + * public TopicBean(String topicName, String description, int + * partitionCount, int replicationCount, boolean transactionEnabled) { + * super(); this.topicName = topicName; this.description = description; + * this.partitionCount = partitionCount; this.replicationCount = + * replicationCount; this.transactionEnabled = transactionEnabled; } + * + * public String getTopicName() { return topicName; } + * + * public void setTopicName(String topicName) { this.topicName = topicName; + * } + * + * public String getDescription() { return description; } + * + * public void setDescription(String description) { this.description = + * description; } + * + * public int getPartitionCount() { return partitionCount; } + * + * public void setPartitionCount(int partitionCount) { this.partitionCount = + * partitionCount; } + * + * public int getReplicationCount() { return replicationCount; } + * + * public void setReplicationCount(int replicationCount) { + * this.replicationCount = replicationCount; } + */ +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/ApiKeyBean.java b/src/test/java/org/onap/dmaap/mr/test/dme2/ApiKeyBean.java new file mode 100644 index 0000000..96c9c78 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/ApiKeyBean.java @@ -0,0 +1,72 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.io.Serializable; + +public class ApiKeyBean implements Serializable { + + private static final long serialVersionUID = -8219849086890567740L; + + // private static final String KEY_CHARS = + // "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + + + private String email; + private String description; + + public ApiKeyBean() { + super(); + } + + public ApiKeyBean(String email, String description) { + super(); + this.email = email; + this.description = description; + } + + public String getEmail() { + return email; + } + + public void setEmail(String email) { + this.email = email; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + /* + * public String getKey() { return generateKey(16); } + * + * public String getSharedSecret() { return generateKey(24); } + * + * private static String generateKey ( int length ) { return + * uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length ); } + */ + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2AdminTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2AdminTest.java new file mode 100644 index 0000000..3222046 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2AdminTest.java @@ -0,0 +1,148 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Properties; + +import org.apache.log4j.Logger; + +import junit.framework.TestCase; + +import com.att.aft.dme2.api.DME2Client; +import com.att.aft.dme2.api.DME2Exception; +//import com.ibm.disthub2.impl.client.PropSchema; + +public class DME2AdminTest extends TestCase { + private static final Logger LOGGER = Logger.getLogger(DME2AdminTest.class); + + protected String url; + + protected Properties props; + + protected HashMap hm; + + protected String methodType; + + protected String contentType; + + protected String user; + + protected String password; + + @Override + protected void setUp() throws Exception { + super.setUp(); + System.setProperty("AFT_DME2_CLIENT_SSL_INCLUDE_PROTOCOLS", "SSLv3,TLSv1,TLSv1.1"); + System.setProperty("AFT_DME2_CLIENT_IGNORE_SSL_CONFIG", "false"); + System.setProperty("AFT_DME2_CLIENT_KEYSTORE_PASSWORD", "changeit"); + this.props = LoadPropertyFile.getPropertyFileDataProducer(); + String latitude = props.getProperty("Latitude"); + String longitude = props.getProperty("Longitude"); + String version = props.getProperty("Version"); + String serviceName = props.getProperty("ServiceName"); + serviceName = "dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/admin"; + String env = props.getProperty("Environment"); + String partner = props.getProperty("Partner"); + String protocol = props.getProperty("Protocol"); + + methodType = props.getProperty("MethodTypeGet"); + contentType = props.getProperty("contenttype"); + user = props.getProperty("user"); + password = props.getProperty("password"); + + + this.url = protocol + "://" + serviceName + "?" 
+ "version=" + version + "&" + "envContext=" + env + "&" + + "routeOffer=" + partner + "&partner=BOT_R"; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + } + + public void testGetConsumerCache() { + LOGGER.info("test case consumer cache started"); + + String subContextPath = props.getProperty("SubContextPathGetAdminConsumerCache"); + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodType); + sender.setSubContext(subContextPath); + sender.setPayload(""); + sender.addHeader("Content-Type", contentType); + + sender.addHeader("X-CambriaAuth", "user1:7J49YriFlyRgebyOsSJhZvY/C60="); + sender.addHeader("X-X-CambriaDate", "2016-10-18T09:56:04-05:00"); + + //sender.setCredentials(user, password); + sender.setHeaders(hm); + LOGGER.info("Getting consumer Cache"); + String reply = sender.sendAndWait(5000L); + System.out.println(reply); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + assertNotNull(reply); + LOGGER.info("response from consumer cache=" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void ttestDropConsumerCache() { + LOGGER.info("Drom consumer cache initiated"); + + String subContextPath = props.getProperty("SubContextPathDropAdminConsumerCache"); + + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodType); + sender.setSubContext(subContextPath); + sender.setPayload(""); + sender.addHeader("Content-Type", contentType); + sender.setCredentials(user, password); + sender.setHeaders(hm); + + LOGGER.info("Dropping consumer cache..........."); + String reply = sender.sendAndWait(5000L); + + // assertTrue(LoadPropertyFile.isValidJsonString(reply)); + assertNotNull(reply); + LOGGER.info("response =" + reply); + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ApiKeyTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ApiKeyTest.java new file mode 100644 index 0000000..54148b2 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ApiKeyTest.java @@ -0,0 +1,229 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Properties; + +import org.apache.log4j.Logger; +import org.json.JSONObject; + +import com.att.aft.dme2.api.DME2Client; +import com.att.aft.dme2.api.DME2Exception; +import com.att.aft.dme2.internal.jackson.map.ObjectMapper; + +import junit.framework.TestCase; + +public class DME2ApiKeyTest extends TestCase { + private static final Logger LOGGER = Logger.getLogger(DME2ApiKeyTest.class); + + protected String url; + + protected Properties props; + + @Override + protected void setUp() throws Exception { + super.setUp(); + System.setProperty("AFT_DME2_CLIENT_SSL_INCLUDE_PROTOCOLS", "SSLv3,TLSv1,TLSv1.1"); + System.setProperty("AFT_DME2_CLIENT_IGNORE_SSL_CONFIG", "false"); + System.setProperty("AFT_DME2_CLIENT_KEYSTORE_PASSWORD", "changeit"); + this.props = LoadPropertyFile.getPropertyFileDataProducer(); + String latitude = props.getProperty("Latitude"); + String longitude = props.getProperty("Longitude"); + String version = props.getProperty("Version"); + String serviceName = props.getProperty("ServiceName"); + String env = props.getProperty("Environment"); + String partner = props.getProperty("Partner"); + String protocol = props.getProperty("Protocol"); + this.url = protocol + "://" + serviceName + "?" + "version=" + version + "&" + "envContext=" + env + "&" + + "routeOffer=" + partner + "&partner=BOT_R"; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + } + + public void testCreateKey() { + LOGGER.info("Create Key test case initiated"); + + ApiKeyBean apiKeyBean = new ApiKeyBean("user1@us.att.com", "Creating Api Key.m"); + + System.out.println(url); + + returnKey(apiKeyBean, url, props); + + } + + public String returnKey(ApiKeyBean apibean, String url, Properties props) { + + String reply = null; + try { + LOGGER.info("Call to return key "); + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(props.getProperty("MethodTypePost")); + sender.setSubContext(props.getProperty("SubContextPathGetCreateKeys")); + String jsonStringApiBean = new ObjectMapper().writeValueAsString(apibean); + sender.setPayload(jsonStringApiBean); + sender.addHeader("content-type", props.getProperty("contenttype")); + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + LOGGER.info("creating ApiKey"); + reply = sender.sendAndWait(5000L); + System.out.println("reply: " + reply); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + LOGGER.info("response =" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + return reply; + } + + public void testGetAllKey() { + LOGGER.info("Test case Get All key initiated...."); + try { + DME2Client sender = new DME2Client(new URI(this.url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(this.props.getProperty("MethodTypeGet")); + String subcontextPath = this.props.getProperty("SubContextPathGetApiKeys"); + // sender.setSubContext(subcontextPath); + 
sender.setPayload(""); + sender.addHeader("content-type", props.getProperty("contenttype")); + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + LOGGER.info("Fetching all keys"); + String reply = sender.sendAndWait(5000L); + System.out.println(reply); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + LOGGER.info("response =" + reply); + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void testGetOneKey() {/* + LOGGER.info("Test case get one key initiated"); + ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m"); + JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props)); + String apiKey = (String) jsonObj.get("key"); + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(props.getProperty("MethodTypeGet")); + sender.setSubContext(props.getProperty("SubContextPathGetOneKey") + apiKey); + sender.setPayload(""); + sender.addHeader("content-type", props.getProperty("contenttype")); + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + + LOGGER.info("Fetching details of api key: " + apiKey); + String reply = sender.sendAndWait(5000L); + System.out.println(reply); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + LOGGER.info("response =" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + */} + + // ............. test case update key is not applicable in + // DME2.................// + public void testUpdateKey() {/* + ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m"); + + JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props)); + String apiKey = (String) jsonObj.get("key"); + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + String p = props.getProperty("MethodTypePut"); + sender.setMethod(p); + String s = props.getProperty("SubContextPathUpdateKeys") + apiKey; + sender.setSubContext(s); + + String jsonStringApiBean = new ObjectMapper() + .writeValueAsString(new ApiKeyBean("user1@att.com", "updating key")); + sender.setPayload(jsonStringApiBean); + System.out.println(jsonStringApiBean); + String c = props.getProperty("contenttype"); + sender.addHeader("content-type", c); + sender.setCredentials(props.getProperty("keyUser"), props.getProperty("keyPass")); + + System.out.println("creating ApiKey"); + String reply = sender.sendAndWait(5000L); + assertNotNull(reply); + System.out.println("response =" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + */} + + // ............. 
test case delete key is not applicable in + // DME2.................// + + public void testDeleteKey() {/* + ApiKeyBean apiKeyBean = new ApiKeyBean("user1@att.com", "Creating Api Key.m"); + + JSONObject jsonObj = new JSONObject(returnKey(apiKeyBean, url, props)); + String apiKey = (String) jsonObj.get("key"); + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + String p = props.getProperty("MethodTypeDelete"); + sender.setMethod(p); + String s = props.getProperty("SubContextPathDeleteteKeys") + apiKey; + sender.setSubContext(s); + + sender.setPayload(""); // System.out.println(jsonStringApiBean); + String c = props.getProperty("contenttype"); + sender.addHeader("content-type", c); + sender.setCredentials(props.getProperty("keyUser"), props.getProperty("keyPass")); + + System.out.println("creating ApiKey"); + String reply = sender.sendAndWait(5000L); + assertNotNull(reply); + System.out.println("response =" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + */} + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerFilterTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerFilterTest.java new file mode 100644 index 0000000..59e1845 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerFilterTest.java @@ -0,0 +1,96 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URLEncoder; +import java.util.HashMap; +import java.util.Properties; + +import org.apache.log4j.Logger; + +import junit.framework.TestCase; + +import com.att.aft.dme2.api.DME2Client; +import com.att.aft.dme2.api.DME2Exception; + +public class DME2ConsumerFilterTest extends TestCase { + private static final Logger LOGGER = Logger.getLogger(DME2ConsumerFilterTest.class); + + public void testConsumerFilter() { + LOGGER.info("Test case consumer filter initiated"); + + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + String latitude = props.getProperty("Latitude"); + String longitude = props.getProperty("Longitude"); + String version = props.getProperty("Version"); + String serviceName = props.getProperty("ServiceName"); + String env = props.getProperty("Environment"); + String partner = props.getProperty("Partner"); + String protocol = props.getProperty("Protocol"); + String methodType = props.getProperty("MethodTypeGet"); + String user = props.getProperty("user"); + String password = props.getProperty("password"); + String contenttype = props.getProperty("contenttype"); + + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + + try { + // ..checking for topic exist is commented + // if (!topicTestObj.topicExist(url, props, hm)) { + // throw new Exception("Topic does not exist"); + // } else { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodType); + String subContextPathConsumer = props.getProperty("SubContextPathConsumer") + props.getProperty("newTopic") + + "/" + props.getProperty("group") + "/" + props.getProperty("id") + "?" + + props.getProperty("filterType"); + + sender.setSubContext(URLEncoder.encode(subContextPathConsumer, "UTF-8")); + sender.setPayload(""); + + sender.addHeader("Content-Type", contenttype); + sender.setCredentials(user, password); + sender.setHeaders(hm); + + LOGGER.info("Consuming Message for Filter"); + String reply = sender.sendAndWait(5000L); + assertNotNull(reply); + LOGGER.info("Message received = " + reply); + // } + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerTest.java new file mode 100644 index 0000000..b12f001 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ConsumerTest.java @@ -0,0 +1,94 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Properties; + +import org.apache.log4j.Logger; + +import junit.framework.TestCase; + +import com.att.aft.dme2.api.DME2Client; +import com.att.aft.dme2.api.DME2Exception; + +public class DME2ConsumerTest extends TestCase { + private static final Logger LOGGER = Logger.getLogger(DME2ConsumerTest.class); + + public void testConsumer() { + LOGGER.info("Test case subscribing initiated"); + + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + String latitude = props.getProperty("Latitude"); + String longitude = props.getProperty("Longitude"); + String version = props.getProperty("Version"); + String serviceName = props.getProperty("ServiceName"); + String env = props.getProperty("Environment"); + String partner = props.getProperty("Partner"); + String protocol = props.getProperty("Protocol"); + String methodType = props.getProperty("MethodTypeGet"); + String user = props.getProperty("user"); + String password = props.getProperty("password"); + String contenttype = props.getProperty("contenttype"); + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + try { + + // topic exist logic is commented + // if (!topicTestObj.topicExist(url, props, hm)) { + // throw new Exception("Topic does not exist"); + // } else { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodType); + String subContextPathConsumer = props.getProperty("SubContextPathConsumer") + props.getProperty("newTopic") + + "/" + props.getProperty("group") + "/" + props.getProperty("id"); + sender.setSubContext(subContextPathConsumer); + sender.setPayload(""); + + sender.addHeader("Content-Type", contenttype); + sender.setCredentials(user, password); + sender.setHeaders(hm); + + LOGGER.info("Consuming Message"); + String reply = sender.sendAndWait(5000L); + + assertNotNull(reply); + LOGGER.info("Message received = " + reply); + // } + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2MetricsTest.java
b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2MetricsTest.java new file mode 100644 index 0000000..35c3218 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2MetricsTest.java @@ -0,0 +1,132 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Properties; + +import org.apache.log4j.Logger; + +import junit.framework.TestCase; + +import com.att.aft.dme2.api.DME2Client; +import com.att.aft.dme2.api.DME2Exception; + +public class DME2MetricsTest extends TestCase { + private static final Logger LOGGER = Logger.getLogger(DME2MetricsTest.class); + + public void testGetMetrics() { + LOGGER.info("Test case get metrics initiated..."); + + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + String latitude = props.getProperty("Latitude"); + String longitude = props.getProperty("Longitude"); + String version = props.getProperty("Version"); + String serviceName = props.getProperty("ServiceName"); + String env = props.getProperty("Environment"); + String partner = props.getProperty("Partner"); + String subContextPath = props.getProperty("SubContextPathGetMetrics"); + String protocol = props.getProperty("Protocol"); + String methodType = props.getProperty("MethodTypeGet"); + String user = props.getProperty("user"); + String password = props.getProperty("password"); + String contenttype = props.getProperty("contenttype"); + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodType); + sender.setSubContext(subContextPath); + sender.setPayload(""); + sender.addHeader("Content-Type", contenttype); + sender.setCredentials(user, password); + sender.setHeaders(hm); + + LOGGER.info("Getting Metrics Details"); + String reply = sender.sendAndWait(5000L); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); 
+ LOGGER.info("response =" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void testGetMetricsByName() { + LOGGER.info("Test case get metrics by name initiated"); + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + String latitude = props.getProperty("Latitude"); + String longitude = props.getProperty("Longitude"); + String version = props.getProperty("Version"); + String serviceName = props.getProperty("ServiceName"); + String env = props.getProperty("Environment"); + String partner = props.getProperty("Partner"); + String subContextPath = props.getProperty("SubContextPathGetMetricsByName"); + String protocol = props.getProperty("Protocol"); + String methodType = props.getProperty("MethodTypeGet"); + String user = props.getProperty("user"); + String password = props.getProperty("password"); + String contenttype = props.getProperty("contenttype"); + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodType); + sender.setSubContext(subContextPath); + sender.setPayload(""); + sender.addHeader("Content-Type", contenttype); + sender.setCredentials(user, password); + sender.setHeaders(hm); + + LOGGER.info("Getting Metrics By name"); + String reply = sender.sendAndWait(5000L); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + LOGGER.info("response =" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ProducerTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ProducerTest.java new file mode 100644 index 0000000..c053502 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2ProducerTest.java @@ -0,0 +1,100 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Properties; + +import org.apache.log4j.Logger; + +import junit.framework.TestCase; + +import com.att.aft.dme2.api.DME2Client; +import com.att.aft.dme2.api.DME2Exception; +import com.att.aft.dme2.internal.jackson.map.ObjectMapper; + +public class DME2ProducerTest extends TestCase { + private static final Logger LOGGER = Logger.getLogger(DME2ProducerTest.class); + + public void testProducer() { + DME2TopicTest topicTestObj = new DME2TopicTest(); + + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + String latitude = props.getProperty("Latitude"); + String longitude = props.getProperty("Longitude"); + String version = props.getProperty("Version"); + String serviceName = props.getProperty("ServiceName"); + String env = props.getProperty("Environment"); + String partner = props.getProperty("Partner"); + String protocol = props.getProperty("Protocol"); + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + // checking whether topic exist or not + if (!topicTestObj.topicExist(url, props, hm)) { + // if topic doesn't exist then create the topic + topicTestObj.createTopic(url, props, hm); + // after creating the topic publish on that topic + publishMessage(url, props, hm); + } else { + // if topic already exist start publishing on the topic + publishMessage(url, props, hm); + } + + } + + public void publishMessage(String url, Properties props, HashMap mapData) { + try { + LOGGER.info("Call to publish message "); + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(props.getProperty("MethodTypePost")); + String subcontextpathPublish = props.getProperty("SubContextPathproducer") + props.getProperty("newTopic"); + sender.setSubContext(subcontextpathPublish); + String jsonStringApiBean = new ObjectMapper().writeValueAsString(new ApiKeyBean("example@att.com", + "description")); + sender.setPayload(jsonStringApiBean); + + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + sender.addHeader("content-type", props.getProperty("contenttype")); + LOGGER.info("Publishing message"); + String reply = sender.sendAndWait(5000L); + // assertTrue(LoadPropertyFile.isValidJsonString(reply)); + assertNotNull(reply); + LOGGER.info("response =" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + + } +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/DME2TopicTest.java b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2TopicTest.java new file mode 100644 index 0000000..94c507c --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/DME2TopicTest.java @@ -0,0 +1,545 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * 
================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Properties; + +import javax.ws.rs.client.Entity; + +import org.apache.log4j.Logger; + +import junit.framework.TestCase; + +import com.att.aft.dme2.api.DME2Client; +import com.att.aft.dme2.api.DME2Exception; +import com.att.aft.dme2.internal.jackson.map.ObjectMapper; + +public class DME2TopicTest extends TestCase { + private String latitude; + private String longitude; + private String version; + private String serviceName; + private String env; + private String partner; + private String protocol; + private String methodTypeGet; + private String methodTypePost; + private String methodTypeDelete; + private String methodTypePut; + + private String user; + private String password; + private String contenttype; + private String subContextPathGetAllTopic; + private String subContextPathGetOneTopic; + private String SubContextPathCreateTopic; + private String SubContextPathGetPublisherl; + private String SubContextPathGetPublisher; + private String SubContextPathGetPermitPublisher; + private String SubContextPathGetConsumer; + private String SubContextPathGetPermitConsumer; + private static final Logger LOGGER = Logger.getLogger(DME2TopicTest.class); + + public void createTopic(String url, Properties props, HashMap mapData) { + LOGGER.info("create topic method starts"); + if (!topicExist(url, props, mapData)) { + LOGGER.info("creating a new topic"); + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(props.getProperty("MethodTypePost")); + sender.setSubContext(props.getProperty("SubContextPathCreateTopic")); + TopicBeanDME2 topicBean = new TopicBeanDME2(props.getProperty("newTopic"), + props.getProperty("topicDescription"), Integer.parseInt(props.getProperty("partition")), + Integer.parseInt(props.getProperty("replication")), Boolean.valueOf(props + .getProperty("txenabled"))); + String jsonStringApiBean = new ObjectMapper().writeValueAsString(topicBean); + sender.setPayload(jsonStringApiBean); + sender.addHeader("content-type", props.getProperty("contenttype")); + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + + LOGGER.info("creating Topic"); + String reply = sender.sendAndWait(5000L); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + LOGGER.info("response =" + reply); + + } catch (DME2Exception e) { + 
e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + public boolean topicExist(String url, Properties props, HashMap mapData) { + boolean topicExist = false; + try { + LOGGER.info("Checking topic exists or not"); + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(props.getProperty("MethodTypeGet")); + String subcontextPath = props.getProperty("subContextPathGetOneTopic") + props.getProperty("newTopic"); + sender.setSubContext(subcontextPath); + sender.setPayload(""); + sender.addHeader("content-type", props.getProperty("contenttype")); + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + String reply = sender.sendAndWait(5000L); + topicExist = LoadPropertyFile.isValidJsonString(reply); + LOGGER.info("Topic exist =" + topicExist); + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + return topicExist; + } + + public void testAllTopics() { + LOGGER.info("Test case get all topics initiated"); + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + latitude = props.getProperty("Latitude"); + longitude = props.getProperty("Longitude"); + version = props.getProperty("Version"); + serviceName = props.getProperty("ServiceName"); + env = props.getProperty("Environment"); + partner = props.getProperty("Partner"); + subContextPathGetAllTopic = props.getProperty("subContextPathGetAllTopic"); + protocol = props.getProperty("Protocol"); + methodTypeGet = props.getProperty("MethodTypeGet"); + user = props.getProperty("user"); + password = props.getProperty("password"); + contenttype = props.getProperty("contenttype"); + + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); // } else { + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodTypeGet); + sender.setSubContext(subContextPathGetAllTopic); + sender.setPayload(""); + + sender.addHeader("Content-Type", contenttype); + sender.setCredentials(user, password); + sender.setHeaders(hm); + + LOGGER.info("Retrieving all topics"); + String reply = sender.sendAndWait(5000L); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + LOGGER.info("All Topics details = " + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void testOneTopic() { + LOGGER.info("Test case get one topic initiated"); + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + latitude = props.getProperty("Latitude"); + longitude = props.getProperty("Longitude"); + version = props.getProperty("Version"); + serviceName = props.getProperty("ServiceName"); + env = props.getProperty("Environment"); + partner = props.getProperty("Partner"); + subContextPathGetOneTopic = props.getProperty("subContextPathGetOneTopic"); + protocol = props.getProperty("Protocol"); + methodTypeGet = 
props.getProperty("MethodTypeGet"); + user = props.getProperty("user"); + password = props.getProperty("password"); + contenttype = props.getProperty("contenttype"); + + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + System.out.println("Retrieving topic detail"); + if (!topicExist(url, props, hm)) { + createTopic(url, props, hm); + } else { + assertTrue(true); + } + } + + public void createTopicForDeletion(String url, Properties props, HashMap mapData) { + LOGGER.info("create topic method starts"); + + LOGGER.info("creating a new topic for deletion"); + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(props.getProperty("MethodTypePost")); + sender.setSubContext(props.getProperty("SubContextPathCreateTopic")); + TopicBeanDME2 topicBean = new TopicBeanDME2(props.getProperty("deleteTopic"), + props.getProperty("topicDescription"), Integer.parseInt(props.getProperty("partition")), + Integer.parseInt(props.getProperty("replication")), Boolean.valueOf(props.getProperty("txenabled"))); + String jsonStringApiBean = new ObjectMapper().writeValueAsString(topicBean); + sender.setPayload(jsonStringApiBean); + sender.addHeader("content-type", props.getProperty("contenttype")); + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + + LOGGER.info("creating Topic"); + String reply = sender.sendAndWait(5000L); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + LOGGER.info("response =" + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + + } + + public boolean topicExistForDeletion(String url, Properties props, HashMap mapData) { + boolean topicExist = false; + try { + LOGGER.info("Checking topic exists for deletion"); + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(props.getProperty("MethodTypeGet")); + String subcontextPath = props.getProperty("subContextPathGetOneTopic") + props.getProperty("deleteTopic"); + sender.setSubContext(subcontextPath); + sender.setPayload(""); + sender.addHeader("content-type", props.getProperty("contenttype")); + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + String reply = sender.sendAndWait(5000L); + topicExist = LoadPropertyFile.isValidJsonString(reply); + LOGGER.info("Topic exist for deletion=" + topicExist); + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + return topicExist; + } + + public void testDeleteTopic() { + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + latitude = props.getProperty("Latitude"); + longitude = props.getProperty("Longitude"); + version = props.getProperty("Version"); + serviceName = props.getProperty("ServiceName"); + env = props.getProperty("Environment"); + partner = props.getProperty("Partner"); + SubContextPathCreateTopic = props.getProperty("SubContextPathCreateTopic"); + protocol = props.getProperty("Protocol"); + 
methodTypePost = props.getProperty("MethodTypeDelete"); + user = props.getProperty("user"); + password = props.getProperty("password"); + contenttype = props.getProperty("contenttypejson"); + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + System.out.println("deleting topic"); + if (!topicExistForDeletion(url, props, hm)) { + createTopicForDeletion(url, props, hm); + deleteTopic(url, props, hm); + } else { + deleteTopic(url, props, hm); + } + } + + public void deleteTopic(String url, Properties props, HashMap mapData) { + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(props.getProperty("MethodTypeDelete")); + String subcontextPathDelete = props.getProperty("subContextPathGetOneTopic") + + props.getProperty("deleteTopic"); + sender.setSubContext(subcontextPathDelete); + sender.setPayload(""); + sender.addHeader("content-type", props.getProperty("contenttype")); + sender.setCredentials(props.getProperty("user"), props.getProperty("password")); + System.out.println("Deleting Topic " + props.getProperty("deleteTopic")); + String reply = sender.sendAndWait(5000L); + assertNotNull(reply); + System.out.println("response =" + reply); + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void testGetProducersTopics() { + LOGGER.info("Test case get list of producers on topic"); + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + latitude = props.getProperty("Latitude"); + longitude = props.getProperty("Longitude"); + version = props.getProperty("Version"); + serviceName = props.getProperty("ServiceName"); + env = props.getProperty("Environment"); + partner = props.getProperty("Partner"); + SubContextPathGetPublisher = props.getProperty("SubContextPathGetPublisher"); + protocol = props.getProperty("Protocol"); + methodTypeGet = props.getProperty("MethodTypeGet"); + user = props.getProperty("user"); + password = props.getProperty("password"); + contenttype = props.getProperty("contenttype"); + + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodTypeGet); + sender.setSubContext(SubContextPathGetPublisher); + sender.setPayload(""); + + sender.addHeader("Content-Type", contenttype); + sender.setCredentials(user, password); + sender.setHeaders(hm); + + LOGGER.info("Retrieving List of publishers"); + String reply = sender.sendAndWait(5000L); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + LOGGER.info("All Publishers details = " + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch
(Exception e) { + e.printStackTrace(); + } + } + + // permitting a producer on topic is not applicable + // public void testPermitProducersTopics() { + // LOGGER.info("Test case "); + // Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + // latitude = props.getProperty("Latitude"); + // longitude = props.getProperty("Longitude"); + // version = props.getProperty("Version"); + // serviceName = props.getProperty("ServiceName"); + // env = props.getProperty("Environment"); + // partner = props.getProperty("Partner"); + // SubContextPathGetPermitPublisher = + // props.getProperty("SubContextPathGetPermitPublisher"); + // protocol = props.getProperty("Protocol"); + // methodTypePut = props.getProperty("MethodTypePut"); + // user = props.getProperty("user"); + // password = props.getProperty("password"); + // contenttype = props.getProperty("contenttype"); + // + // String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + // + "version=" + version + "/" + // + "envContext=" + env + "/" + "partner=" + partner; + // LoadPropertyFile.loadAFTProperties(latitude, longitude); + // + // HashMap hm = new HashMap(); + // hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + // hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + // hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + // + // try { + // DME2Client sender = new DME2Client(new URI(url), 5000L); + // sender.setAllowAllHttpReturnCodes(true); + // sender.setMethod(methodTypePut); + // sender.setSubContext(SubContextPathGetPermitPublisher); + // sender.setPayload(""); + // + // sender.addHeader("Content-Type", contenttype); + // sender.setCredentials(user, password); + // sender.setHeaders(hm); + // + // System.out.println("Permitting a producer on topic"); + // String reply = sender.sendAndWait(5000L); + // System.out.println("Reply from server = " + reply); + // + // } catch (DME2Exception e) { + // e.printStackTrace(); + // } catch (URISyntaxException e) { + // e.printStackTrace(); + // } catch (Exception e) { + // e.printStackTrace(); + // } + // } + + public void testGetConsumersTopics() { + LOGGER.info("Test case get list of consumers on topic "); + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + latitude = props.getProperty("Latitude"); + longitude = props.getProperty("Longitude"); + version = props.getProperty("Version"); + serviceName = props.getProperty("ServiceName"); + env = props.getProperty("Environment"); + partner = props.getProperty("Partner"); + SubContextPathGetConsumer = props.getProperty("SubContextPathGetConsumer"); + protocol = props.getProperty("Protocol"); + methodTypeGet = props.getProperty("MethodTypeGet"); + user = props.getProperty("user"); + password = props.getProperty("password"); + contenttype = props.getProperty("contenttype"); + + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + + try { + DME2Client sender = new DME2Client(new URI(url), 5000L); + sender.setAllowAllHttpReturnCodes(true); + sender.setMethod(methodTypeGet); + sender.setSubContext(SubContextPathGetConsumer); + sender.setPayload(""); + + sender.addHeader("Content-Type", contenttype); + sender.setCredentials(user, password); + sender.setHeaders(hm); + + 
LOGGER.info("Retrieving consumer details on topics"); + String reply = sender.sendAndWait(5000L); + assertTrue(LoadPropertyFile.isValidJsonString(reply)); + System.out.println("Reply from server = " + reply); + + } catch (DME2Exception e) { + e.printStackTrace(); + } catch (URISyntaxException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public void testCreateTopic() { + LOGGER.info("Test case create topic starts"); + Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + latitude = props.getProperty("Latitude"); + longitude = props.getProperty("Longitude"); + version = props.getProperty("Version"); + serviceName = props.getProperty("ServiceName"); + env = props.getProperty("Environment"); + partner = props.getProperty("Partner"); + SubContextPathCreateTopic = props.getProperty("SubContextPathCreateTopic"); + protocol = props.getProperty("Protocol"); + methodTypePost = props.getProperty("MethodTypePost"); + user = props.getProperty("user"); + password = props.getProperty("password"); + contenttype = props.getProperty("contenttypejson"); + + String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + "version=" + version + "/" + + "envContext=" + env + "/" + "partner=" + partner; + LoadPropertyFile.loadAFTProperties(latitude, longitude); + HashMap hm = new HashMap(); + hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + + createTopic(url, props, hm); + } + // permitting a consumer on topic is not applicable + // public void testPermitConsumerTopics() { + // Properties props = LoadPropertyFile.getPropertyFileDataProducer(); + // latitude = props.getProperty("Latitude"); + // longitude = props.getProperty("Longitude"); + // version = props.getProperty("Version"); + // serviceName = props.getProperty("ServiceName"); + // env = props.getProperty("Environment"); + // partner = props.getProperty("Partner"); + // SubContextPathGetPermitConsumer = + // props.getProperty("SubContextPathGetPermitConsumer"); + // protocol = props.getProperty("Protocol"); + // methodTypePut = props.getProperty("MethodTypePut"); + // user = props.getProperty("user"); + // password = props.getProperty("password"); + // contenttype = props.getProperty("contenttype"); + // + // String url = protocol + "://DME2SEARCH/" + "service=" + serviceName + "/" + // + "version=" + version + "/" + // + "envContext=" + env + "/" + "partner=" + partner; + // LoadPropertyFile.loadAFTProperties(latitude, longitude); + // + // HashMap hm = new HashMap(); + // hm.put("AFT_DME2_EP_READ_TIMEOUT_MS", "50000"); + // hm.put("AFT_DME2_ROUNDTRIP_TIMEOUT_MS", "240000"); + // hm.put("AFT_DME2_EP_CONN_TIMEOUT", "5000"); + // + // try { + // DME2Client sender = new DME2Client(new URI(url), 5000L); + // sender.setAllowAllHttpReturnCodes(true); + // sender.setMethod(methodTypePut); + // sender.setSubContext(SubContextPathGetPermitConsumer); + // sender.setPayload(""); + // + // sender.addHeader("Content-Type", contenttype); + // sender.setCredentials(user, password); + // sender.setHeaders(hm); + // + // System.out.println("Permitting a consumer on topic"); + // String reply = sender.sendAndWait(5000L); + // assertNotNull(reply); + // System.out.println("Reply from server = " + reply); + // + // } catch (DME2Exception e) { + // e.printStackTrace(); + // } catch (URISyntaxException e) { + // e.printStackTrace(); + // } catch (Exception e) { + // e.printStackTrace(); + // } + // } + +} 
diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/JUnitTestSuite.java b/src/test/java/org/onap/dmaap/mr/test/dme2/JUnitTestSuite.java new file mode 100644 index 0000000..163d3c1 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/JUnitTestSuite.java @@ -0,0 +1,44 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import junit.framework.TestSuite; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; +import org.apache.log4j.Logger; + +@RunWith(Suite.class) +@SuiteClasses({ DME2AdminTest.class, DME2ApiKeyTest.class, DME2ConsumerTest.class, + DME2MetricsTest.class, DME2ProducerTest.class, DME2TopicTest.class, }) +public class JUnitTestSuite { + private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class); + + public static void main(String[] args) { + LOGGER.info("Running the test suite"); + + TestSuite tstSuite = new TestSuite(); + LOGGER.info("Total Test Counts " + tstSuite.countTestCases()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/LoadPropertyFile.java b/src/test/java/org/onap/dmaap/mr/test/dme2/LoadPropertyFile.java new file mode 100644 index 0000000..c534cce --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/LoadPropertyFile.java @@ -0,0 +1,69 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import org.json.JSONObject; + +public class LoadPropertyFile { + private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(LoadPropertyFile.class); + + static public Properties getPropertyFileDataProducer() { + Properties prop = new Properties(); + LOGGER.info("loading the property file"); + try { + InputStream inputStream = LoadPropertyFile.class.getClassLoader() + .getResourceAsStream("dme2testcase.properties"); + + prop.load(inputStream); + LOGGER.info("successfully loaded the property file"); + } catch (IOException e) { + LOGGER.error("Error while loading dme2testcase.properties: " + e); + } + return prop; + } + + static public void loadAFTProperties(String lat, String longi) { + System.setProperty("AFT_LATITUDE", lat); + System.setProperty("AFT_LONGITUDE", longi); + System.setProperty("AFT_ENVIRONMENT", "AFTUAT"); + System.out.println("Latitude = " + lat); + System.out.println("Longitude = " + longi); + } + + static public boolean isValidJsonString(String chkString) { + boolean isJson = true; + try { + new JSONObject(chkString); + } catch (Exception e) { + isJson = false; + } + return isJson; + } +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/TestRunner.java b/src/test/java/org/onap/dmaap/mr/test/dme2/TestRunner.java new file mode 100644 index 0000000..f8696f4 --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/TestRunner.java @@ -0,0 +1,42 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * + *******************************************************************************/ +package org.onap.dmaap.mr.test.dme2; + +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; +import org.apache.log4j.Logger; + +public class TestRunner { + private static final Logger LOGGER = Logger.getLogger(TestRunner.class); + + public static void main(String[] args) { + // run the suite and log any failures + Result result = JUnitCore.runClasses(JUnitTestSuite.class); + for (Failure failure : result.getFailures()) { + LOGGER.info(failure.toString()); + + } + LOGGER.info(result.wasSuccessful()); + } + +} diff --git a/src/test/java/org/onap/dmaap/mr/test/dme2/TopicBeanDME2.java b/src/test/java/org/onap/dmaap/mr/test/dme2/TopicBeanDME2.java new file mode 100644 index 0000000..597546b --- /dev/null +++ b/src/test/java/org/onap/dmaap/mr/test/dme2/TopicBeanDME2.java @@ -0,0 +1,94 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * + *******************************************************************************/ +/** + * + */ +package org.onap.dmaap.mr.test.dme2; + +import java.io.Serializable; + +public class TopicBeanDME2 implements Serializable { + + private static final long serialVersionUID = -8620390377775457949L; + private String topicName; + private String description; + + + private int partitionCount; + private int replicationCount; + private boolean transactionEnabled = false; + + public boolean isTransactionEnabled() { + return transactionEnabled; + } + + public void setTransactionEnabled(boolean transactionEnabled) { + this.transactionEnabled = transactionEnabled; + } + + public TopicBeanDME2() { + super(); + } + + public TopicBeanDME2(String topicName, String description, int partitionCount, int replicationCount, + boolean transactionEnabled) { + super(); + this.topicName = topicName; + this.description = description; + this.partitionCount = partitionCount; + this.replicationCount = replicationCount; + this.transactionEnabled = transactionEnabled; + } + + public String getTopicName() { + return topicName; + } + + public void setTopicName(String topicName) { + this.topicName = topicName; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public int getPartitionCount() { + return partitionCount; + } + + public void setPartitionCount(int partitionCount) { + this.partitionCount = partitionCount; + } + + public int getReplicationCount() { + return replicationCount; + } + + public void setReplicationCount(int replicationCount) { + this.replicationCount = replicationCount; + } + +} -- cgit 1.2.3-korg
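Usage note for the bean above: the tests in this change send an empty payload, but a create-topic body could be assembled from TopicBeanDME2 with the org.json library that LoadPropertyFile already uses. The sketch below is only an illustration; the class name TopicBeanDME2Sample is hypothetical, and the JSON key names produced from the bean's getters are not necessarily the ones the topic API expects.

package org.onap.dmaap.mr.test.dme2;

import org.json.JSONObject;

public class TopicBeanDME2Sample {

    public static void main(String[] args) {
        // Populate the bean the way a create-topic test might (all values are placeholders).
        TopicBeanDME2 bean = new TopicBeanDME2("org.onap.dmaap.sample.topic",
                "sample topic created from the DME2 test suite", 1, 1, false);

        // JSONObject(Object) reflects over the bean's getters, so getTopicName() becomes
        // a "topicName" key, getPartitionCount() a "partitionCount" key, and so on.
        JSONObject payload = new JSONObject(bean);
        System.out.println(payload.toString());
    }
}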