Diffstat (limited to 'src/main/java')
-rw-r--r--  src/main/java/com/att/dmf/mr/CambriaApiException.java (renamed from src/main/java/com/att/nsa/cambria/CambriaApiException.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java (renamed from src/main/java/com/att/nsa/cambria/CambriaApiVersionInfo.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/Consumer.java (renamed from src/main/java/com/att/nsa/cambria/backends/Consumer.java) | 24
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java (renamed from src/main/java/com/att/nsa/cambria/backends/ConsumerFactory.java) | 23
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/MetricsSet.java (renamed from src/main/java/com/att/nsa/cambria/backends/MetricsSet.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/Publisher.java (renamed from src/main/java/com/att/nsa/cambria/backends/Publisher.java) | 19
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java | 403
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java | 126
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt | 386
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java (renamed from src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCache.java) | 331
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java | 161
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java (renamed from src/main/java/com/att/nsa/cambria/backends/kafka/KafkaPublisher.java) | 131
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java (renamed from src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java) | 44
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java | 100
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java (renamed from src/main/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactory.java) | 38
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java (renamed from src/main/java/com/att/nsa/cambria/backends/memory/MemoryMetaBroker.java) | 13
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java (renamed from src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueue.java) | 16
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java (renamed from src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisher.java) | 32
-rw-r--r--  src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java (renamed from src/main/java/com/att/nsa/cambria/backends/memory/MessageLogger.java) | 24
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java (renamed from src/main/java/com/att/nsa/cambria/beans/ApiKeyBean.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java (renamed from src/main/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiter.java) | 157
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/DMaaPContext.java (renamed from src/main/java/com/att/nsa/cambria/beans/DMaaPContext.java) | 12
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java | 365
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java (renamed from src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBroker.java) | 226
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java (renamed from src/main/java/com/att/nsa/cambria/beans/DMaaPMetricsSet.java) | 15
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java (renamed from src/main/java/com/att/nsa/cambria/beans/DMaaPNsaApiDb.java) | 11
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java (renamed from src/main/java/com/att/nsa/cambria/beans/DMaaPZkClient.java) | 10
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java (renamed from src/main/java/com/att/nsa/cambria/beans/DMaaPZkConfigDb.java) | 11
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/LogDetails.java (renamed from src/main/java/com/att/nsa/cambria/beans/LogDetails.java) | 18
-rw-r--r--  src/main/java/com/att/dmf/mr/beans/TopicBean.java (renamed from src/main/java/com/att/nsa/cambria/beans/TopicBean.java) | 18
-rw-r--r--  src/main/java/com/att/dmf/mr/constants/CambriaConstants.java (renamed from src/main/java/com/att/nsa/cambria/constants/CambriaConstants.java) | 14
-rw-r--r--  src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java (renamed from src/main/java/com/att/nsa/cambria/exception/DMaaPAccessDeniedException.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java (renamed from src/main/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java) | 16
-rw-r--r--  src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java (renamed from src/main/java/com/att/nsa/cambria/exception/DMaaPErrorMessages.java) | 42
-rw-r--r--  src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java (renamed from src/main/java/com/att/nsa/cambria/exception/DMaaPResponseCode.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java (renamed from src/main/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapper.java) | 13
-rw-r--r--  src/main/java/com/att/dmf/mr/exception/ErrorResponse.java (renamed from src/main/java/com/att/nsa/cambria/exception/ErrorResponse.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java (renamed from src/main/java/com/att/nsa/cambria/listener/CambriaServletContextListener.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java (renamed from src/main/java/com/att/nsa/cambria/listener/DME2EndPointLoader.java) | 12
-rw-r--r--  src/main/java/com/att/dmf/mr/metabroker/Broker.java (renamed from src/main/java/com/att/nsa/cambria/metabroker/Broker.java) | 16
-rw-r--r--  src/main/java/com/att/dmf/mr/metabroker/Broker1.java | 95
-rw-r--r--  src/main/java/com/att/dmf/mr/metabroker/Topic.java (renamed from src/main/java/com/att/nsa/cambria/metabroker/Topic.java) | 10
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaClient.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaConsumer.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisher.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java) | 10
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java) | 28
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java) | 24
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/impl/Clock.java) | 14
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java) | 19
-rw-r--r--  src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java (renamed from src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java) | 33
-rw-r--r--  src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java (renamed from src/main/java/com/att/nsa/cambria/resources/CambriaEventSet.java) | 22
-rw-r--r--  src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java (renamed from src/main/java/com/att/nsa/cambria/resources/CambriaOutboundEventStream.java) | 383
-rw-r--r--  src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java (renamed from src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java) | 24
-rw-r--r--  src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java (renamed from src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java) | 18
-rw-r--r--  src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java (renamed from src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReader.java) | 16
-rw-r--r--  src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java (renamed from src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java) | 21
-rw-r--r--  src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java (renamed from src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticator.java) | 10
-rw-r--r--  src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java (renamed from src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java) | 40
-rw-r--r--  src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java (renamed from src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticator.java) | 10
-rw-r--r--  src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java (renamed from src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImpl.java) | 17
-rw-r--r--  src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java (renamed from src/main/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java) | 18
-rw-r--r--  src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java (renamed from src/main/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java) | 14
-rw-r--r--  src/main/java/com/att/dmf/mr/service/AdminService.java (renamed from src/main/java/com/att/nsa/cambria/service/AdminService.java) | 10
-rw-r--r--  src/main/java/com/att/dmf/mr/service/ApiKeysService.java (renamed from src/main/java/com/att/nsa/cambria/service/ApiKeysService.java) | 12
-rw-r--r--  src/main/java/com/att/dmf/mr/service/EventsService.java (renamed from src/main/java/com/att/nsa/cambria/service/EventsService.java) | 18
-rw-r--r--  src/main/java/com/att/dmf/mr/service/MMService.java (renamed from src/main/java/com/att/nsa/cambria/service/MMService.java) | 18
-rw-r--r--  src/main/java/com/att/dmf/mr/service/MetricsService.java (renamed from src/main/java/com/att/nsa/cambria/service/MetricsService.java) | 14
-rw-r--r--  src/main/java/com/att/dmf/mr/service/TopicService.java (renamed from src/main/java/com/att/nsa/cambria/service/TopicService.java) | 18
-rw-r--r--  src/main/java/com/att/dmf/mr/service/TransactionService.java (renamed from src/main/java/com/att/nsa/cambria/service/TransactionService.java) | 12
-rw-r--r--  src/main/java/com/att/dmf/mr/service/UIService.java (renamed from src/main/java/com/att/nsa/cambria/service/UIService.java) | 21
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java (renamed from src/main/java/com/att/nsa/cambria/service/impl/AdminServiceImpl.java) | 62
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java (renamed from src/main/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImpl.java) | 38
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java (renamed from src/main/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImpl.java) | 16
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java | 867
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java (renamed from src/main/java/com/att/nsa/cambria/service/impl/MMServiceImpl.java) | 98
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java (renamed from src/main/java/com/att/nsa/cambria/service/impl/MetricsServiceImpl.java) | 22
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java (renamed from src/main/java/com/att/nsa/cambria/service/impl/TopicServiceImpl.java) | 512
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java (renamed from src/main/java/com/att/nsa/cambria/service/impl/TransactionServiceImpl.java) | 14
-rw-r--r--  src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java (renamed from src/main/java/com/att/nsa/cambria/service/impl/UIServiceImpl.java) | 29
-rw-r--r--  src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java (renamed from src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionFactory.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java (renamed from src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObj.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java (renamed from src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObjDB.java) | 10
-rw-r--r--  src/main/java/com/att/dmf/mr/transaction/TransactionObj.java (renamed from src/main/java/com/att/nsa/cambria/transaction/TransactionObj.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/transaction/TrnRequest.java (renamed from src/main/java/com/att/nsa/cambria/transaction/TrnRequest.java) | 8
-rw-r--r--  src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java (renamed from src/main/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java) | 14
-rw-r--r--  src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java (renamed from src/main/java/com/att/nsa/cambria/utils/ConfigurationReader.java) | 69
-rw-r--r--  src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java (renamed from src/main/java/com/att/nsa/cambria/utils/DMaaPCuratorFactory.java) | 11
-rw-r--r--  src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java (renamed from src/main/java/com/att/nsa/cambria/utils/DMaaPResponseBuilder.java) | 40
-rw-r--r--  src/main/java/com/att/dmf/mr/utils/Emailer.java (renamed from src/main/java/com/att/nsa/cambria/utils/Emailer.java) | 11
-rw-r--r--  src/main/java/com/att/dmf/mr/utils/PropertyReader.java (renamed from src/main/java/com/att/nsa/cambria/utils/PropertyReader.java) | 16
-rw-r--r--  src/main/java/com/att/dmf/mr/utils/Utils.java (renamed from src/main/java/com/att/nsa/cambria/utils/Utils.java) | 10
-rw-r--r--  src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java (renamed from src/main/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java) | 30
-rw-r--r--  src/main/java/com/att/mr/filter/ContentLengthFilter.java (renamed from src/main/java/com/att/nsa/filter/ContentLengthFilter.java) | 20
-rw-r--r--  src/main/java/com/att/mr/filter/DefaultLength.java (renamed from src/main/java/com/att/nsa/filter/DefaultLength.java) | 6
-rw-r--r--  src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java | 245
-rw-r--r--  src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java | 323
-rw-r--r--  src/main/java/com/att/nsa/cambria/beans/ZkClientFactory.java | 36
-rw-r--r--  src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java | 795
100 files changed, 4393 insertions, 2805 deletions
diff --git a/src/main/java/com/att/nsa/cambria/CambriaApiException.java b/src/main/java/com/att/dmf/mr/CambriaApiException.java
index 1f8f705..84dd32c 100644
--- a/src/main/java/com/att/nsa/cambria/CambriaApiException.java
+++ b/src/main/java/com/att/dmf/mr/CambriaApiException.java
@@ -8,23 +8,23 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria;
+package com.att.dmf.mr;
import org.json.JSONObject;
+import com.att.dmf.mr.exception.ErrorResponse;
import com.att.nsa.apiServer.NsaAppException;
-import com.att.nsa.cambria.exception.ErrorResponse;
public class CambriaApiException extends NsaAppException
{
diff --git a/src/main/java/com/att/nsa/cambria/CambriaApiVersionInfo.java b/src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java
index 117f8ab..f0c57b5 100644
--- a/src/main/java/com/att/nsa/cambria/CambriaApiVersionInfo.java
+++ b/src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria;
+package com.att.dmf.mr;
import java.io.IOException;
import java.io.InputStream;
@@ -30,7 +30,7 @@ import com.att.eelf.configuration.EELFManager;
/**
* CambriaApiVersionInfo will provide the version of cambria code
*
- * @author author
+ * @author peter
*
*/
public class CambriaApiVersionInfo {
diff --git a/src/main/java/com/att/nsa/cambria/backends/Consumer.java b/src/main/java/com/att/dmf/mr/backends/Consumer.java
index d4946ba..279d48b 100644
--- a/src/main/java/com/att/nsa/cambria/backends/Consumer.java
+++ b/src/main/java/com/att/dmf/mr/backends/Consumer.java
@@ -8,28 +8,30 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends;
+package com.att.dmf.mr.backends;
+
+import java.util.ArrayList;
/**
* A consumer interface. Consumers pull the next message from a given topic.
- * @author author
+ * @author peter
*/
public interface Consumer
{
/**
* A message interface provide the offset and message
- * @author author
+ * @author nilanjana.maity
*
*/
public interface Message
@@ -75,12 +77,14 @@ public interface Consumer
* @param atOffset start with the next message at or after atOffset. -1 means next from last request
* @return the next message, or null if none are waiting
*/
+// Message nextMessage ( long atOffset );
-
+ //Message nextMessage (ArrayList cl);
/**
* Close/clean up this consumer
+ * @return
*/
- void close();
+ boolean close();
/**
* Commit the offset of the last consumed message
@@ -93,4 +97,10 @@ public interface Consumer
* @return offset
*/
long getOffset();
+
+ void setOffset(long offset);
+
+ //public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer();
+
+
}
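
The reworked Consumer interface adds setOffset(long) and changes close() to return a boolean, letting callers detect a shutdown that timed out. A minimal usage sketch, assuming a consumer already obtained from a ConsumerFactory; the drain method and its handling are illustrative, not part of this change:

    import com.att.dmf.mr.backends.Consumer;

    class ConsumerDrainExample {
        static void drain(Consumer consumer) {
            Consumer.Message msg;
            while ((msg = consumer.nextMessage()) != null) {
                System.out.println(msg.getMessage()); // application-specific processing
                consumer.setOffset(msg.getOffset());  // record the last-consumed offset
            }
            consumer.commitOffsets(); // commit the recorded offset back to Kafka
            if (!consumer.close()) {
                // false means the shutdown task timed out and the underlying
                // Kafka connection may still be open
            }
        }
    }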
diff --git a/src/main/java/com/att/nsa/cambria/backends/ConsumerFactory.java b/src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java
index 1597c07..0b684fe 100644
--- a/src/main/java/com/att/nsa/cambria/backends/ConsumerFactory.java
+++ b/src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java
@@ -8,25 +8,28 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends;
+package com.att.dmf.mr.backends;
import java.util.Collection;
+import java.util.HashMap;
+
+import com.att.dmf.mr.CambriaApiException;
/**
* This is the factory class to instantiate the consumer
*
- * @author author
+ * @author nilanjana.maity
*
*/
@@ -37,7 +40,7 @@ public interface ConsumerFactory {
/**
* User defined exception for Unavailable Exception
*
- * @author author
+ * @author nilanjana.maity
*
*/
public class UnavailableException extends Exception {
@@ -87,8 +90,8 @@ public interface ConsumerFactory {
* @return
* @throws UnavailableException
*/
- public Consumer getConsumerFor(String topic, String consumerGroupId,
- String clientId, int timeoutMs) throws UnavailableException;
+ //public Consumer getConsumerFor(String topic, String consumerGroupId,
+ // String clientId, int timeoutMs) throws UnavailableException;
/**
* For factories that employ a caching mechanism, this allows callers to
@@ -107,4 +110,10 @@ public interface ConsumerFactory {
* @return a collection of consumers
*/
public Collection<? extends Consumer> getConsumers();
+
+ public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException;
+ public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException;
+
+
+
}
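
With the reworked factory, callers pass the remote host of the requesting client and must handle CambriaApiException alongside UnavailableException. A sketch of a call site under those assumptions; topic, group, and client id are placeholders:

    import com.att.dmf.mr.CambriaApiException;
    import com.att.dmf.mr.backends.Consumer;
    import com.att.dmf.mr.backends.ConsumerFactory;

    class ConsumerFactoryExample {
        static Consumer subscribe(ConsumerFactory factory, String remoteHost) throws CambriaApiException {
            try {
                // the added remotehost argument identifies the calling node/client
                return factory.getConsumerFor("myTopic", "myGroup", "client-1", 5000, remoteHost);
            } catch (ConsumerFactory.UnavailableException e) {
                // consumers can be temporarily unavailable, e.g. while another API
                // node still owns this group/client in the consumer cache
                return null; // a real caller would surface an HTTP 503 here
            }
        }
    }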
diff --git a/src/main/java/com/att/nsa/cambria/backends/MetricsSet.java b/src/main/java/com/att/dmf/mr/backends/MetricsSet.java
index ce104ac..de665b8 100644
--- a/src/main/java/com/att/nsa/cambria/backends/MetricsSet.java
+++ b/src/main/java/com/att/dmf/mr/backends/MetricsSet.java
@@ -8,23 +8,23 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends;
+package com.att.dmf.mr.backends;
import com.att.nsa.metrics.CdmMetricsRegistry;
/**
* This interface will help to generate metrics
- * @author author
+ * @author nilanjana.maity
*
*/
public interface MetricsSet extends CdmMetricsRegistry{
diff --git a/src/main/java/com/att/nsa/cambria/backends/Publisher.java b/src/main/java/com/att/dmf/mr/backends/Publisher.java
index 696e78f..8ff6ce9 100644
--- a/src/main/java/com/att/nsa/cambria/backends/Publisher.java
+++ b/src/main/java/com/att/dmf/mr/backends/Publisher.java
@@ -8,36 +8,37 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends;
+package com.att.dmf.mr.backends;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import kafka.producer.KeyedMessage;
+import org.apache.kafka.clients.producer.ProducerRecord;
-import com.att.nsa.cambria.beans.LogDetails;
+import com.att.dmf.mr.beans.LogDetails;
+//import kafka.producer.KeyedMessage;
/**
* A publisher interface. Publishers receive messages and post them to a topic.
- * @author author
+ * @author peter
*/
public interface Publisher
{
/**
* A message interface. The message has a key and a body.
- * @author author
+ * @author peter
*/
public interface message
{
@@ -94,5 +95,7 @@ public interface Publisher
*/
public void sendMessages ( String topic, List<? extends message> msgs ) throws IOException;
- public void sendBatchMessage(String topic ,ArrayList<KeyedMessage<String,String>> kms) throws IOException;
+ //public void sendBatchMessage(String topic ,ArrayList<KeyedMessage<String,String>> kms) throws IOException;
+ public void sendBatchMessageNew(String topic ,ArrayList<ProducerRecord<String,String>> kms) throws IOException;
+ public void sendMessagesNew( String topic, List<? extends message> msgs ) throws IOException;
}
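
The batch path moves off the old Scala client's kafka.producer.KeyedMessage onto the new Java client's ProducerRecord via the added sendBatchMessageNew(). A sketch of assembling such a batch, with topic and key as placeholder names:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import com.att.dmf.mr.backends.Publisher;

    class PublisherBatchExample {
        static void publishBatch(Publisher publisher, List<String> payloads) throws IOException {
            ArrayList<ProducerRecord<String, String>> batch = new ArrayList<>();
            for (String payload : payloads) {
                // the key drives partition selection; a null key lets the client spread records
                batch.add(new ProducerRecord<>("myTopic", "myKey", payload));
            }
            publisher.sendBatchMessageNew("myTopic", batch);
        }
    }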
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java
new file mode 100644
index 0000000..f7f5ba7
--- /dev/null
+++ b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java
@@ -0,0 +1,403 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.KafkaException;
+
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.constants.CambriaConstants;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * A consumer instance that's created per-request. These are stateless so that
+ * clients can connect to this service as a proxy.
+ *
+ * @author Ram
+ *
+ */
+public class Kafka011Consumer implements Consumer {
+ private enum State {
+ OPENED, CLOSED
+ }
+
+ // @Autowired
+ // KafkaLiveLockAvoider kafkaLiveLockAvoider;
+ /**
+ * KafkaConsumer() is constructor. It has following 4 parameters:-
+ *
+ * @param topic
+ * @param group
+ * @param id
+ * @param cc
+ *
+ */
+
+ public Kafka011Consumer(String topic, String group, String id, KafkaConsumer<String, String> cc,
+ KafkaLiveLockAvoider2 klla) throws Exception {
+ fTopic = topic;
+ fGroup = group;
+ fId = id;
+ fCreateTimeMs = System.currentTimeMillis();
+ fLastTouch = fCreateTimeMs;
+ fPendingMsgs = new LinkedBlockingQueue<ConsumerRecord<String, String>>();
+ fLogTag = fGroup + "(" + fId + ")/" + fTopic;
+ offset = 0;
+ state = Kafka011Consumer.State.OPENED;
+ kConsumer = cc;
+ fKafkaLiveLockAvoider = klla;
+ synchronized (kConsumer) {
+ kConsumer.subscribe(Arrays.asList(topic));
+ }
+ }
+
+ private Consumer.Message makeMessage(final ConsumerRecord<String, String> msg) {
+ return new Consumer.Message() {
+ @Override
+ public long getOffset() {
+ offset = msg.offset();
+ return offset;
+ }
+
+ @Override
+ public String getMessage() {
+ return new String(msg.value());
+ }
+ };
+ }
+
+ @Override
+ public synchronized Consumer.Message nextMessage() {
+
+ try {
+ if (fPendingMsgs.size() > 0) {
+ return makeMessage(fPendingMsgs.take());
+ }
+ } catch (InterruptedException x) {
+ log.warn("After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage() + ")",
+ x);
+ }
+
+ Callable<Boolean> run = new Callable<Boolean>() {
+ @Override
+ public Boolean call() throws Exception {
+ try {
+ ConsumerRecords<String, String> records;
+ synchronized (kConsumer) {
+ records = kConsumer.poll(500);
+ }
+ for (ConsumerRecord<String, String> record : records) {
+ // foundMsgs = true;
+ fPendingMsgs.offer(record);
+ }
+
+ } catch (KafkaException x) {
+ log.debug(fLogTag + ": KafkaException " + x.getMessage());
+
+ } catch (java.lang.IllegalStateException | java.lang.IllegalArgumentException x) {
+ log.error(fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. "
+ + x.getMessage());
+
+ }
+
+ // return null;
+ return true;
+ }
+ };
+
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ RunnableFuture future = new FutureTask(run);
+ ExecutorService service = Executors.newSingleThreadExecutor();
+ service.execute(future);
+ try {
+			future.get(5, TimeUnit.SECONDS); // wait up to
+			// 5 seconds for the poll task to complete
+ } catch (TimeoutException ex) {
+ // timed out. Try to stop the code if possible.
+ String apiNodeId = null;
+ try {
+ apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
+ } catch (UnknownHostException e1) {
+ // TODO Auto-generated catch block
+ e1.printStackTrace();
+ }
+
+ try {
+ if (fKafkaLiveLockAvoider != null)
+ fKafkaLiveLockAvoider.unlockConsumerGroup(apiNodeId, fTopic + "::" + fGroup);
+ } catch (Exception e) {
+				log.error("unlockConsumerGroup(" + apiNodeId + "," + fTopic + "::" + fGroup + ") failed: " + e);
+ }
+
+ forcePollOnConsumer();
+ future.cancel(true);
+ } catch (Exception ex) {
+ // timed out. Try to stop the code if possible.
+ future.cancel(true);
+ }
+ service.shutdown();
+
+ return null;
+
+ }
+
+ /**
+ * getName() method returns string type value. returns 3 parameters in
+ * string:- fTopic,fGroup,fId
+ *
+ * @Override
+ */
+ public String getName() {
+ return fTopic + " : " + fGroup + " : " + fId;
+ }
+
+ /**
+ * getCreateTimeMs() method returns long type value. returns fCreateTimeMs
+ * variable value
+ *
+ * @Override
+ *
+ */
+ public long getCreateTimeMs() {
+ return fCreateTimeMs;
+ }
+
+ public org.apache.kafka.clients.consumer.KafkaConsumer<String, String> getConsumer() {
+ return kConsumer;
+ }
+
+ /**
+ * getLastAccessMs() method returns long type value. returns fLastTouch
+ * variable value
+ *
+ * @Override
+ *
+ */
+ public long getLastAccessMs() {
+ return fLastTouch;
+ }
+
+ /**
+ * getOffset() method returns long type value. returns offset variable value
+ *
+ * @Override
+ *
+ */
+ public long getOffset() {
+ return offset;
+ }
+
+ /**
+ * commit offsets commitOffsets() method will be called on closed of
+ * KafkaConsumer.
+ *
+ * @Override
+ *
+ *
+ * public void commitOffsets() { if (getState() ==
+ * KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called
+ * on closed KafkaConsumer " + getName()); return; }
+ * fConnector.commitOffsets(); }
+ */
+
+ /**
+ * updating fLastTouch with current time in ms
+ */
+ public void touch() {
+ fLastTouch = System.currentTimeMillis();
+ }
+
+ /**
+ * getLastTouch() method returns long type value. returns fLastTouch
+ * variable value
+ *
+ */
+ public long getLastTouch() {
+ return fLastTouch;
+ }
+
+ /**
+ * setting the kafkaConsumer state to closed
+ */
+ // public synchronized boolean close() {
+ public boolean close() {
+ if (getState() == Kafka011Consumer.State.CLOSED) {
+
+ log.error("close() called on closed KafkaConsumer " + getName());
+ return true;
+ }
+
+ // fConnector.shutdown();
+ boolean retVal = kafkaConnectorshuttask();
+ return retVal;
+
+ }
+
+ /* time out if the kafka shutdown fails for some reason */
+
+ private boolean kafkaConnectorshuttask() {
+ Callable<Boolean> run = new Callable<Boolean>() {
+ @Override
+ public Boolean call() throws Exception {
+
+ try {
+ // System.out.println("attempt to delete " + kConsumer);
+ kConsumer.close();
+
+ } catch (Exception e) {
+					log.info("@Kafka Stream shutdown error occurred " + getName() + " " + e);
+					throw new Exception("@Kafka Stream shutdown error occurred " + getName() + " " + e);
+ // return false;
+ }
+				log.info("Kafka connection closure within 200 seconds by an Executors task");
+
+ return true;
+ }
+ };
+
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ RunnableFuture future = new FutureTask(run);
+ ExecutorService service = Executors.newSingleThreadExecutor();
+ service.execute(future);
+ try {
+			future.get(200, TimeUnit.SECONDS); // wait up to
+			// 200 seconds for the close to complete
+ } catch (TimeoutException ex) {
+ // timed out. Try to stop the code if possible.
+			log.info("Timeout occurred - Kafka connection closure within 200 seconds by an Executors task");
+ future.cancel(true);
+ setState(Kafka011Consumer.State.OPENED);
+ } catch (Exception ex) {
+ // timed out. Try to stop the code if possible.
+			log.error("Exception occurred - Kafka connection closure within 200 seconds by an Executors task"
+ + ex);
+ future.cancel(true);
+ setState(Kafka011Consumer.State.OPENED);
+ return false;
+ }
+ service.shutdown();
+ setState(Kafka011Consumer.State.CLOSED);
+ return true;
+ }
+
+ public void forcePollOnConsumer() {
+ Kafka011ConsumerUtil.forcePollOnConsumer(fTopic, fGroup, fId);
+
+ }
+
+ /**
+ * getConsumerGroup() returns Consumer group
+ *
+ * @return
+ */
+ public String getConsumerGroup() {
+ return fGroup;
+ }
+
+ /**
+ * getConsumerId returns Consumer Id
+ *
+ * @return
+ */
+ public String getConsumerId() {
+ return fId;
+ }
+
+ /**
+ * getState returns kafkaconsumer state
+ *
+ * @return
+ */
+ private Kafka011Consumer.State getState() {
+ return this.state;
+ }
+
+ /**
+ * setState() sets the kafkaConsumer state
+ *
+ * @param state
+ */
+ private void setState(Kafka011Consumer.State state) {
+ this.state = state;
+ }
+
+ // private ConsumerConnector fConnector;
+ private final String fTopic;
+ private final String fGroup;
+ private final String fId;
+ private final String fLogTag;
+ // private final KafkaStream<byte[], byte[]> fStream;
+ private KafkaConsumer<String, String> kConsumer;
+ private long fCreateTimeMs;
+ private long fLastTouch;
+ private long offset;
+ private Kafka011Consumer.State state;
+ private KafkaLiveLockAvoider2 fKafkaLiveLockAvoider;
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011Consumer.class);
+ private final LinkedBlockingQueue<ConsumerRecord<String, String>> fPendingMsgs;
+ //private ArrayList<Kafka011Consumer> fconsumerList;
+ @Override
+ public void commitOffsets() {
+ if (getState() == Kafka011Consumer.State.CLOSED) {
+ log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
+ return;
+ }
+ kConsumer.commitSync();
+ // fConsumer.close();
+
+ }
+
+ @Override
+ public void setOffset(long offsetval) {
+ offset = offsetval;
+ }
+
+
+ public void setConsumerCache(KafkaConsumerCache cache) {
+ }
+
+ //@Override
+ //public Message nextMessage(ArrayList<?> l) {
+ // TODO Auto-generated method stub
+ //return null;
+ //}
+}
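
nextMessage() above bounds kConsumer.poll(500) with a FutureTask and a five-second get(), and on timeout cancels the task, asks the live-lock avoider to unlock the group, and forces a poll. The bounding pattern in isolation, as a self-contained sketch; the method and parameter names are mine, not from the diff:

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    class DeadlineCallExample {
        // Run a potentially blocking call on a worker thread and give up after
        // the deadline instead of stalling the request thread. As in
        // nextMessage(), this pays for a fresh single-thread executor per call.
        static <T> T callWithDeadline(Callable<T> task, long deadlineSeconds) {
            ExecutorService service = Executors.newSingleThreadExecutor();
            Future<T> future = service.submit(task);
            try {
                return future.get(deadlineSeconds, TimeUnit.SECONDS);
            } catch (TimeoutException timedOut) {
                future.cancel(true); // best-effort interrupt of the blocked call
                return null;
            } catch (Exception other) {
                future.cancel(true);
                return null;
            } finally {
                service.shutdown();
            }
        }
    }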
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java
new file mode 100644
index 0000000..ea9407b
--- /dev/null
+++ b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java
@@ -0,0 +1,126 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+import java.util.ArrayList;
+
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * A consumer Util class for force polling when a rebalance issue is anticipated
+ *
+ * @author Ram
+ *
+ */
+public class Kafka011ConsumerUtil {
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011ConsumerUtil.class);
+
+ /**
+ * @param fconsumercache
+ * @param fTopic
+ * @param fGroup
+ * @param fId
+ * @return
+ */
+ public static boolean forcePollOnConsumer(final String fTopic, final String fGroup, final String fId) {
+
+ Thread forcepollThread = new Thread(new Runnable() {
+ public void run() {
+ try {
+
+ ArrayList<Kafka011Consumer> kcsList = null;
+
+ kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(fTopic + "::" + fGroup + "::", fId);
+ if (null != kcsList) {
+ for (int counter = 0; counter < kcsList.size(); counter++) {
+
+ Kafka011Consumer kc1 = kcsList.get(counter);
+
+ try {
+ ConsumerRecords<String, String> recs = kc1.getConsumer().poll(0);
+ log.info("soft poll on " + kc1);
+ } catch (java.util.ConcurrentModificationException e) {
+									log.error("Error occurred during force poll: " + e);
+ }
+
+ }
+
+ }
+
+ } catch (Exception e) {
+					log.error("Force poll thread failed for " + fGroup + " " + e.getMessage());
+ e.printStackTrace();
+ }
+ }
+ });
+
+ forcepollThread.start();
+
+ return false;
+
+ }
+
+ /**
+ * @param fconsumercache
+ * @param group
+ * @return
+ */
+ public static boolean forcePollOnConsumer(final String group) {
+
+ Thread forcepollThread = new Thread(new Runnable() {
+ public void run() {
+ try {
+ ArrayList<Kafka011Consumer> kcsList = new ArrayList<Kafka011Consumer>();
+ kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(group);
+
+ if (null != kcsList) {
+
+ for (int counter = 0; counter < kcsList.size(); counter++) {
+
+ Kafka011Consumer kc1 = kcsList.get(counter);
+ log.info("soft poll on remote nodes " + kc1);
+ ConsumerRecords<String, String> recs = kc1.getConsumer().poll(0);
+ }
+
+ }
+
+ } catch (java.util.ConcurrentModificationException e) {
+				log.error("Error occurred during force poll: " + e);
+ } catch (Exception e) {
+				log.error("Force poll thread failed for " + group + " " + e.getMessage());
+ e.printStackTrace();
+ }
+ }
+ });
+
+ forcepollThread.start();
+ return false;
+
+ }
+
+}
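
Both overloads start a detached thread that calls poll(0) on each cached consumer, nudging the Kafka client to finish a pending rebalance; ConcurrentModificationException is caught because KafkaConsumer is single-threaded and the owning request thread may be polling at the same time. A hypothetical call site, mirroring how Kafka011Consumer triggers it after a bounded poll times out; the names are placeholders:

    // After a bounded poll times out, nudge every cached consumer for this
    // topic/group so the rebalance can complete.
    Kafka011ConsumerUtil.forcePollOnConsumer("myTopic", "myGroup", "client-1");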
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt
new file mode 100644
index 0000000..dd6259f
--- /dev/null
+++ b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt
@@ -0,0 +1,386 @@
+package com.att.dmf.mr.backends.kafka;
+
+import java.util.Arrays;
+import java.util.Properties;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.KafkaException;
+
+import com.att.dmf.mr.backends.Consumer;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * A consumer instance that's created per-request. These are stateless so that
+ * clients can connect to this service as a proxy.
+ *
+ * @author peter
+ *
+ */
+public class KafkaConsumer implements Consumer {
+ private enum State {
+ OPENED, CLOSED
+ }
+
+ /**
+ * KafkaConsumer() is constructor. It has following 4 parameters:-
+ *
+ * @param topic
+ * @param group
+ * @param id
+ * @param cc
+ *
+ */
+
+ public KafkaConsumer(String topic, String group, String id, Properties prop) throws Exception {
+ fTopic = topic;
+ fGroup = group;
+ fId = id;
+ // fConnector = cc;
+
+ fCreateTimeMs = System.currentTimeMillis();
+ fLastTouch = fCreateTimeMs;
+ fPendingMsgs = new LinkedBlockingQueue<ConsumerRecord<String,String>> ();
+ fLogTag = fGroup + "(" + fId + ")/" + fTopic;
+ offset = 0;
+
+ state = KafkaConsumer.State.OPENED;
+
+ // final Map<String, Integer> topicCountMap = new HashMap<String,
+ // Integer>();
+ // topicCountMap.put(fTopic, 1);
+ // log.info(fLogTag +" kafka Consumer started at "
+ // +System.currentTimeMillis());
+ // final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
+ // fConnector.createMessageStreams(topicCountMap);
+ // final List<KafkaStream<byte[], byte[]>> streams =
+ // consumerMap.get(fTopic);
+
+ kConsumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(prop);
+ // System.out.println("I am in Consumer APP " + topic + "-- " +
+ // fConsumer);
+ kConsumer.subscribe(Arrays.asList(topic));
+ log.info(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+ System.out.println("-----id " +id);
+
+
+ try { ConsumerRecords<String, String> records =
+ kConsumer.poll(500); System.out.println("---" +
+ records.count());
+
+ for (ConsumerRecord<String, String> record : records) {
+ System.out.printf("offset = %d, key = %s, value = %s",
+ record.offset(), record.key(), record.value()); String t =
+ record.value();
+
+ }
+ }catch(Exception e){
+ System.out.println( e);
+ }
+ System.out.println(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+ kConsumer.commitSync();
+ // fConsumer.close();
+
+
+ /*
+ * ConsumerRecords<String, String> records = fConsumer.poll(500);
+ * System.out.println("---" + records.count());
+ *
+ * for (ConsumerRecord<String, String> record : records) {
+ * System.out.printf("offset = %d, key = %s, value = %s",
+ * record.offset(), record.key(), record.value()); String t =
+ * record.value();
+ *
+ * }
+ *
+ *
+ * fConsumer.commitSync(); fConsumer.close();
+ */
+
+ // fStream = streams.iterator().next();
+ }
+
+
+
+ private Consumer.Message makeMessage ( final ConsumerRecord<String,String> msg )
+ {
+ return new Consumer.Message()
+ {
+ @Override
+ public long getOffset ()
+ {
+ return msg.offset ();
+ }
+
+ @Override
+ public String getMessage ()
+ {
+ return new String ( msg.value () );
+ }
+ };
+ }
+
+ @Override
+ public synchronized Consumer.Message nextMessage ()
+ {
+
+ try
+ {
+ if ( fPendingMsgs.size () > 0 )
+ {
+ return makeMessage ( fPendingMsgs.take () );
+ }
+ }
+ catch ( InterruptedException x )
+ {
+ log.warn ( "After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage () + ")", x );
+ }
+
+
+ try
+ {
+ boolean foundMsgs = false;
+ System.out.println("entering into pollingWWWWWWWWWWWWWWWWW");
+ final ConsumerRecords<String,String> records = kConsumer.poll ( 100 );
+ System.out.println("polling doneXXXXXXXXXXXXXXXXXXXXXXXXXXX....");
+ for ( ConsumerRecord<String,String> record : records )
+ {
+ foundMsgs = true;
+ fPendingMsgs.offer ( record );
+ }
+
+ }
+ catch ( KafkaException x )
+ {
+ log.debug ( fLogTag + ": KafkaException " + x.getMessage () );
+
+ }
+ catch ( java.lang.IllegalStateException | java.lang.IllegalArgumentException x )
+ {
+ log.error ( fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. " + x.getMessage () );
+
+ }
+
+ return null;
+ }
+
+
+
+ /**
+ * getName() method returns string type value. returns 3 parameters in
+ * string:- fTopic,fGroup,fId
+ *
+ * @Override
+ */
+ public String getName() {
+ return fTopic + " : " + fGroup + " : " + fId;
+ }
+
+ /**
+ * getCreateTimeMs() method returns long type value. returns fCreateTimeMs
+ * variable value
+ *
+ * @Override
+ *
+ */
+ public long getCreateTimeMs() {
+ return fCreateTimeMs;
+ }
+
+ public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer() {
+ return kConsumer;
+ }
+
+ /**
+ * getLastAccessMs() method returns long type value. returns fLastTouch
+ * variable value
+ *
+ * @Override
+ *
+ */
+ public long getLastAccessMs() {
+ return fLastTouch;
+ }
+
+
+
+ /**
+ * getOffset() method returns long type value. returns offset variable value
+ *
+ * @Override
+ *
+ */
+ public long getOffset() {
+ return offset;
+ }
+
+ /**
+ * commit offsets commitOffsets() method will be called on closed of
+ * KafkaConsumer.
+ *
+ * @Override
+ *
+ *
+ * public void commitOffsets() { if (getState() ==
+ * KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called
+ * on closed KafkaConsumer " + getName()); return; }
+ * fConnector.commitOffsets(); }
+ */
+
+ /**
+ * updating fLastTouch with current time in ms
+ */
+ public void touch() {
+ fLastTouch = System.currentTimeMillis();
+ }
+
+ /**
+ * getLastTouch() method returns long type value. returns fLastTouch
+ * variable value
+ *
+ */
+ public long getLastTouch() {
+ return fLastTouch;
+ }
+
+ /**
+ * setting the kafkaConsumer state to closed
+ */
+ public synchronized boolean close() {
+
+ if (getState() == KafkaConsumer.State.CLOSED) {
+
+ log.warn("close() called on closed KafkaConsumer " + getName());
+ return true;
+ }
+
+ setState(KafkaConsumer.State.CLOSED);
+ // fConnector.shutdown();
+ boolean retVal = kafkaConnectorshuttask();
+ return retVal;
+
+ }
+
+ /* time out if the kafka shutdown fails for some reason */
+
+ private boolean kafkaConnectorshuttask() {
+ Callable<Boolean> run = new Callable<Boolean>() {
+ @Override
+ public Boolean call() throws Exception {
+ // your code to be timed
+ try {
+ System.out.println("consumer closing....." + kConsumer);
+ kConsumer.close();
+ } catch (Exception e) {
+				log.info("@@@@@@Kafka Stream shutdown error occurred " + getName() + " " + e);
+ }
+ log.info("Kafka connection closure with in 15 seconds by a Executors task");
+ return true;
+ }
+ };
+
+ RunnableFuture future = new FutureTask(run);
+ ExecutorService service = Executors.newSingleThreadExecutor();
+ service.execute(future);
+ Boolean result = null;
+ try {
+			result = (Boolean) future.get(15, TimeUnit.SECONDS); // wait up to
+			// 15 seconds for the close to complete
+ } catch (TimeoutException ex) {
+ // timed out. Try to stop the code if possible.
+			log.info("Timeout occurred - Kafka connection closure within 15 seconds by an Executors task");
+ future.cancel(true);
+ } catch (Exception ex) {
+ // timed out. Try to stop the code if possible.
+			log.info("Exception occurred - Kafka connection closure within 15 seconds by an Executors task" + ex);
+ future.cancel(true);
+ return false;
+ }
+ service.shutdown();
+ return true;
+ }
+
+ /**
+ * getConsumerGroup() returns Consumer group
+ *
+ * @return
+ */
+ public String getConsumerGroup() {
+ return fGroup;
+ }
+
+ /**
+ * getConsumerId returns Consumer Id
+ *
+ * @return
+ */
+ public String getConsumerId() {
+ return fId;
+ }
+
+ /**
+ * getState returns kafkaconsumer state
+ *
+ * @return
+ */
+ private KafkaConsumer.State getState() {
+ return this.state;
+ }
+
+ /**
+ * setState() sets the kafkaConsumer state
+ *
+ * @param state
+ */
+ private void setState(KafkaConsumer.State state) {
+ this.state = state;
+ }
+
+ // private ConsumerConnector fConnector;
+ private final String fTopic;
+ private final String fGroup;
+ private final String fId;
+ private final String fLogTag;
+ // private final KafkaStream<byte[], byte[]> fStream;
+ private final org.apache.kafka.clients.consumer.KafkaConsumer<String, String> kConsumer;
+ private long fCreateTimeMs;
+ private long fLastTouch;
+ private long offset;
+ private KafkaConsumer.State state;
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
+ private final LinkedBlockingQueue<ConsumerRecord<String,String>> fPendingMsgs;
+ // private static final Logger log =
+ // LoggerFactory.getLogger(KafkaConsumer.class);
+
+ @Override
+ public void commitOffsets() {
+ if (getState() == KafkaConsumer.State.CLOSED) {
+ log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
+ return;
+ }
+ kConsumer.commitSync();
+ // fConsumer.close();
+
+ }
+
+
+
+ @Override
+ public void setOffset(long offsetval) {
+ // TODO Auto-generated method stub
+ offset = offsetval;
+ }
+}
diff --git a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCache.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java
index 9bd67d1..4340cae 100644
--- a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCache.java
+++ b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java
@@ -8,21 +8,24 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends.kafka;
+package com.att.dmf.mr.backends.kafka;
import java.io.IOException;
+import java.net.InetAddress;
+import java.util.ArrayList;
import java.util.Collection;
+import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
@@ -31,6 +34,8 @@ import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
+import javax.annotation.Resource;
+
import org.I0Itec.zkclient.exception.ZkException;
import org.I0Itec.zkclient.exception.ZkInterruptedException;
import org.apache.curator.framework.CuratorFramework;
@@ -46,26 +51,40 @@ import org.apache.curator.utils.ZKPaths;
import org.apache.http.annotation.NotThreadSafe;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NoNodeException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.ComponentScan;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.utils.ConfigurationReader;
+//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.utils.ConfigurationReader;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.metrics.CdmTimer;
/**
* @NotThreadSafe but expected to be used within KafkaConsumerFactory, which
* must be
- * @author author
+ * @author peter
*
*/
@NotThreadSafe
public class KafkaConsumerCache {
+ private static KafkaConsumerCache kafkaconscache = null;
+
+ public static KafkaConsumerCache getInstance() {
+ if (kafkaconscache == null)
+ kafkaconscache = new KafkaConsumerCache();
+
+ return kafkaconscache;
+ }
+
private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs";
private static final int kDefault_ConsumerHandoverWaitMs = 500;
@@ -82,7 +101,7 @@ public class KafkaConsumerCache {
// the server at least every 30 seconds, timing out after 2 minutes should
// be okay.
// FIXME: consider allowing the client to specify its expected call rate?
- private static final long kDefault_MustTouchEveryMs = (long)1000 * 60 * 2;
+ private static final long kDefault_MustTouchEveryMs = 1000 * 60 * 2;
// check for expirations pretty regularly
private static final long kDefault_SweepEverySeconds = 15;
@@ -91,10 +110,20 @@ public class KafkaConsumerCache {
NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED
}
+ // @Qualifier("kafkalockavoid")
+
+ // @Resource
+ // @Qualifier("kafkalockavoid")
+ // KafkaLiveLockAvoider2 kafkaLiveLockAvoider;
+
+ @Autowired
+ private DMaaPErrorMessages errorMessages;
+
+ // KafkaLiveLockAvoider kafkaLiveLockAvoider = new KafkaLiveLockAvoider();
/**
* User defined exception class for kafka consumer cache
*
- * @author author
+ * @author nilanjana.maity
*
*/
public class KafkaConsumerCacheException extends Exception {
@@ -126,29 +155,27 @@ public class KafkaConsumerCache {
* @param s
* @param metrics
*/
- public KafkaConsumerCache(String apiId, MetricsSet metrics) {
-
- if (apiId == null) {
- throw new IllegalArgumentException("API Node ID must be specified.");
- }
+ public KafkaConsumerCache() {
- fApiId = apiId;
- // fSettings = s;
- fMetrics = metrics;
- String strkSetting_ZkBasePath= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ZkBasePath);
- if(null==strkSetting_ZkBasePath)strkSetting_ZkBasePath = kDefault_ZkBasePath;
+ String strkSetting_ZkBasePath = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ kSetting_ZkBasePath);
+ if (null == strkSetting_ZkBasePath)
+ strkSetting_ZkBasePath = kDefault_ZkBasePath;
fBaseZkPath = strkSetting_ZkBasePath;
- fConsumers = new ConcurrentHashMap<String, KafkaConsumer>();
+ fConsumers = new ConcurrentHashMap<String, Kafka011Consumer>();
fSweepScheduler = Executors.newScheduledThreadPool(1);
curatorConsumerCache = null;
status = Status.NOT_STARTED;
+ // Watcher for consumer rebalancing across nodes. Kafka011 rebalancing
+ // work around
listener = new ConnectionStateListener() {
public void stateChanged(CuratorFramework client, ConnectionState newState) {
if (newState == ConnectionState.LOST) {
+
log.info("ZooKeeper connection expired");
handleConnectionLoss();
} else if (newState == ConnectionState.READ_ONLY) {
@@ -175,33 +202,32 @@ public class KafkaConsumerCache {
* @throws KafkaConsumerCacheException
*/
public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException {
- try {
- // CuratorFramework curator = null;
+ if (fApiId == null) {
+ throw new IllegalArgumentException("API Node ID must be specified.");
+ }
+
+ try {
- // Changed the class from where we are initializing the curator
- // framework
- if (mode != null && mode.equals(CambriaConstants.CAMBRIA)) {
- curator = ConfigurationReader.getCurator();
- } else if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
+ if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
curator = getCuratorFramework(curator);
}
-
curator.getConnectionStateListenable().addListener(listener);
-
setStatus(Status.CONNECTED);
-
curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true);
curatorConsumerCache.start();
-
curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() {
public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
switch (event.getType()) {
case CHILD_ADDED: {
- final String apiId = new String(event.getData().getData());
- final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
+ try {
+ final String apiId = new String(event.getData().getData());
+ final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
- log.info(apiId + " started consumer " + consumer);
+ log.info(apiId + " started consumer " + consumer);
+ } catch (Exception ex) {
+ log.info("#Error Occured during Adding child" + ex);
+ }
break;
}
case CHILD_UPDATED: {
@@ -209,8 +235,11 @@ public class KafkaConsumerCache {
final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
if (fConsumers.containsKey(consumer)) {
- log.info(apiId + " claimed consumer " + consumer + " from " + fApiId);
-
+ log.info(apiId + " claimed consumer " + consumer + " from " + fApiId
+ + " but wont hand over");
+ // Commented so that it dont give the connection
+ // until the active node is running for this client
+ // id.
dropClaimedConsumer(consumer);
}
@@ -220,12 +249,14 @@ public class KafkaConsumerCache {
final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
if (fConsumers.containsKey(consumer)) {
- log.info("Someone wanted consumer " + consumer + " gone; removing it from the cache");
+ log.info("Someone wanted consumer " + consumer
+ + " gone; but not removing it from the cache");
dropConsumer(consumer, false);
}
break;
}
+
default:
break;
}
@@ -236,21 +267,23 @@ public class KafkaConsumerCache {
EnsurePath ensurePath = new EnsurePath(fBaseZkPath);
ensurePath.ensure(curator.getZookeeperClient());
- //final long freq = fSettings.getLong(kSetting_SweepEverySeconds, kDefault_SweepEverySeconds);
+ // final long freq = fSettings.getLong(kSetting_SweepEverySeconds,
+ // kDefault_SweepEverySeconds);
long freq = kDefault_SweepEverySeconds;
- String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_SweepEverySeconds);
- if(null==strkSetting_SweepEverySeconds) {
- strkSetting_SweepEverySeconds = kDefault_SweepEverySeconds+"";
+ String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ kSetting_SweepEverySeconds);
+ if (null != strkSetting_SweepEverySeconds) {
+ freq = Long.parseLong(strkSetting_SweepEverySeconds);
}
-
- freq = Long.parseLong(strkSetting_SweepEverySeconds);
-
+
fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS);
log.info("KafkaConsumerCache started");
log.info("sweeping cached clients every " + freq + " seconds");
} catch (ZkException e) {
+ log.error("@@@@@@ ZK Exception occured for " + e);
throw new KafkaConsumerCacheException(e);
} catch (Exception e) {
+ log.error("@@@@@@ Exception occured for " + e);
throw new KafkaConsumerCacheException(e);
}
}
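
For reference, startCache() above boils down to the standard Curator wiring: a connection-state listener plus a PathChildrenCache on the consumer base path. A self-contained sketch of that pattern, with a placeholder connect string and path (not values from this codebase):

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.framework.recipes.cache.PathChildrenCache;
    import org.apache.curator.retry.ExponentialBackoffRetry;

    public class CacheWiringSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder connect string; real code reads it from configuration.
            CuratorFramework curator = CuratorFrameworkFactory.newClient(
                    "localhost:2181", new ExponentialBackoffRetry(1000, 3));
            curator.start();

            // Log connection transitions, as the cache's listener does.
            curator.getConnectionStateListenable().addListener(
                    (client, newState) -> System.out.println("ZK state: " + newState));

            // Watch children of a consumer-cache base path (placeholder).
            PathChildrenCache cache = new PathChildrenCache(curator, "/example/consumers", true);
            cache.getListenable().addListener(
                    (client, event) -> System.out.println(event.getType() + " " + event.getData()));
            cache.start();
        }
    }
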
@@ -270,7 +303,6 @@ public class KafkaConsumerCache {
} catch (InterruptedException e) {
// Ignore
log.error("error while setting curator framework :" + e.getMessage());
- Thread.currentThread().interrupt();
}
}
@@ -291,9 +323,9 @@ public class KafkaConsumerCache {
curatorConsumerCache.close();
log.info("Curator client closed");
} catch (ZkInterruptedException e) {
- log.error("Curator client close interrupted: " + e);
+ log.warn("Curator client close interrupted: " + e.getMessage());
} catch (IOException e) {
- log.error("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e);
+ log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e.getMessage());
}
curatorConsumerCache = null;
@@ -325,13 +357,13 @@ public class KafkaConsumerCache {
* @param clientId
* @return a consumer, or null
*/
- public KafkaConsumer getConsumerFor(String topic, String consumerGroupId, String clientId)
+ public Kafka011Consumer getConsumerFor(String topic, String consumerGroupId, String clientId)
throws KafkaConsumerCacheException {
if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
throw new KafkaConsumerCacheException("The cache service is unavailable.");
final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId);
- final KafkaConsumer kc = fConsumers.get(consumerKey);
+ final Kafka011Consumer kc = fConsumers.get(consumerKey);
if (kc != null) {
log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch());
@@ -346,6 +378,66 @@ public class KafkaConsumerCache {
}
/**
+ * Get a cached consumer by topic, group, and id, if it exists (and remains
+ * valid) In addition, this method waits for all other consumer caches in
+ * the cluster to release their ownership and delete their version of this
+ * consumer.
+ *
+ * @param topic
+ * @param consumerGroupId
+ * @param clientId
+ * @return a consumer, or null
+ */
+ public ArrayList<Kafka011Consumer> getConsumerListForCG(String topicgroup, String clientId)
+ throws KafkaConsumerCacheException {
+ if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
+ throw new KafkaConsumerCacheException("The cache service is unavailable.");
+ ArrayList<Kafka011Consumer> kcl = new ArrayList<Kafka011Consumer>();
+ // final String consumerKey = makeConsumerKey(topic, consumerGroupId,
+ // clientId);
+ Enumeration<String> strEnum = fConsumers.keys();
+ String consumerLocalKey = null;
+ while (strEnum.hasMoreElements()) {
+ consumerLocalKey = strEnum.nextElement();
+
+ if (consumerLocalKey.startsWith(topicgroup) && (!consumerLocalKey.endsWith("::" + clientId))) {
+
+ // System.out.println("consumer key returning from
+ // getConsumerListforCG +++++++++ " + consumerLocalKey
+ // + " " + fConsumers.get(consumerLocalKey));
+ kcl.add(fConsumers.get(consumerLocalKey));
+
+ }
+ }
+
+ return kcl;
+ }
+
+ public ArrayList<Kafka011Consumer> getConsumerListForCG(String group) throws KafkaConsumerCacheException {
+ if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
+ throw new KafkaConsumerCacheException("The cache service is unavailable.");
+ ArrayList<Kafka011Consumer> kcl = new ArrayList<Kafka011Consumer>();
+ // final String consumerKey = makeConsumerKey(topic, consumerGroupId,
+ // clientId);
+ Enumeration<String> strEnum = fConsumers.keys();
+ String consumerLocalKey = null;
+ while (strEnum.hasMoreElements()) {
+ consumerLocalKey = strEnum.nextElement();
+
+ if (consumerLocalKey.startsWith(group)) {
+
+ // System.out.println("consumer key returning from
+ // getConsumerListforCG +++++++++ " + consumerLocalKey
+ // + " " + fConsumers.get(consumerLocalKey));
+ kcl.add(fConsumers.get(consumerLocalKey));
+
+ }
+ }
+
+ return kcl;
+ }
+
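
The two lookups above depend on the key layout produced by makeConsumerKey(): judging from the startsWith/endsWith checks, keys concatenate topic, group, and client id with "::" separators. A small sketch of that prefix matching in isolation, with made-up key values:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ConcurrentHashMap;

    public class KeyMatchSketch {
        public static void main(String[] args) {
            ConcurrentHashMap<String, String> consumers = new ConcurrentHashMap<>();
            consumers.put("topicA::group1::client1", "consumer-1");
            consumers.put("topicA::group1::client2", "consumer-2");
            consumers.put("topicB::group1::client1", "consumer-3");

            // All consumers of topicA::group1 except the caller's own client id.
            String topicGroup = "topicA::group1";
            String clientId = "client1";
            List<String> others = new ArrayList<>();
            for (String key : consumers.keySet()) {
                if (key.startsWith(topicGroup) && !key.endsWith("::" + clientId)) {
                    others.add(consumers.get(key));
                }
            }
            System.out.println(others); // prints [consumer-2]
        }
    }
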
+ /**
* Put a consumer into the cache by topic, group and ID
*
* @param topic
@@ -354,24 +446,28 @@ public class KafkaConsumerCache {
* @param consumer
* @throws KafkaConsumerCacheException
*/
- public void putConsumerFor(String topic, String consumerGroupId, String consumerId, KafkaConsumer consumer)
+ public void putConsumerFor(String topic, String consumerGroupId, String consumerId, Kafka011Consumer consumer)
throws KafkaConsumerCacheException {
if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
throw new KafkaConsumerCacheException("The cache service is unavailable.");
final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
fConsumers.put(consumerKey, consumer);
+
+ // String appId = "node-instance-"+i;
+
+ log.info("^@ Consumer Added to Cache Consumer Key" + consumerKey + " ApiId" + fApiId);
}
public Collection<? extends Consumer> getConsumers() {
- return new LinkedList<KafkaConsumer>(fConsumers.values());
+ return new LinkedList<Kafka011Consumer>(fConsumers.values());
}
/**
* This method is to drop all the consumer
*/
public void dropAllConsumers() {
- for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
+ for (Entry<String, Kafka011Consumer> entry : fConsumers.entrySet()) {
dropConsumer(entry.getKey(), true);
}
@@ -396,18 +492,33 @@ public class KafkaConsumerCache {
}
// First, drop this consumer from our cache
- dropConsumer(key, true);
-
+ boolean isdrop = dropConsumer(key, true);
+ if (!isdrop) {
+ return;
+ }
final CuratorFramework curator = ConfigurationReader.getCurator();
try {
curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key);
+ log.info(" ^ deleted " + fBaseZkPath + "/" + key);
} catch (NoNodeException e) {
- log.error("Exception at : " + e);
+ log.warn("A consumer was deleted from " + fApiId
+ + "'s cache, but no Cambria API node had ownership of it in ZooKeeper");
} catch (Exception e) {
- log.error("Unexpected exception while deleting consumer: " + e);
+ log.debug("Unexpected exception while deleting consumer: " + e.getMessage());
+ log.info(" %%%%%%@# Unexpected exception while deleting consumer: " + e.getMessage());
}
+ try {
+ int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
+ String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ kSetting_ConsumerHandoverWaitMs);
+ if (strkSetting_ConsumerHandoverWaitMs != null)
+ consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
+ Thread.sleep(consumerHandoverWaitMs);
+ } catch (InterruptedException e) {
+ // Ignore
+ }
log.info("Dropped " + key + " consumer due to timeout");
}
@@ -423,7 +534,7 @@ public class KafkaConsumerCache {
fMetrics.onKafkaConsumerClaimed();
log.info("Consumer [" + key + "] claimed by another node.");
}
-
+ log.info("^dropping claimed Kafka consumer " + key);
dropConsumer(key, false);
}
@@ -434,23 +545,38 @@ public class KafkaConsumerCache {
* @param key
* @param dueToTimeout
*/
- private void dropConsumer(String key, boolean dueToTimeout) {
- final KafkaConsumer kc = fConsumers.remove(key);
-
+ private boolean dropConsumer(String key, boolean dueToTimeout) {
+ final Kafka011Consumer kc = fConsumers.get(key);
+ log.info("closing Kafka consumer " + key + " object " + kc);
if (kc != null) {
- log.info("closing Kafka consumer " + key);
- kc.close();
+ // log.info("closing Kafka consumer " + key);
+ if (kc.close()) {
+ fConsumers.remove(key);
+
+ } else {
+ return false;
+ }
}
+ return true;
}
-// private final rrNvReadable fSettings;
- private final MetricsSet fMetrics;
+ // private final rrNvReadable fSettings;
+ private MetricsSet fMetrics;
private final String fBaseZkPath;
private final ScheduledExecutorService fSweepScheduler;
- private final String fApiId;
+ private String fApiId;
+
+ public void setfMetrics(final MetricsSet metrics) {
+ this.fMetrics = metrics;
+ }
+
+ public void setfApiId(final String id) {
+ this.fApiId = id;
+ }
+
private final ConnectionStateListener listener;
- private ConcurrentHashMap<String, KafkaConsumer> fConsumers;
+ private ConcurrentHashMap<String, Kafka011Consumer> fConsumers;
private PathChildrenCache curatorConsumerCache;
private volatile Status status;
@@ -458,18 +584,19 @@ public class KafkaConsumerCache {
private void handleReconnection() {
log.info("Reading current cache data from ZK and synchronizing local cache");
-
final List<ChildData> cacheData = curatorConsumerCache.getCurrentData();
-
// Remove all the consumers in this API nodes cache that now belong to
// other API nodes.
for (ChildData cachedConsumer : cacheData) {
final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath());
final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData())
: "undefined";
-
if (!fApiId.equals(owningApiId)) {
- fConsumers.remove(consumerId);
+ fConsumers.remove(consumerId); // Remove the cached consumer;
+ // the ZK lock may still exist and is not handled
+ // in the Kafka consumer factory.
+ log.info("Synchronizing local cache with ZK: " + owningApiId
+ + " owns " + consumerId + "; removing it locally");
}
}
@@ -492,8 +619,12 @@ public class KafkaConsumerCache {
}
private void closeAllCachedConsumers() {
- for (Entry<String, KafkaConsumer> entry : fConsumers.entrySet()) {
- entry.getValue().close();
+ for (Entry<String, Kafka011Consumer> entry : fConsumers.entrySet()) {
+ try {
+ entry.getValue().close();
+ } catch (Exception e) {
+ log.info("@@@@@@ Error occurd while closing Clearing All cache " + e);
+ }
}
}
@@ -513,22 +644,20 @@ public class KafkaConsumerCache {
throws KafkaConsumerCacheException {
// get a lock at <base>/<topic>::<consumerGroupId>::<consumerId>
final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
+ final CdmTimer timer = new CdmTimer(fMetrics, "CacheSignalOwnership");
try {
final String consumerPath = fBaseZkPath + "/" + consumerKey;
-
log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey);
-
final CuratorFramework curator = ConfigurationReader.getCurator();
try {
curator.setData().forPath(consumerPath, fApiId.getBytes());
} catch (KeeperException.NoNodeException e) {
- log.error(e.toString());
curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes());
}
-
log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey);
+ timer.end();
} catch (Exception e) {
log.error(fApiId + " failed to claim ownership of consumer " + consumerKey);
throw new KafkaConsumerCacheException(e);
@@ -537,36 +666,37 @@ public class KafkaConsumerCache {
log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer");
try {
- int kSetting_ConsumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
- String strkSetting_ConsumerHandoverWaitMs= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_ConsumerHandoverWaitMs+"");
- if(strkSetting_ConsumerHandoverWaitMs!=null) {
- kSetting_ConsumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
- }
- Thread.sleep(kSetting_ConsumerHandoverWaitMs);
- //Thread.sleep(fSettings.getInt(kSetting_ConsumerHandoverWaitMs, kDefault_ConsumerHandoverWaitMs));
+ int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
+ String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ kSetting_ConsumerHandoverWaitMs);
+ if (strkSetting_ConsumerHandoverWaitMs != null)
+ consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
+ Thread.sleep(consumerHandoverWaitMs);
} catch (InterruptedException e) {
- log.error(e.toString());
- Thread.currentThread().interrupt();
+ // Ignore
}
}
- private void sweep() {
+ public KafkaLiveLockAvoider2 getkafkaLiveLockAvoiderObj() {
+ return null;
+ }
+
+ public void sweep() {
final LinkedList<String> removals = new LinkedList<String>();
long mustTouchEveryMs = kDefault_MustTouchEveryMs;
- String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_TouchEveryMs);
- //if(null!=strkSetting_TouchEveryMs) strkSetting_TouchEveryMs = kDefault_MustTouchEveryMs+"";
- if(null!=strkSetting_TouchEveryMs)
- {
- mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);
+ String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ kSetting_TouchEveryMs);
+ if (null != strkSetting_TouchEveryMs) {
+ mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);
}
- //final long mustTouchEveryMs = fSettings.getLong(kSetting_TouchEveryMs, kDefault_MustTouchEveryMs);
+ // final long mustTouchEveryMs =
+ // fSettings.getLong(kSetting_TouchEveryMs, kDefault_MustTouchEveryMs);
final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs;
- for (Entry<String, KafkaConsumer> e : fConsumers.entrySet()) {
+ for (Entry<String, Kafka011Consumer> e : fConsumers.entrySet()) {
final long lastTouchMs = e.getValue().getLastTouch();
-
- log.debug("consumer " + e.getKey() + " last touched at " + lastTouchMs);
+ log.debug("consumer #####1" + e.getKey() + " " + lastTouchMs + " < " + oldestAllowedTouchMs);
if (lastTouchMs < oldestAllowedTouchMs) {
log.info("consumer " + e.getKey() + " has expired");
@@ -582,7 +712,7 @@ public class KafkaConsumerCache {
/**
* Creating a thread to run the sweep method
*
- * @author author
+ * @author nilanjana.maity
*
*/
private class sweeper implements Runnable {
@@ -614,5 +744,6 @@ public class KafkaConsumerCache {
}
private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class);
- //private static final Logger log = LoggerFactory.getLogger(KafkaConsumerCache.class);
+ // private static final Logger log =
+ // LoggerFactory.getLogger(KafkaConsumerCache.class);
} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java
new file mode 100644
index 0000000..805701a
--- /dev/null
+++ b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java
@@ -0,0 +1,161 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import javax.annotation.PostConstruct;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.Watcher;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.stereotype.Component;
+import org.springframework.stereotype.Service;
+
+//@ComponentScan(basePackages="com.att.dmf.mr.backends.kafka")
+@Component
+public class KafkaLiveLockAvoider2 {
+
+ public static final String ZNODE_ROOT = "/live-lock-avoid";
+ public static final String ZNODE_LOCKS = "/locks";
+ public static final String ZNODE_UNSTICK_TASKS ="/unstick-tasks";
+
+ private static String locksPath = ZNODE_ROOT+ZNODE_LOCKS;
+ private static String tasksPath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS;
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaLiveLockAvoider2.class.getName());
+
+ @Autowired
+ @Qualifier("curator")
+ private CuratorFramework curatorFramework;
+
+ @PostConstruct
+ public void init() {
+ System.out.println("Welcome......................................................................................");
+ try {
+ if (curatorFramework.checkExists().forPath(locksPath) == null) {
+ curatorFramework.create().creatingParentsIfNeeded().forPath(locksPath);
+ }
+ if (curatorFramework.checkExists().forPath(tasksPath) == null) {
+ curatorFramework.create().creatingParentsIfNeeded().forPath(tasksPath);
+ }
+
+ } catch (Exception e) {
+ //e.printStackTrace();
+ log.error("Error during creation of permanent Znodes under /live-lock-avoid ",e);
+
+ }
+
+
+ }
+ public void unlockConsumerGroup(String appId, String groupName) throws Exception {
+
+ log.info("Signalling unlock to all conumsers of in group [{}] now, " , groupName);
+
+ String fullLockPath = String.format("%s/%s", locksPath, groupName );
+ String fullTasksPath = null;
+
+ try {
+
+ //Use the Curator recipe for a Mutex lock, only one process can be broadcasting unlock instructions for a group
+ InterProcessMutex lock = new InterProcessMutex(curatorFramework, fullLockPath);
+ if ( lock.acquire(100L, TimeUnit.MILLISECONDS) )
+ {
+ try
+ {
+ List<String> taskNodes = curatorFramework.getChildren().forPath(tasksPath);
+ for (String taskNodeName : taskNodes) {
+ if(!taskNodeName.equals(appId)) {
+
+ fullTasksPath = String.format("%s/%s/%s", tasksPath, taskNodeName, groupName);
+ log.info("Writing groupName {} to path {}",groupName, fullTasksPath);
+
+
+ if(curatorFramework.checkExists().forPath(fullTasksPath) != null) {
+ curatorFramework.delete().forPath(fullTasksPath);
+ }
+ curatorFramework.create().withMode(CreateMode.EPHEMERAL).forPath(fullTasksPath);
+ }
+ }
+
+
+ }
+ finally
+ {
+ //Curator lock recipe requires a acquire() to be followed by a release()
+ lock.release();
+ }
+ }else {
+ log.info("Could not obtain the avoider lock, another process has the avoider lock? {}", !lock.isAcquiredInThisProcess() );
+ }
+
+
+ } catch (Exception e) {
+ log.error("Error setting up either lock ZNode {} or task ZNode {}",fullLockPath, fullTasksPath,e);
+ throw e;
+ }
+
+
+ }
+
+ /*
+ * Should be called once per MR server instance.
+ *
+ */
+ public void startNewWatcherForServer(String appId, LiveLockAvoidance avoidanceCallback) {
+ LockInstructionWatcher instructionWatcher = new LockInstructionWatcher(curatorFramework,avoidanceCallback,this);
+ assignNewProcessNode(appId, instructionWatcher);
+
+ }
+
+
+ protected void assignNewProcessNode(String appId, Watcher processNodeWatcher ) {
+
+ String taskHolderZnodePath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS+"/"+appId;
+ //Watcher processNodeWatcher = createWatcher();
+
+ try {
+
+ if(curatorFramework.checkExists().forPath(taskHolderZnodePath) != null) {
+ curatorFramework.delete().deletingChildrenIfNeeded().forPath(taskHolderZnodePath);
+
+ }
+ curatorFramework.create().forPath(taskHolderZnodePath);
+ //setup the watcher
+ curatorFramework.getChildren().usingWatcher(processNodeWatcher).inBackground().forPath(taskHolderZnodePath);
+ log.info("Done creating task holder and watcher for APP name: {}",appId);
+
+ } catch (Exception e) {
+ log.error("Could not add new processing node for name {}", appId, e);
+ }
+
+ }
+
+
+}
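
The unlockConsumerGroup() logic above follows the standard Curator mutex recipe: acquire with a timeout, do the work, release in finally. A minimal standalone sketch of that pattern with a placeholder lock path:

    import java.util.concurrent.TimeUnit;
    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.recipes.locks.InterProcessMutex;

    public class MutexSketch {
        // Placeholder lock path; the avoider uses /live-lock-avoid/locks/<group>.
        static void doExclusively(CuratorFramework curator, Runnable work) throws Exception {
            InterProcessMutex lock = new InterProcessMutex(curator, "/example/locks/demo");
            if (lock.acquire(100L, TimeUnit.MILLISECONDS)) {
                try {
                    work.run();     // only one process runs this at a time
                } finally {
                    lock.release(); // every successful acquire() needs a release()
                }
            }
        }
    }
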
diff --git a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaPublisher.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java
index 42a6bb9..30209f0 100644
--- a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaPublisher.java
+++ b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends.kafka;
+package com.att.dmf.mr.backends.kafka;
import java.io.IOException;
import java.util.ArrayList;
@@ -27,28 +27,32 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
-import kafka.common.FailedToSendMessageException;
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-import kafka.producer.ProducerConfig;
-
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerRecord;
import org.json.JSONException;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.constants.CambriaConstants;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.cambria.backends.Publisher;
-import com.att.nsa.cambria.constants.CambriaConstants;
import com.att.nsa.drumlin.till.nv.rrNvReadable;
+//import kafka.FailedToSendMessageException;
+//import kafka.javaapi.producer.Producer;
+//import kafka.producer.KeyedMessage;
+//import kafka.producer.ProducerConfig;
+//import kafka.producer.KeyedMessage;
+
/**
* Sends raw JSON objects into Kafka.
*
* Could improve space: BSON rather than JSON?
*
- * @author author
+ * @author peter
*
*/
@@ -67,21 +71,32 @@ public class KafkaPublisher implements Publisher {
transferSetting(fSettings, props, "request.required.acks", "1");
transferSetting(fSettings, props, "message.send.max.retries", "5");
transferSetting(fSettings, props, "retry.backoff.ms", "150"); */
- String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list");
- System.out.println("kafkaConnUrl:- "+kafkaConnUrl);
- if(null==kafkaConnUrl){
-
- kafkaConnUrl="localhost:9092";
- }
- transferSetting( props, "metadata.broker.list", kafkaConnUrl);
+ String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list");
+ if(null==kafkaConnUrl){
+
+ kafkaConnUrl="localhost:9092";
+ }
+ //String jaaspath="C:/ATT/Apps/dmaapCodedmaap-framework/dmaap/bundleconfig-local/etc/appprops/kafka_pub_jaas.conf";
+ // props.put("bootstrap.servers", bootSever);
+ //System.setProperty("java.security.auth.login.config",jaaspath);
+
+ /*transferSetting( props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ transferSetting( props, "security.protocol", "SASL_PLAINTEXT");
+ transferSetting( props, "sasl.mechanism", "PLAIN");*/
+ transferSetting( props, "bootstrap.servers",kafkaConnUrl);
+ //transferSetting( props, "metadata.broker.list", kafkaConnUrl);
transferSetting( props, "request.required.acks", "1");
transferSetting( props, "message.send.max.retries", "5");
transferSetting(props, "retry.backoff.ms", "150");
- props.put("serializer.class", "kafka.serializer.StringEncoder");
+ //props.put("serializer.class", "kafka.serializer.StringEncoder");
+
+ props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+ props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
- fConfig = new ProducerConfig(props);
- fProducer = new Producer<String, String>(fConfig);
+ //fConfig = new ProducerConfig(props);
+ //fProducer = new Producer<String, String>(fConfig);
+ fProducer = new KafkaProducer<>(props);
}
/**
@@ -92,19 +107,19 @@ public class KafkaPublisher implements Publisher {
* @throws JSONException
*/
@Override
- public void sendMessage(String topic, message msg) throws IOException, FailedToSendMessageException {
+ public void sendMessage(String topic, message msg) throws IOException{
final List<message> msgs = new LinkedList<message>();
msgs.add(msg);
sendMessages(topic, msgs);
}
- /**
+ /**
* method publishing batch messages
- *
+ * This method is commented from 0.8 to 0.11 upgrade
* @param topic
* @param kms
* throws IOException
- */
+ *
public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
try {
fProducer.send(kms);
@@ -114,8 +129,26 @@ public class KafkaPublisher implements Publisher {
throw new FailedToSendMessageException(excp.getMessage(), excp);
}
- }
+ } */
+
+ /*
+ * Kafka 11.0 Interface
+ * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
+ */
+ public void sendBatchMessageNew(String topic, ArrayList <ProducerRecord<String,String>> kms) throws IOException {
+ try {
+ for (ProducerRecord<String,String> km : kms) {
+ fProducer.send(km);
+ }
+
+ } catch (Exception excp) {
+ log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+ throw new IOException(excp.getMessage(), excp);
+ }
+
+ }
+
/**
* Send a set of messages. Each must have a "key" string value.
*
@@ -123,7 +156,7 @@ public class KafkaPublisher implements Publisher {
* @param msg
* @throws FailedToSendMessageException
* @throws JSONException
- */
+ *
@Override
public void sendMessages(String topic, List<? extends message> msgs)
throws IOException, FailedToSendMessageException {
@@ -141,11 +174,33 @@ public class KafkaPublisher implements Publisher {
log.error("Failed to send message(s) to topic [" + topic + "].", excp);
throw new FailedToSendMessageException(excp.getMessage(), excp);
}
- }
+ } */
+ @Override
+ public void sendMessagesNew(String topic, List<? extends message> msgs)
+ throws IOException {
+ log.info("sending " + msgs.size() + " events to [" + topic + "]");
+ try {
+ final List<ProducerRecord<String, String>> kms = new ArrayList<ProducerRecord<String, String>>(msgs.size());
+ for (message o : msgs) {
+
+ final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, o.getKey(), o.toString());
+ //kms.add(data);
+
+ try {
+
+ fProducer.send(data);
+ } catch (Exception excp) {
+ log.error("Failed to send message(s) to topic [" + topic + "].", excp);
+ throw new Exception(excp.getMessage(), excp);
+ }
+ }
+
+ } catch (Exception e) {
+ log.error("Failed to send message(s) to topic [" + topic + "].", e);
+ }
+ }
//private final rrNvReadable fSettings;
- private ProducerConfig fConfig;
+ //private ProducerConfig fConfig;
private Producer<String, String> fProducer;
/**
@@ -165,4 +220,16 @@ public class KafkaPublisher implements Publisher {
//private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class);
private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
-}
+
+ @Override
+ public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
+ // TODO Auto-generated method stub
+
+ }
+
+ //@Override
+ //public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
+ // TODO Auto-generated method stub
+
+ //}
+} \ No newline at end of file
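
The publisher changes above replace the Scala kafka.javaapi producer with the 0.11 Java client. For reference, a self-contained sketch of the equivalent minimal producer setup; the broker address and topic are placeholders (the diff falls back to localhost:9092 when unconfigured):

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.Producer;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public class ProducerSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092"); // placeholder default, as in the diff
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

            try (Producer<String, String> producer = new KafkaProducer<>(props)) {
                // send() is asynchronous; calling get() on the returned Future blocks for the ack.
                producer.send(new ProducerRecord<>("example-topic", "key", "{\"msg\":\"hello\"}"));
            }
        }
    }
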
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java b/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java
index c49ac4f..a13ecea 100644
--- a/src/main/java/com/att/nsa/cambria/backends/memory/MessageDropper.java
+++ b/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java
@@ -8,54 +8,38 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
+package com.att.dmf.mr.backends.kafka;
-import com.att.nsa.cambria.backends.Publisher;
-import kafka.producer.KeyedMessage;
/**
- * class is used to message publishing
- *
- * @author author
+ * Live Lock Avoidance interface. To be implemented by the main message router client
*
*/
-public class MessageDropper implements Publisher {
+public interface LiveLockAvoidance {
+
/**
- * publish single messages
- * param topic
- * param msg
+ * Gets the unique id
+ * @return the unique id for the Message Router server instance
*/
- @Override
- public void sendMessage(String topic, message msg) throws IOException {
- }
-
+ String getAppId();
+
+
/**
- * publish multiple messages
+ * Main callback to inform the local MR server instance that all consumers in a group need to soft poll
+ * @param groupName name of the Kafka consumer group needed a soft poll
*/
- @Override
- public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
- }
+ void handleRebalanceUnlock( String groupName);
- /**
- * publish batch messages
- */
- @Override
- public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
- }
}
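
For illustration, a minimal hypothetical implementor of the LiveLockAvoidance callback above; the real implementation is wired up elsewhere (the consumer factory), so the soft-poll reaction here is only an assumption:

    package com.att.dmf.mr.backends.kafka;

    // Hypothetical sketch of a LiveLockAvoidance implementor; the real
    // callback is registered by the consumer factory, not shown in this diff.
    public class SimpleLiveLockAvoidance implements LiveLockAvoidance {
        private final String appId;

        public SimpleLiveLockAvoidance(String appId) {
            this.appId = appId;
        }

        @Override
        public String getAppId() {
            return appId;
        }

        @Override
        public void handleRebalanceUnlock(String groupName) {
            // Assumption: a soft poll just means waking any consumer blocked
            // in poll() for this group so the rebalance can proceed.
            System.out.println(appId + ": soft-poll requested for group " + groupName);
        }
    }
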
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java b/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java
new file mode 100644
index 0000000..5d3bc47
--- /dev/null
+++ b/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java
@@ -0,0 +1,100 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.dmf.mr.backends.kafka;
+
+import java.util.List;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.zookeeper.WatchedEvent;
+import org.apache.zookeeper.Watcher;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ *
+ * LockInstructionWatcher
+ * A helper class used internally by the KafkaLiveLockAvoider.
+ *
+ * This class implements the ZooKeeper Watcher callback and listens for changes to child nodes.
+ * Each child node is a Kafka group name that needs to be soft polled; child nodes are deleted
+ * once soft-poll unlocking is finished.
+ *
+ *
+ */
+public class LockInstructionWatcher implements Watcher {
+
+ private CuratorFramework curatorFramework;
+ private LiveLockAvoidance avoidanceCallback;
+ private KafkaLiveLockAvoider2 avoider;
+
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(LockInstructionWatcher.class.getName());
+
+
+ public LockInstructionWatcher(CuratorFramework curatorFramework, LiveLockAvoidance avoidanceCallback,
+ KafkaLiveLockAvoider2 avoider) {
+ super();
+ this.curatorFramework = curatorFramework;
+ this.avoidanceCallback = avoidanceCallback;
+ this.avoider = avoider;
+ }
+
+
+ @Override
+ public void process(WatchedEvent event) {
+
+ switch (event.getType()) {
+ case NodeChildrenChanged:
+
+
+ try {
+
+ log.info("node children changed at path: {}", event.getPath());
+ //String grpName = new String(curatorFramework.getData().forPath(event.getPath()));
+ List<String> children = curatorFramework.getChildren().forPath(event.getPath());
+
+ log.info("found children nodes prodcessing now");
+ for (String child : children) {
+ String childPath = String.format("%s/%s", event.getPath(), child);
+ log.info("Processing child task at node {}",childPath);
+ avoidanceCallback.handleRebalanceUnlock( child);
+ log.info("Deleting child task at node {}",childPath);
+ curatorFramework.delete().forPath(childPath);
+ }
+ //reset the watch with the avoider
+ avoider.assignNewProcessNode(avoidanceCallback.getAppId(), this);
+
+
+ } catch (Exception e) {
+ log.error("Error manipulating ZNode data in watcher",e);
+ }
+
+ break;
+
+ default:
+ log.info("Listner fired on path: {}, with event: {}", event.getPath(), event.getType());
+ break;
+ }
+ }
+
+
+}
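
ZooKeeper watches are one-shot, which is why process() re-registers itself through assignNewProcessNode() after handling each event. A tiny sketch of that re-arm pattern against a bare Curator client, with a placeholder path:

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.zookeeper.WatchedEvent;
    import org.apache.zookeeper.Watcher;

    public class RearmingWatcherSketch implements Watcher {
        private final CuratorFramework curator;
        private final String path; // placeholder node to watch

        public RearmingWatcherSketch(CuratorFramework curator, String path) {
            this.curator = curator;
            this.path = path;
        }

        @Override
        public void process(WatchedEvent event) {
            try {
                System.out.println("event " + event.getType() + " on " + event.getPath());
                // Watches fire once; set the watch again to keep receiving events.
                curator.getChildren().usingWatcher(this).forPath(path);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
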
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactory.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java
index f0982a9..0c34bfd 100644
--- a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryConsumerFactory.java
+++ b/src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java
@@ -8,27 +8,29 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.HashMap;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class MemoryConsumerFactory implements ConsumerFactory
@@ -52,7 +54,7 @@ public class MemoryConsumerFactory implements ConsumerFactory
* @return Consumer
*/
@Override
- public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs )
+ public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs, String remotehost )
{
return new MemoryConsumer ( topic, consumerGroupId );
}
@@ -97,8 +99,9 @@ public class MemoryConsumerFactory implements ConsumerFactory
private long fLastAccessMs;
@Override
- public void close() {
+ public boolean close() {
//Nothing to close/clean up.
+ return true;
}
/**
*
@@ -135,6 +138,16 @@ public class MemoryConsumerFactory implements ConsumerFactory
{
return fLastAccessMs;
}
+
+
+
+ @Override
+ public void setOffset(long offset) {
+ // TODO Auto-generated method stub
+
+ }
+
+
}
@Override
@@ -157,4 +170,13 @@ public class MemoryConsumerFactory implements ConsumerFactory
{
return new ArrayList<MemoryConsumer> ();
}
+
+ @Override
+ public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+ String remotehost) throws UnavailableException, CambriaApiException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+
}
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryMetaBroker.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java
index 87e59c2..22f0588 100644
--- a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryMetaBroker.java
+++ b/src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
import java.util.HashMap;
import java.util.LinkedList;
@@ -27,16 +27,15 @@ import java.util.List;
import java.util.Set;
import java.util.TreeSet;
-import com.att.nsa.cambria.metabroker.Broker;
-import com.att.nsa.cambria.metabroker.Topic;
+import com.att.dmf.mr.metabroker.Broker;
+import com.att.dmf.mr.metabroker.Topic;
import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
import com.att.nsa.security.NsaAcl;
import com.att.nsa.security.NsaApiKey;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class MemoryMetaBroker implements Broker {
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueue.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java
index a0dc8b8..0629972 100644
--- a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueue.java
+++ b/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java
@@ -8,30 +8,30 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
import java.util.ArrayList;
import java.util.HashMap;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.Publisher.message;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.Publisher.message;
/**
* When broker type is memory, then this class is doing all the topic related
* operations
*
- * @author author
+ * @author anowarul.islam
*
*/
public class MemoryQueue {
@@ -120,7 +120,7 @@ public class MemoryQueue {
/**
* static inner class used to details about consumed messages
*
- * @author author
+ * @author anowarul.islam
*
*/
private static class msgInfo implements Consumer.Message {
@@ -152,7 +152,7 @@ public class MemoryQueue {
/**
*
- * @author author
+ * @author sneha.d.desai
*
* private LogBuffer class has synchronized push and read method
*/
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisher.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java
index d653f6e..2b43ed3 100644
--- a/src/main/java/com/att/nsa/cambria/backends/memory/MemoryQueuePublisher.java
+++ b/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java
@@ -8,31 +8,33 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import com.att.nsa.cambria.backends.Publisher;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import org.apache.kafka.clients.producer.ProducerRecord;
+
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
+
-import kafka.producer.KeyedMessage;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class MemoryQueuePublisher implements Publisher {
@@ -46,14 +48,6 @@ public class MemoryQueuePublisher implements Publisher {
fQueue = q;
}
- /**
- * sendBatchMessages
- *
- * @param topic
- * @param kms
- */
- public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
- }
/**
*
@@ -79,6 +73,14 @@ public class MemoryQueuePublisher implements Publisher {
* @param msgs
* @throws IOException
*/
+
+ public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
+
+ }
+
+ public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
+ }
+
public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
for (message m : msgs) {
sendMessage(topic, m);
diff --git a/src/main/java/com/att/nsa/cambria/backends/memory/MessageLogger.java b/src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java
index 9ff8bd6..8e41c9f 100644
--- a/src/main/java/com/att/nsa/cambria/backends/memory/MessageLogger.java
+++ b/src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.backends.memory;
+package com.att.dmf.mr.backends.memory;
import java.io.File;
import java.io.FileNotFoundException;
@@ -28,14 +28,16 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import com.att.nsa.cambria.backends.Publisher;
+import org.apache.kafka.clients.producer.ProducerRecord;
-import kafka.producer.KeyedMessage;
+import com.att.dmf.mr.backends.Publisher;
+
+//import kafka.producer.KeyedMessage;
/**
* class used for logging perspective
*
- * @author author
+ * @author anowarul.islam
*
*/
public class MessageLogger implements Publisher {
@@ -73,13 +75,13 @@ public class MessageLogger implements Publisher {
* @param topic
* @param kms
* @throws IOException
- */
+
@Override
public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws
IOException {
}
-
+ */
private FileOutputStream fStream;
/**
@@ -98,4 +100,10 @@ public class MessageLogger implements Publisher {
fStream.write(msg.getMessage().getBytes());
fStream.write('\n');
}
+ public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
+
+ }
+
+ public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
+ }
}
diff --git a/src/main/java/com/att/nsa/cambria/beans/ApiKeyBean.java b/src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java
index df4a2ed..4f0108f 100644
--- a/src/main/java/com/att/nsa/cambria/beans/ApiKeyBean.java
+++ b/src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
import java.io.Serializable;
@@ -28,7 +28,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import com.att.nsa.drumlin.till.data.uniqueStringGenerator;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
@XmlRootElement
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiter.java b/src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java
index 4e9fc02..5f28367 100644
--- a/src/main/java/com/att/nsa/cambria/beans/DMaaPCambriaLimiter.java
+++ b/src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java
@@ -8,36 +8,38 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
+import java.util.Date;
import java.util.HashMap;
import java.util.concurrent.TimeUnit;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+import com.att.dmf.mr.utils.Utils;
+
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Component;
-
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
import com.att.nsa.drumlin.till.nv.rrNvReadable;
import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
@@ -47,7 +49,7 @@ import com.att.nsa.metrics.impl.CdmRateTicker;
/**
* class provide rate information
*
- * @author author
+ * @author anowarul.islam
*
*/
@Component
@@ -62,13 +64,19 @@ public class DMaaPCambriaLimiter {
@Autowired
public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings)
throws missingReqdSetting, invalidSettingValue {
- fRateInfo = new HashMap<String, RateInfo>();
+ fRateInfo = new HashMap<String, RateInfo>();
+ fRateInfoCheck = new HashMap<String, RateInfoCheck>();
fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
CambriaConstants.kDefault_MaxEmptyPollsPerMinute);
+ fMaxPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxPollsPerMinute,
+ 30);
fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength,
CambriaConstants.kDefault_RateLimitWindowLength);
- fSleepMs = settings.getLong(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
+ fSleepMs = settings.getLong(CambriaConstants.kSetting_SleepMsOnRateLimit,
CambriaConstants.kDefault_SleepMsOnRateLimit);
+ fSleepMs1 = settings.getLong(CambriaConstants.kSetting_SleepMsRealOnRateLimit,
+ 5000);
+
}
/**
@@ -90,8 +98,8 @@ public class DMaaPCambriaLimiter {
* Pass <= 0 to deactivate rate limiting.
* @param windowLengthMins
*/
- public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins) {
- this(maxEmptyPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute));
+ public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, double maxPollsPerMinute,int windowLengthMins) {
+ this(maxEmptyPollsPerMinute,maxPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute),getSleepMsForRate(1));
}
/**
@@ -102,11 +110,14 @@ public class DMaaPCambriaLimiter {
* @param sleepMs
* @param windowLengthMins
*/
- public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, int windowLengthMins, long sleepMs) {
+ public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute,double maxPollsPerMinute, int windowLengthMins, long sleepMs ,long sleepMS1) {
fRateInfo = new HashMap<String, RateInfo>();
+ fRateInfoCheck = new HashMap<String, RateInfoCheck>();
fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute);
+ fMaxPollsPerMinute = Math.max(0, maxPollsPerMinute);
fWindowLengthMins = windowLengthMins;
fSleepMs = Math.max(0, sleepMs);
+ fSleepMs1 = Math.max(0, sleepMS1);
}
/**
@@ -118,40 +129,72 @@ public class DMaaPCambriaLimiter {
* @param clientId
* @throws CambriaApiException
*/
- public void onCall(String topic, String consumerGroup, String clientId) throws CambriaApiException {
+ public void onCall(String topic, String consumerGroup, String clientId,String remoteHost) throws CambriaApiException {
// do nothing if rate is configured 0 or less
if (fMaxEmptyPollsPerMinute <= 0) {
return;
}
-
- // setup rate info for this tuple
+ // setup rate info for this tuple
final RateInfo ri = getRateInfo(topic, consumerGroup, clientId);
-
final double rate = ri.onCall();
log.info(ri.getLabel() + ": " + rate + " empty replies/minute.");
-
if (rate > fMaxEmptyPollsPerMinute) {
try {
- log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
+ log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
+ ".");
if (fSleepMs > 0) {
log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
+ " ms sleep, then responding in error.");
Thread.sleep(fSleepMs);
+
} else {
log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
}
} catch (InterruptedException e) {
- log.error(e.toString());
- Thread.currentThread().interrupt();
+ // ignore
}
+
+
ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests,
DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
"This client is making too many requests. Please use a long poll "
- + "setting to decrease the number of requests that result in empty responses. ");
+ + "setting to decrease the number of requests that result in empty responses. ","",Utils.getFormattedDate(new Date()),topic,"","",consumerGroup+"/"+clientId,remoteHost);
+
log.info(errRes.toString());
throw new CambriaApiException(errRes);
}
+ /*if (fMaxPollsPerMinute <= 0) {
+ return;
+ }
+
+ final RateInfoCheck ric = getRateInfoCheck(topic, consumerGroup, clientId);
+ final double ratevalue = ric.onCall();
+ if (ratevalue > fMaxPollsPerMinute) {
+ try {
+ log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxEmptyPollsPerMinute
+ + ".");
+ if (fSleepMs1 > fMaxPollsPerMinute) {
+ log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
+ + " ms sleep, then responding in error.");
+ Thread.sleep(fSleepMs1);
+ ric.reset();
+ } else {
+ log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
+ }
+ } catch (InterruptedException e) {
+ // ignore
+ }
+
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests,
+ DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
+ "This client is making too many requests "
+ + ",decrease the number of requests. ","",Utils.getFormattedDate(new Date()),topic,"","",consumerGroup+"/"+clientId,remoteHost);
+
+ log.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }*/
+
}
/**
@@ -181,7 +224,7 @@ public class DMaaPCambriaLimiter {
fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
windowLengthMinutes, TimeUnit.MINUTES);
}
-
+
public String getLabel() {
return fLabel;
}
@@ -205,13 +248,57 @@ public class DMaaPCambriaLimiter {
private final String fLabel;
private final CdmRateTicker fCallRateSinceLastMsgSend;
}
+
+
+
+ private static class RateInfoCheck {
+ /**
+	 * Constructor initializes the label and the rate ticker.
+ *
+ * @param label
+ * @param windowLengthMinutes
+ */
+ public RateInfoCheck(String label, int windowLengthMinutes) {
+ fLabel = label;
+ fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
+ windowLengthMinutes, TimeUnit.MINUTES);
+ }
+
+ public String getLabel() {
+ return fLabel;
+ }
+
+ /**
+ * CdmRateTicker is reset
+ */
+ public void reset() {
+ fCallRateSinceLastMsgSend.reset();
+ }
+
+	/**
+	 * Records a call on the ticker.
+	 *
+	 * @return the updated calls-per-minute rate
+	 */
+ public double onCall() {
+ fCallRateSinceLastMsgSend.tick();
+ return fCallRateSinceLastMsgSend.getRate();
+ }
+ private final String fLabel;
+ private final CdmRateTicker fCallRateSinceLastMsgSend;
+ }
+
+
private final HashMap<String, RateInfo> fRateInfo;
+ private final HashMap<String, RateInfoCheck> fRateInfoCheck;
private final double fMaxEmptyPollsPerMinute;
+ private final double fMaxPollsPerMinute;
private final int fWindowLengthMins;
private final long fSleepMs;
+ private final long fSleepMs1;
//private static final Logger log = LoggerFactory.getLogger(DMaaPCambriaLimiter.class);
private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class);
+
private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) {
final String key = makeKey(topic, consumerGroup, clientId);
RateInfo ri = fRateInfo.get(key);
@@ -221,7 +308,21 @@ public class DMaaPCambriaLimiter {
}
return ri;
}
+
+
+ private RateInfoCheck getRateInfoCheck(String topic, String consumerGroup, String clientId) {
+ final String key = makeKey(topic, consumerGroup, clientId);
+ RateInfoCheck ri = fRateInfoCheck.get(key);
+ if (ri == null) {
+ ri = new RateInfoCheck(key, 1);
+ fRateInfoCheck.put(key, ri);
+ }
+ return ri;
+ }
+
+
+
private String makeKey(String topic, String group, String id) {
return topic + "::" + group + "::" + id;
}
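
For reference, the limiter's per-(topic, consumerGroup, clientId) accounting can be sketched with a plain timestamp window in place of CdmRateTicker. This is a minimal illustration, not code from this patch; the class and method names are hypothetical.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch: tracks empty polls per minute for each
// topic::consumerGroup::clientId key, mirroring makeKey() above in spirit.
public class EmptyPollRateSketch {
    private final Map<String, Deque<Long>> windows = new HashMap<>();
    private final double maxEmptyPollsPerMinute;

    public EmptyPollRateSketch(double maxEmptyPollsPerMinute) {
        this.maxEmptyPollsPerMinute = maxEmptyPollsPerMinute;
    }

    // Returns true when the caller should be throttled (the servlet answers 429).
    public synchronized boolean onEmptyPoll(String topic, String group, String clientId) {
        final String key = topic + "::" + group + "::" + clientId;
        final long now = System.currentTimeMillis();
        final Deque<Long> window = windows.computeIfAbsent(key, k -> new ArrayDeque<>());
        window.addLast(now);
        // drop samples older than one minute
        while (!window.isEmpty() && now - window.peekFirst() > 60_000L) {
            window.removeFirst();
        }
        return window.size() > maxEmptyPollsPerMinute;
    }
}
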
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPContext.java b/src/main/java/com/att/dmf/mr/beans/DMaaPContext.java
index 79a8e1f..a880877 100644
--- a/src/main/java/com/att/nsa/cambria/beans/DMaaPContext.java
+++ b/src/main/java/com/att/dmf/mr/beans/DMaaPContext.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
import java.text.SimpleDateFormat;
import java.util.Date;
@@ -28,12 +28,12 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
-import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.ConfigurationReader;
/**
 * DMaaPContext provides and maintains all the configuration, HTTP request/response,
 * session, and consumer request time
- * @author author
+ * @author nilanjana.maity
*
*/
public class DMaaPContext {
@@ -85,7 +85,7 @@ public class DMaaPContext {
}
public ConfigurationReader getConfigReader() {
- return configReader;
+ return configReader;
}
public void setConfigReader(ConfigurationReader configReader) {
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java
new file mode 100644
index 0000000..6fc0838
--- /dev/null
+++ b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java
@@ -0,0 +1,365 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.dmf.mr.beans;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.springframework.beans.factory.annotation.Qualifier;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.backends.kafka.Kafka011Consumer;
+import com.att.dmf.mr.backends.kafka.Kafka011ConsumerUtil;
+import com.att.dmf.mr.backends.kafka.KafkaConsumerCache;
+import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
+import com.att.dmf.mr.backends.kafka.LiveLockAvoidance;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.utils.ConfigurationReader;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+
+/**
+ * @author nilanjana.maity
+ *
+ */
+public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
+
+ // private static final Logger log = LoggerFactory
+ // .getLogger(DMaaPKafkaConsumerFactory.class);
+ private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
+ // @Autowired
+ // private KafkaLiveLockAvoider kafkaLiveLockAvoider = new
+ // KafkaLiveLockAvoider();
+
+ /**
+ * constructor initialization
+ *
+ * @param settings
+ * @param metrics
+ * @param curator
+ * @throws missingReqdSetting
+ * @throws KafkaConsumerCacheException
+ * @throws UnknownHostException
+ */
+
+ public DMaaPKafkaConsumerFactory(@Qualifier("dMaaPMetricsSet") MetricsSet metrics,
+ @Qualifier("curator") CuratorFramework curator,
+ @Qualifier("kafkalockavoid") KafkaLiveLockAvoider2 kafkaLiveLockAvoider)
+ throws missingReqdSetting, KafkaConsumerCacheException, UnknownHostException {
+
+ String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ CambriaConstants.kSetting_ApiNodeIdentifier);
+ if (apiNodeId == null) {
+
+ apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
+ }
+
+ log.info("This Cambria API Node identifies itself as [" + apiNodeId + "].");
+ final String mode = CambriaConstants.DMAAP;
+
+ fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "kafka.metadata.broker.list");
+ if (null == fkafkaBrokers) {
+
+ fkafkaBrokers = "localhost:9092";
+ }
+
+ boolean kSetting_EnableCache = kDefault_IsCacheEnabled;
+ String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "cambria.consumer.cache.enabled");
+ if (null != strkSetting_EnableCache)
+ kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
+
+ final boolean isCacheEnabled = kSetting_EnableCache;
+
+ // fCache = (isCacheEnabled) ? new KafkaConsumerCache(apiNodeId,
+ // metrics) : null;
+ fCache = null;
+ if (isCacheEnabled) {
+ fCache = KafkaConsumerCache.getInstance();
+
+ }
+ if (fCache != null) {
+ fCache.setfMetrics(metrics);
+ fCache.setfApiId(apiNodeId);
+ fCache.startCache(mode, curator);
+ if(kafkaLiveLockAvoider!=null){
+ kafkaLiveLockAvoider.startNewWatcherForServer(apiNodeId, makeAvoidanceCallback(apiNodeId));
+ fkafkaLiveLockAvoider = kafkaLiveLockAvoider;
+ }
+ }
+ }
+
+ /*
+ * getConsumerFor
+ *
+ * @see
+ * com.att.dmf.mr.backends.ConsumerFactory#getConsumerFor(java.lang.String,
+	 * java.lang.String, java.lang.String, int, java.lang.String)
+	 *
+	 * Used by EventServiceImpl.getEvents() to obtain a Kafka consumer, either
+	 * from the consumer cache or by creating a new connection. It also attaches
+	 * the list of other consumer objects in the same consumer group to the
+	 * Kafka011Consumer; that list may be used to work around poll-rebalancing
+	 * issues.
+ */
+ @Override
+ public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+ String remotehost) throws UnavailableException, CambriaApiException {
+ Kafka011Consumer kc;
+
+ // To synchronize based on the consumer group.
+
+ Object syncObject = synchash.get(topic + consumerGroupName);
+ if (null == syncObject) {
+ syncObject = new Object();
+ synchash.put(topic + consumerGroupName, syncObject);
+ }
+
+ synchronized (syncObject) {
+ try {
+ kc = (fCache != null) ? fCache.getConsumerFor(topic, consumerGroupName, consumerId) : null; // consumerId
+
+ } catch (KafkaConsumerCacheException e) {
+ log.info("######@@@@### Error occured in Kafka Caching" + e + " " + topic + "::" + consumerGroupName
+ + "::" + consumerId);
+ log.error("####@@@@## Error occured in Kafka Caching" + e + " " + topic + "::" + consumerGroupName
+ + "::" + consumerId);
+ throw new UnavailableException(e);
+ }
+
+			// Ideally, if the cache has an entry, the flow below is skipped; if it
+			// didn't, create the consumer for the first time on this node.
+ if (kc == null) {
+
+ log.info("^Kafka consumer cache value " + topic + "::" + consumerGroupName + "::" + consumerId + " =>"
+ + kc);
+
+ final InterProcessMutex ipLock = new InterProcessMutex(ConfigurationReader.getCurator(),
+ "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
+ boolean locked = false;
+
+ try {
+
+ locked = ipLock.acquire(30, TimeUnit.SECONDS);
+ if (!locked) {
+
+ log.info("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
+ + ", " + consumerGroupName + ", " + consumerId + ") from " + remotehost);
+ throw new UnavailableException(
+ "Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
+ + ", " + consumerGroupName + ", " + consumerId + ") " + remotehost);
+ }
+
+ // ConfigurationReader.getCurator().checkExists().forPath("S").
+
+ log.info("Creating Kafka consumer for group [" + consumerGroupName + "], consumer [" + consumerId
+ + "], on topic [" + topic + "].");
+
+ fCache.signalOwnership(topic, consumerGroupName, consumerId);
+
+ final Properties props = createConsumerConfig(topic,consumerGroupName, consumerId);
+ long fCreateTimeMs = System.currentTimeMillis();
+ KafkaConsumer<String, String> cc = new KafkaConsumer<>(props);
+ kc = new Kafka011Consumer(topic, consumerGroupName, consumerId, cc, fkafkaLiveLockAvoider);// ,fCache.getkafkaLiveLockAvoiderObj()
+ // );
+ log.info(" kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
+
+ if (fCache != null) {
+ fCache.putConsumerFor(topic, consumerGroupName, consumerId, kc); //
+ }
+
+ } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
+ log.info(
+ "Kafka consumer couldn't connect to ZK. " + x + " " + consumerGroupName + "/" + consumerId);
+ throw new UnavailableException("Couldn't connect to ZK.");
+ } catch (KafkaConsumerCacheException e) {
+ log.info("Failed to cache consumer (this may have performance implications): " + e.getMessage()
+ + " " + consumerGroupName + "/" + consumerId);
+ } catch (UnavailableException u) {
+ log.info("Failed and in UnavailableException block " + u.getMessage() + " " + consumerGroupName
+ + "/" + consumerId);
+ throw new UnavailableException("Error while acquiring consumer factory lock " + u.getMessage(), u);
+ } catch (Exception e) {
+ log.info("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/"
+ + consumerId);
+ log.error("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/"
+ + consumerId);
+
+ } finally {
+ if (locked) {
+ try {
+ ipLock.release();
+ } catch (Exception e) {
+ throw new UnavailableException("Error while releasing consumer factory lock" + e, e);
+ }
+ }
+ }
+ }
+ }
+ return kc;
+ }
+
+ @Override
+ public synchronized void destroyConsumer(String topic, String consumerGroup, String clientId) {
+ if (fCache != null) {
+ fCache.dropConsumer(topic, consumerGroup, clientId);
+ }
+ }
+
+ @Override
+ public synchronized Collection<? extends Consumer> getConsumers() {
+ return fCache.getConsumers();
+ }
+
+ @Override
+ public synchronized void dropCache() {
+ fCache.dropAllConsumers();
+ }
+
+
+ private KafkaConsumerCache fCache;
+ private KafkaLiveLockAvoider2 fkafkaLiveLockAvoider;
+ private String fkafkaBrokers;
+
+
+
+ private static String makeLongKey(String key, String prefix) {
+ return prefix + "." + key;
+ }
+
+ private void transferSettingIfProvided(Properties target, String key, String prefix) {
+ String keyVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, makeLongKey(key, prefix));
+
+ // if (fSettings.hasValueFor(makeLongKey(key, prefix))) {
+ if (null != keyVal) {
+ // final String val = fSettings
+ // .getString(makeLongKey(key, prefix), "");
+ log.info("Setting [" + key + "] to " + keyVal + ".");
+ target.put(key, keyVal);
+ }
+ }
+
+ /**
+	 * Creates the Properties required to open a Kafka connection.
+	 *
+	 * The group name is rewritten as groupId--topic so that the same group id
+	 * used across multiple topics stays unique per topic; sharing one group id
+	 * across topics can trigger frequent consumer rebalancing on all of them.
+	 *
+	 * @param topic
+	 * @param groupId
+	 * @param consumerId
+	 * @return Properties
+ */
+	private Properties createConsumerConfig(String topic, String groupId, String consumerId) {
+ final Properties props = new Properties();
+		// The groupId--topic form avoids sharing one consumer group across multiple topics. Do not change this logic.
+		// Fix for CPFMF-644:
+ final String fakeGroupName = groupId + "--" + topic;
+ props.put("group.id", fakeGroupName);
+ props.put("enable.auto.commit", "false"); // 0.11
+ props.put("bootstrap.servers", fkafkaBrokers);
+ /*props.put("sasl.jaas.config",
+ "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ props.put("security.protocol", "SASL_PLAINTEXT");
+ props.put("sasl.mechanism", "PLAIN");*/
+ props.put("client.id", consumerId);
+
+ // additional settings: start with our defaults, then pull in configured
+ // overrides
+ populateKafkaInternalDefaultsMap();
+ for (String key : KafkaConsumerKeys) {
+ transferSettingIfProvided(props, key, "kafka");
+ }
+
+ props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+ props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+
+ return props;
+ }
+
+
+	private static final String[] KafkaConsumerKeys = { "bootstrap.servers", "heartbeat.interval.ms",
+			"auto.offset.reset", "exclude.internal.topics", "session.timeout.ms", "fetch.max.bytes",
+			"auto.commit.interval.ms", "connections.max.idle.ms", "fetch.min.bytes", "isolation.level",
+			"request.timeout.ms", "fetch.max.wait.ms", "reconnect.backoff.max.ms", "reconnect.backoff.ms",
+			"retry.backoff.ms", "max.partition.fetch.bytes", "max.poll.interval.ms", "max.poll.records",
+			"receive.buffer.bytes", "metadata.max.age.ms" };
+
+	/**
+	 * Placeholder for seeding internal Kafka defaults (consumer timeout,
+	 * zookeeper timeout, etc.); currently a no-op and takes no parameters.
+	 */
+ private static void populateKafkaInternalDefaultsMap() { }
+
+	/*
+	 * Builds the LiveLockAvoidance callback that forces a poll on the local
+	 * consumers of a group when a rebalance unlock is signalled.
+	 */
+ private LiveLockAvoidance makeAvoidanceCallback(final String appId) {
+
+ return new LiveLockAvoidance() {
+
+ @Override
+ public String getAppId() {
+ return appId;
+ }
+
+ @Override
+ public void handleRebalanceUnlock(String groupName) {
+ log.info("FORCE A POLL NOW FOR appId: [{}] group: [{}]", getAppId(), groupName);
+ Kafka011ConsumerUtil.forcePollOnConsumer(groupName + "::");
+ }
+
+ };
+
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
+ String remotehost) throws UnavailableException, CambriaApiException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ private HashMap<String, Object> synchash = new HashMap<String, Object>();
+
+} \ No newline at end of file
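
The factory above serializes consumer creation across API nodes with a Curator InterProcessMutex. A self-contained sketch of that locking pattern, using standard Curator APIs (the ZooKeeper address and lock path are placeholders, not values from this patch):

import java.util.concurrent.TimeUnit;

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.apache.curator.retry.ExponentialBackoffRetry;

public class ConsumerLockSketch {
    public static void main(String[] args) throws Exception {
        // placeholder ZooKeeper address
        CuratorFramework curator = CuratorFrameworkFactory.newClient(
                "localhost:2181", new ExponentialBackoffRetry(1000, 3));
        curator.start();

        // placeholder lock path; the factory uses /consumerFactory/<topic>/<group>/<consumer>
        InterProcessMutex lock = new InterProcessMutex(curator,
                "/consumerFactory/myTopic/myGroup/myClient");
        boolean locked = false;
        try {
            // same 30-second bound as getConsumerFor() above
            locked = lock.acquire(30, TimeUnit.SECONDS);
            if (!locked) {
                throw new IllegalStateException("could not acquire consumer-creation lock");
            }
            // ... create the KafkaConsumer and put it in the cache here ...
        } finally {
            if (locked) {
                lock.release();
            }
            curator.close();
        }
    }
}
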
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBroker.java b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java
index e7d777e..643eae9 100644
--- a/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaMetaBroker.java
+++ b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java
@@ -8,39 +8,49 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
+import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
+import java.util.concurrent.ExecutionException;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.exception.ZkNoNodeException;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.admin.AdminClientConfig;
+import org.apache.kafka.clients.admin.CreateTopicsResult;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.apache.kafka.common.KafkaFuture;
+import org.json.JSONObject;
+import org.json.JSONArray;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metabroker.Broker;
+import com.att.dmf.mr.metabroker.Broker1;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.utils.ConfigurationReader;
//import org.apache.log4-j.Logger;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.metabroker.Broker;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.utils.ConfigurationReader;
+//import com.att.dmf.mr.backends.kafka.kafka011.SettingsUtil;
import com.att.nsa.configs.ConfigDb;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.configs.ConfigPath;
@@ -50,20 +60,42 @@ import com.att.nsa.security.NsaAcl;
import com.att.nsa.security.NsaAclUtils;
import com.att.nsa.security.NsaApiKey;
-import kafka.admin.AdminUtils;
-import kafka.utils.ZKStringSerializer$;
/**
* class performing all topic operations
*
- * @author author
+ * @author anowarul.islam
*
*/
-
-public class DMaaPKafkaMetaBroker implements Broker {
+//@Component
+public class DMaaPKafkaMetaBroker implements Broker1 {
+
+ public DMaaPKafkaMetaBroker() {
+ fZk = null;
+ fCambriaConfig = null;
+ fBaseTopicData = null;
+ final Properties props = new Properties ();
+ String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "kafka.metadata.broker.list");
+ if (null == fkafkaBrokers) {
+
+ fkafkaBrokers = "localhost:9092";
+ }
+
+
+
+ props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
+ /* props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
+ props.put("sasl.mechanism", "PLAIN");*/
+ fKafkaAdminClient=AdminClient.create ( props );
+ // fKafkaAdminClient = null;
+ }
//private static final Logger log = Logger.getLogger(DMaaPKafkaMetaBroker.class);
private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
+ private final AdminClient fKafkaAdminClient;
+
/**
@@ -79,6 +111,36 @@ public class DMaaPKafkaMetaBroker implements Broker {
fZk = zk;
fCambriaConfig = configDb;
fBaseTopicData = configDb.parse("/topics");
+ final Properties props = new Properties ();
+ String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "kafka.metadata.broker.list");
+ if (null == fkafkaBrokers) {
+
+ fkafkaBrokers = "localhost:9092";
+ }
+
+
+
+ props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
+ /* props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
+ props.put("sasl.mechanism", "PLAIN");*/
+ fKafkaAdminClient=AdminClient.create ( props );
+ // fKafkaAdminClient = null;
+
+
+ }
+
+ public DMaaPKafkaMetaBroker( rrNvReadable settings,
+ ZkClient zk, ConfigDb configDb,AdminClient client) {
+ //fSettings = settings;
+ fZk = zk;
+ fCambriaConfig = configDb;
+ fBaseTopicData = configDb.parse("/topics");
+ fKafkaAdminClient= client;
+ // fKafkaAdminClient = null;
+
+
}
@Override
@@ -91,7 +153,6 @@ public class DMaaPKafkaMetaBroker implements Broker {
for (String topic : topics) {
result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData));
}
-
JSONObject dataObj = new JSONObject();
dataObj.put("topics", new JSONObject());
@@ -132,7 +193,7 @@ public class DMaaPKafkaMetaBroker implements Broker {
*/
@Override
public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas,
- boolean transactionEnabled) throws TopicExistsException, CambriaApiException {
+ boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException {
log.info("Creating topic: " + topic);
try {
log.info("Check if topic [" + topic + "] exist.");
@@ -162,78 +223,53 @@ public class DMaaPKafkaMetaBroker implements Broker {
}
// create via kafka
- try {
- ZkClient zkClient = null;
- try {
- log.info("Loading zookeeper client for creating topic.");
- // FIXME: use of this scala module$ thing is a goofy hack to
- // make Kafka aware of the
- // topic creation. (Otherwise, the topic is only partially
- // created in ZK.)
- zkClient = ZkClientFactory.createZkClient();
- log.info("Zookeeper client loaded successfully. Creating topic.");
- AdminUtils.createTopic(zkClient, topic, partitions, replicas, new Properties());
- } catch (kafka.common.TopicExistsException e) {
- log.error("Topic [" + topic + "] could not be created. " + e.getMessage(), e);
- throw new TopicExistsException(topic);
- } catch (ZkNoNodeException e) {
- log.error("Topic [" + topic + "] could not be created. The Kafka cluster is not setup.", e);
- // Kafka throws this when the server isn't running (and perhaps
- // hasn't ever run)
- throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
- "The Kafka cluster is not setup.");
- } catch (kafka.admin.AdminOperationException e) {
- // Kafka throws this when the server isn't running (and perhaps
- // hasn't ever run)
- log.error("The Kafka cluster can't handle your request. Talk to the administrators: " + e.getMessage(),
- e);
- throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
- "The Kafka cluster can't handle your request. Talk to the administrators.");
- } finally {
- log.info("Closing zookeeper connection.");
- if (zkClient != null)
- zkClient.close();
+
+ try
+ {
+			final NewTopic topicRequest = new NewTopic ( topic, partitions, (short) replicas );
+ final CreateTopicsResult ctr = fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) );
+ final KafkaFuture<Void> ctrResult = ctr.all ();
+ ctrResult.get ();
+ // underlying Kafka topic created. now setup our API info
+ return createTopicEntry ( topic, desc, ownerApiKey, transactionEnabled );
}
-
- log.info("Creating topic entry for topic: " + topic);
- // underlying Kafka topic created. now setup our API info
- return createTopicEntry(topic, desc, ownerApiKey, transactionEnabled);
- } catch (ConfigDbException excp) {
- log.error("Failed to create topic data. Talk to the administrators: " + excp.getMessage(), excp);
- throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
- "Failed to create topic data. Talk to the administrators.");
- }
+ catch ( InterruptedException e )
+ {
+ //timer.fail ( "Timeout" );
+ log.warn ( "Execution of describeTopics timed out." );
+ throw new ConfigDbException ( e );
+ }
+ catch ( ExecutionException e )
+ {
+ //timer.fail ( "ExecutionError" );
+ log.warn ( "Execution of describeTopics failed: " + e.getCause ().getMessage (), e.getCause () );
+ throw new ConfigDbException ( e.getCause () );
+ }
+
}
@Override
- public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException {
+ public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException,ConfigDbException {
log.info("Deleting topic: " + topic);
ZkClient zkClient = null;
try {
log.info("Loading zookeeper client for topic deletion.");
- // FIXME: use of this scala module$ thing is a goofy hack to make
- // Kafka aware of the
- // topic creation. (Otherwise, the topic is only partially created
+ // topic creation. (Otherwise, the topic is only partially created
// in ZK.)
- zkClient = ZkClientFactory.createZkClient();
-
+ /*zkClient = new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
+ ZKStringSerializer$.MODULE$);
+ String strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+ if (null==strkSettings_KafkaZookeeper) strkSettings_KafkaZookeeper = CambriaConstants.kDefault_ZkConfigDbServers;
+ ZkUtils zkutils =new ZkUtils(zkClient , new ZkConnection(strkSettings_KafkaZookeeper),false);
+ */
+
+ fKafkaAdminClient.deleteTopics(Arrays.asList(topic));
log.info("Zookeeper client loaded successfully. Deleting topic.");
- AdminUtils.deleteTopic(zkClient, topic);
- } catch (kafka.common.TopicExistsException e) {
+ //AdminUtils.deleteTopic(zkutils, topic);
+ } catch (Exception e) {
log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e);
- throw new TopicExistsException(topic);
- } catch (ZkNoNodeException e) {
- log.error("Failed to delete topic [" + topic + "]. The Kafka cluster is not setup." + e.getMessage(), e);
- // Kafka throws this when the server isn't running (and perhaps
- // hasn't ever run)
- throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable, "The Kafka cluster is not setup.");
- } catch (kafka.admin.AdminOperationException e) {
- // Kafka throws this when the server isn't running (and perhaps
- // hasn't ever run)
- log.error("The Kafka cluster can't handle your request. Talk to the administrators." + e.getMessage(), e);
- throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
- "The Kafka cluster can't handle your request. Talk to the administrators.");
- } finally {
+ throw new ConfigDbException(e);
+ } finally {
log.info("Closing zookeeper connection.");
if (zkClient != null)
zkClient.close();
@@ -242,8 +278,6 @@ public class DMaaPKafkaMetaBroker implements Broker {
// throw new UnsupportedOperationException ( "We can't programmatically
// delete Kafka topics yet." );
}
-
-
//private final rrNvReadable fSettings;
private final ZkClient fZk;
@@ -295,7 +329,7 @@ public class DMaaPKafkaMetaBroker implements Broker {
	 * class performing all user operations: checking whether a user is eligible
	 * to read or write, and permitting a user to read and write,
*
- * @author author
+ * @author anowarul.islam
*
*/
public static class KafkaTopic implements Topic {
@@ -328,25 +362,27 @@ public class DMaaPKafkaMetaBroker implements Broker {
// created topics.
JSONObject readers = o.optJSONObject ( "readers" );
if ( readers == null && fOwner.length () > 0 ) readers = kEmptyAcl;
- fReaders = fromJson ( readers );
+ fReaders = fromJson ( readers );
JSONObject writers = o.optJSONObject ( "writers" );
if ( writers == null && fOwner.length () > 0 ) writers = kEmptyAcl;
fWriters = fromJson ( writers );
}
- private NsaAcl fromJson(JSONObject o) {
- NsaAcl acl = new NsaAcl();
- if (o != null) {
- JSONArray a = o.optJSONArray("allowed");
- if (a != null) {
- for (int i = 0; i < a.length(); ++i) {
- String user = a.getString(i);
- acl.add(user);
- }
+
+ private NsaAcl fromJson(JSONObject o) {
+ NsaAcl acl = new NsaAcl();
+ if (o != null) {
+ JSONArray a = o.optJSONArray("allowed");
+ if (a != null) {
+ for (int i = 0; i < a.length(); ++i) {
+ String user = a.getString(i);
+ acl.add(user);
}
}
- return acl;
}
+ return acl;
+ }
+
@Override
public String getName() {
return fName;
@@ -445,7 +481,7 @@ public class DMaaPKafkaMetaBroker implements Broker {
private final NsaAcl fReaders;
private final NsaAcl fWriters;
private boolean fTransactionEnabled;
-
+
public boolean isTransactionEnabled() {
return fTransactionEnabled;
}
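
The broker now drives topic creation and deletion through the Kafka AdminClient instead of the old AdminUtils/ZkClient path. A minimal standalone sketch of that API usage (the topic name, partition count, and broker address here are placeholders):

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class TopicAdminSketch {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        final Properties props = new Properties();
        // same localhost:9092 fallback as the broker above
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        try (AdminClient admin = AdminClient.create(props)) {
            // create a topic with 3 partitions and replication factor 1
            admin.createTopics(Collections.singleton(new NewTopic("demo.topic", 3, (short) 1)))
                    .all().get(); // blocks; failures surface as ExecutionException

            // delete it again via the same client
            admin.deleteTopics(Collections.singleton("demo.topic")).all().get();
        }
    }
}
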
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPMetricsSet.java b/src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java
index 3c3aa6d..9942837 100644
--- a/src/main/java/com/att/nsa/cambria/beans/DMaaPMetricsSet.java
+++ b/src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
import java.text.SimpleDateFormat;
import java.util.Date;
@@ -28,9 +28,9 @@ import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
-import com.att.nsa.apiServer.metrics.cambria.DMaaPMetricsSender;
-import com.att.nsa.cambria.CambriaApiVersionInfo;
-import com.att.nsa.cambria.backends.MetricsSet;
+import com.att.dmf.mr.CambriaApiVersionInfo;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.mr.apiServer.metrics.cambria.DMaaPMetricsSender;
import com.att.nsa.drumlin.till.nv.rrNvReadable;
import com.att.nsa.metrics.impl.CdmConstant;
import com.att.nsa.metrics.impl.CdmCounter;
@@ -45,7 +45,7 @@ import com.att.nsa.metrics.impl.CdmTimeSince;
/**
* Metrics related information
*
- * @author author
+ * @author anowarul.islam
*
*/
public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet {
@@ -87,7 +87,6 @@ public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSe
//public DMaaPMetricsSet() {
public DMaaPMetricsSet(rrNvReadable cs) {
//fSettings = cs;
-
fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion());
super.putItem("version", fVersion);
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPNsaApiDb.java b/src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java
index ce257d4..e29403f 100644
--- a/src/main/java/com/att/nsa/cambria/beans/DMaaPNsaApiDb.java
+++ b/src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java
@@ -8,26 +8,27 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
import java.security.Key;
//import org.apache.log4-j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
+
+import com.att.dmf.mr.constants.CambriaConstants;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.constants.CambriaConstants;
import com.att.nsa.configs.ConfigDb;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.configs.confimpl.EncryptingLayer;
@@ -42,7 +43,7 @@ import com.att.nsa.util.rrConvertor;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class DMaaPNsaApiDb {
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPZkClient.java b/src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java
index 590ecd6..78a7426 100644
--- a/src/main/java/com/att/nsa/cambria/beans/DMaaPZkClient.java
+++ b/src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java
@@ -8,28 +8,28 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
import org.I0Itec.zkclient.ZkClient;
import org.springframework.beans.factory.annotation.Qualifier;
-import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.ConfigurationReader;
import com.att.nsa.drumlin.till.nv.rrNvReadable;
/**
 * Zookeeper client which reads configuration and settings parameters
- * @author author
+ * @author nilanjana.maity
*
*/
public class DMaaPZkClient extends ZkClient {
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPZkConfigDb.java b/src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java
index 8fe96e9..d543721 100644
--- a/src/main/java/com/att/nsa/cambria/beans/DMaaPZkConfigDb.java
+++ b/src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java
@@ -8,29 +8,28 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
import org.springframework.beans.factory.annotation.Qualifier;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.ConfigurationReader;
import com.att.nsa.configs.confimpl.ZkConfigDb;
import com.att.nsa.drumlin.till.nv.rrNvReadable;
//import com.att.nsa.configs.confimpl.ZkConfigDb;
/**
* Provide the zookeeper config db connection
- * @author author
+ * @author nilanjana.maity
*
*/
public class DMaaPZkConfigDb extends ZkConfigDb {
diff --git a/src/main/java/com/att/nsa/cambria/beans/LogDetails.java b/src/main/java/com/att/dmf/mr/beans/LogDetails.java
index 5a195e9..b7fb325 100644
--- a/src/main/java/com/att/nsa/cambria/beans/LogDetails.java
+++ b/src/main/java/com/att/dmf/mr/beans/LogDetails.java
@@ -1,3 +1,6 @@
+/**
+ *
+ */
/*******************************************************************************
* ============LICENSE_START=======================================================
* org.onap.dmaap
@@ -8,29 +11,26 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-/**
- *
- */
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
import java.util.Date;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.utils.Utils;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.utils.Utils;
/**
- * @author author
+ * @author muzainulhaque.qazi
*
*/
diff --git a/src/main/java/com/att/nsa/cambria/beans/TopicBean.java b/src/main/java/com/att/dmf/mr/beans/TopicBean.java
index 3303c07..a397921 100644
--- a/src/main/java/com/att/nsa/cambria/beans/TopicBean.java
+++ b/src/main/java/com/att/dmf/mr/beans/TopicBean.java
@@ -1,3 +1,6 @@
+/**
+ *
+ */
/*******************************************************************************
* ============LICENSE_START=======================================================
* org.onap.dmaap
@@ -8,28 +11,25 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-/**
- *
- */
-package com.att.nsa.cambria.beans;
+package com.att.dmf.mr.beans;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlRootElement;
/**
- * @author author
+ * @author muzainulhaque.qazi
*
*/
@XmlRootElement
@@ -39,8 +39,8 @@ public class TopicBean implements Serializable {
private String topicName;
private String topicDescription;
- private int partitionCount = 1; //default values
- private int replicationCount = 1; //default value
+ private int partitionCount;
+ private int replicationCount;
private boolean transactionEnabled;
diff --git a/src/main/java/com/att/nsa/cambria/constants/CambriaConstants.java b/src/main/java/com/att/dmf/mr/constants/CambriaConstants.java
index 019fa38..b640688 100644
--- a/src/main/java/com/att/nsa/cambria/constants/CambriaConstants.java
+++ b/src/main/java/com/att/dmf/mr/constants/CambriaConstants.java
@@ -8,26 +8,24 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.constants;
-
-import org.apache.coyote.http11.Http11NioProtocol;
+package com.att.dmf.mr.constants;
-import com.att.nsa.cambria.utils.Utils;
+import com.att.dmf.mr.utils.Utils;
/**
* This is the constant files for all the property or parameters.
- * @author author
+ * @author nilanjana.maity
*
*/
public interface CambriaConstants {
@@ -55,9 +53,11 @@ String msgRtr_prop="MsgRtrApi.properties";
* value to use to signal max empty poll per minute
*/
String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
+	String kSetting_MaxPollsPerMinute = "cambria.rateLimit.maxPollsPerMinute";
double kDefault_MaxEmptyPollsPerMinute = 10.0;
String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms";
+ String kSetting_SleepMsRealOnRateLimit = "cambria.rateLimitActual.delay.ms";
long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute );
String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes";
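
The default rate-limit delay is derived from the allowed rate via Utils.getSleepMsForRate. One plausible reading of that relationship (the actual Utils implementation may differ): at r permitted calls per minute, one call "costs" 60000 / r milliseconds, so sleeping that long drains the excess.

public class RateMathSketch {
    // Hypothetical stand-in for Utils.getSleepMsForRate(); not this patch's code.
    static long getSleepMsForRate(double ratePerMinute) {
        if (ratePerMinute <= 0) {
            return 0L; // rate limiting disabled
        }
        return (long) (60_000.0 / ratePerMinute);
    }

    public static void main(String[] args) {
        // with the default of 10 empty polls per minute, the delay is 6000 ms
        System.out.println(getSleepMsForRate(10.0));
    }
}
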
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPAccessDeniedException.java b/src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java
index 7558b25..de66617 100644
--- a/src/main/java/com/att/nsa/cambria/exception/DMaaPAccessDeniedException.java
+++ b/src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java
@@ -8,20 +8,20 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
-import com.att.nsa.cambria.CambriaApiException;
+import com.att.dmf.mr.CambriaApiException;
public class DMaaPAccessDeniedException extends CambriaApiException{
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java b/src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java
index 8838a49..84634a9 100644
--- a/src/main/java/com/att/nsa/cambria/exception/DMaaPCambriaExceptionMapper.java
+++ b/src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java
@@ -8,36 +8,36 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
import javax.inject.Singleton;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.apache.http.HttpStatus;
+import org.apache.http.HttpStatus;
import org.springframework.beans.factory.annotation.Autowired;
-import com.att.nsa.cambria.CambriaApiException;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
/**
* Exception Mapper class to handle
* CambriaApiException
- * @author author
+ * @author rajashree.khare
*
*/
@Provider
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPErrorMessages.java b/src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java
index dfcb227..409aa60 100644
--- a/src/main/java/com/att/nsa/cambria/exception/DMaaPErrorMessages.java
+++ b/src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java
@@ -8,66 +8,91 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
+
+import javax.annotation.PostConstruct;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
+import org.springframework.web.context.support.SpringBeanAutowiringSupport;
/**
* This Class reads the error message properties
* from the properties file
- * @author author
+ * @author rajashree.khare
*
*/
@Component
public class DMaaPErrorMessages {
- private String notFound="The requested resource was not found.Please verify the URL and try again.";
+
+
+ //@Value("${resource.not.found}")
+	private String notFound="The requested resource was not found. Please verify the URL and try again.";
+
+// @Value("${server.unavailable}")
	private String serverUnav="Server is temporarily unavailable or busy. Try again later, or try another server in the cluster.";
+// @Value("${http.method.not.allowed}")
	private String methodNotAllowed="The specified HTTP method is not allowed for the requested resource. Enter a valid HTTP method and try again.";
+ //@Value("${incorrect.request.json}")
private String badRequest="Incorrect JSON object. Please correct the JSON format and try again.";
+// @Value("${network.time.out}")
	private String nwTimeout="Connection to the DMaaP MR timed out. Please try again.";
+ //@Value("${get.topic.failure}")
private String topicsfailure="Failed to retrieve list of all topics.";
+ //@Value("${not.permitted.access.1}")
	private String notPermitted1="Access Denied. User does not have permission to perform";
- private String notPermitted2="operation on Topic";
+ //@Value("${not.permitted.access.2}")
+ private String notPermitted2="operation on Topic:";
+ //@Value("${get.topic.details.failure}")
private String topicDetailsFail="Failed to retrieve details of topic:";
+ //@Value("${create.topic.failure}")
private String createTopicFail="Failed to create topic:";
+ //@Value("${delete.topic.failure}")
private String deleteTopicFail="Failed to delete topic:";
+ //@Value("${incorrect.json}")
	private String incorrectJson="Incorrect JSON object. Could not parse JSON. Please correct the JSON format and try again.";
+ //@Value("${consume.msg.error}")
private String consumeMsgError="Error while reading data from topic.";
+ //@Value("${publish.msg.error}")
private String publishMsgError="Error while publishing data to topic.";
+ //@Value("${publish.msg.count}")
	private String publishMsgCount="Successfully published number of messages:";
+ //@Value("${authentication.failure}")
private String authFailure="Access Denied: Invalid Credentials. Enter a valid MechId and Password and try again.";
+ //@Value("${msg_size_exceeds}")
private String msgSizeExceeds="Message size exceeds the default size.";
+ //@Value("${topic.not.exist}")
private String topicNotExist="No such topic exists.";
public String getMsgSizeExceeds() {
@@ -215,6 +240,9 @@ public class DMaaPErrorMessages {
}
-
+ @PostConstruct
+ public void init() {
+ SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(this);
+ }
}
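
The init() hook added above relies on SpringBeanAutowiringSupport so that, once the commented @Value bindings are re-enabled, an instance's fields can be injected from the current web application context. A sketch of the same pattern (the property key and default below are placeholders, not keys from this patch):

import javax.annotation.PostConstruct;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.context.support.SpringBeanAutowiringSupport;

public class ExternalizedMessagesSketch {

    // placeholder property key and default value
    @Value("${resource.not.found:The requested resource was not found.}")
    private String notFound;

    @PostConstruct
    public void init() {
        // wires @Value/@Autowired members from the current web application context,
        // mirroring the init() hook added to DMaaPErrorMessages above
        SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(this);
    }

    public String getNotFound() {
        return notFound;
    }
}
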
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPResponseCode.java b/src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java
index 4011112..593863a 100644
--- a/src/main/java/com/att/nsa/cambria/exception/DMaaPResponseCode.java
+++ b/src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java
@@ -8,23 +8,23 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
/**
* Define the Error Response Codes for MR
* using this enumeration
- * @author author
+ * @author rajashree.khare
*
*/
public enum DMaaPResponseCode {
diff --git a/src/main/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapper.java b/src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java
index 59ede30..a971c3f 100644
--- a/src/main/java/com/att/nsa/cambria/exception/DMaaPWebExceptionMapper.java
+++ b/src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
import javax.inject.Singleton;
import javax.ws.rs.BadRequestException;
@@ -34,16 +34,17 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
import org.apache.http.HttpStatus;
//import org.apache.log-4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
/**
* Exception Mapper class to handle
* Jersey Exceptions
- * @author author
+ * @author rajashree.khare
*
*/
@Provider
diff --git a/src/main/java/com/att/nsa/cambria/exception/ErrorResponse.java b/src/main/java/com/att/dmf/mr/exception/ErrorResponse.java
index 03b7aee..c92cadd 100644
--- a/src/main/java/com/att/nsa/cambria/exception/ErrorResponse.java
+++ b/src/main/java/com/att/dmf/mr/exception/ErrorResponse.java
@@ -8,24 +8,24 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.exception;
+package com.att.dmf.mr.exception;
import org.json.JSONObject;
/**
* Represents the Error Response Object
* that is rendered as a JSON object when
* an exception or error occurs on MR Rest Service.
- * @author author
+ * @author rajashree.khare
*
*/
//@XmlRootElement
diff --git a/src/main/java/com/att/nsa/cambria/listener/CambriaServletContextListener.java b/src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java
index 9fbfee8..6022b91 100644
--- a/src/main/java/com/att/nsa/cambria/listener/CambriaServletContextListener.java
+++ b/src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.listener;
+package com.att.dmf.mr.listener;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
@@ -29,7 +29,7 @@ import com.att.eelf.configuration.EELFManager;
/**
* This is the Cambria Servlet Context Listener which helps while loading the app that provides the endpoints
- * @author author
+ * @author nilanjana.maity
*
*/
public class CambriaServletContextListener implements ServletContextListener {
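The listener's methods are elided from this hunk. A minimal sketch of the ServletContextListener contract it implements (the log calls are illustrative):

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;

// Minimal sketch of the listener pattern; the body is illustrative, since the
// hunk only shows the class declaration, not its contents.
public class ContextListenerSketch implements ServletContextListener {
    @Override
    public void contextInitialized(ServletContextEvent sce) {
        sce.getServletContext().log("app starting: endpoints being loaded");
    }

    @Override
    public void contextDestroyed(ServletContextEvent sce) {
        sce.getServletContext().log("app stopping");
    }
}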
diff --git a/src/main/java/com/att/nsa/cambria/listener/DME2EndPointLoader.java b/src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java
index 20871e5..7f27798 100644
--- a/src/main/java/com/att/nsa/cambria/listener/DME2EndPointLoader.java
+++ b/src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java
@@ -8,33 +8,33 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.listener;
+package com.att.dmf.mr.listener;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
+import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
import com.att.aft.dme2.api.DME2Exception;
import com.att.aft.dme2.api.DME2Manager;
-import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
+import com.att.dmf.mr.service.impl.EventsServiceImpl;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.service.impl.EventsServiceImpl;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class DME2EndPointLoader {
diff --git a/src/main/java/com/att/nsa/cambria/metabroker/Broker.java b/src/main/java/com/att/dmf/mr/metabroker/Broker.java
index 8c1fff5..e5fe8da 100644
--- a/src/main/java/com/att/nsa/cambria/metabroker/Broker.java
+++ b/src/main/java/com/att/dmf/mr/metabroker/Broker.java
@@ -8,35 +8,35 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metabroker;
+package com.att.dmf.mr.metabroker;
import java.util.List;
-import com.att.nsa.cambria.CambriaApiException;
+import com.att.dmf.mr.CambriaApiException;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
/**
* A broker interface to manage metadata around topics, etc.
*
- * @author author
+ * @author peter
*
*/
public interface Broker {
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class TopicExistsException extends Exception {
@@ -81,12 +81,12 @@ public interface Broker {
* @throws CambriaApiException
*/
Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
- boolean transactionEnabled) throws TopicExistsException, CambriaApiException;
+ boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException;
/**
* Delete a topic by name
*
* @param topic
*/
- void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException;
+ void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException;
}
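Note that both hunks above widen the throws clauses with ConfigDbException, which ripples to every call site. A hedged sketch of a caller adapted to the new signature (the 409 status code and the argument values are illustrative choices):

import com.att.dmf.mr.CambriaApiException;
import com.att.dmf.mr.metabroker.Broker;
import com.att.dmf.mr.metabroker.Topic;
import com.att.nsa.configs.ConfigDbException;

public class BrokerCallSketch {
    // Illustrative call site for the widened signature: ConfigDbException is
    // now a checked exception here too.
    static Topic create(Broker broker) throws CambriaApiException {
        try {
            // partitions=3, replicas=1, transactions disabled -- illustrative values
            return broker.createTopic("example.topic", "demo topic", "ownerApiKey", 3, 1, false);
        } catch (Broker.TopicExistsException | ConfigDbException e) {
            throw new CambriaApiException(409, e.getMessage());
        }
    }
}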
diff --git a/src/main/java/com/att/dmf/mr/metabroker/Broker1.java b/src/main/java/com/att/dmf/mr/metabroker/Broker1.java
new file mode 100644
index 0000000..e7d7f6c
--- /dev/null
+++ b/src/main/java/com/att/dmf/mr/metabroker/Broker1.java
@@ -0,0 +1,95 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.dmf.mr.metabroker;
+
+import java.util.List;
+
+import com.att.dmf.mr.CambriaApiException;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+
+/**
+ * A broker interface to manage metadata around topics, etc.
+ * This is an alternate for Broker, introduced to avoid this error in Spring Boot:
+ * org.springframework.beans.factory.NoUniqueBeanDefinitionException:
+ * No qualifying bean of type [com.att.dmf.mr.metabroker.Broker] is defined:
+ * expected single matching bean but found 2: mmb,dMaaPKafkaMetaBroker
+
+ *
+ */
+public interface Broker1 {
+ /**
+ *
+ * @author Ramkumar
+ *
+ */
+ public class TopicExistsException extends Exception {
+ /**
+ *
+ * @param topicName
+ */
+ public TopicExistsException(String topicName) {
+ super("Topic " + topicName + " exists.");
+ }
+
+ private static final long serialVersionUID = 1L;
+ }
+
+ /**
+ * Get all topics in the underlying broker.
+ *
+ * @return
+ * @throws ConfigDbException
+ */
+ List<Topic> getAllTopics() throws ConfigDbException;
+
+ /**
+ * Get a specific topic from the underlying broker.
+ *
+ * @param topic
+ * @return a topic, or null
+ */
+ Topic getTopic(String topic) throws ConfigDbException;
+
+ /**
+ * create a topic
+ *
+ * @param topic
+ * @param description
+ * @param ownerApiKey
+ * @param partitions
+ * @param replicas
+ * @param transactionEnabled
+ * @return
+ * @throws TopicExistsException
+ * @throws CambriaApiException
+ */
+ Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
+ boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException;
+
+ /**
+ * Delete a topic by name
+ *
+ * @param topic
+ */
+ void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException;
+}
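The comment in Broker1 records the motivation: two beans implement Broker, so injection by type alone is ambiguous. For comparison, a standard Spring alternative is to qualify the injection point by bean name; a sketch using the bean names from the error text above:

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

import com.att.dmf.mr.metabroker.Broker;

@Component
public class QualifiedInjectionSketch {
    // Selects one of the two Broker beans by name rather than by type alone,
    // which also avoids NoUniqueBeanDefinitionException.
    @Autowired
    @Qualifier("dMaaPKafkaMetaBroker")
    private Broker metaBroker;
}

Introducing Broker1 instead trades that per-site qualifier for a second interface that only one bean implements.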
diff --git a/src/main/java/com/att/nsa/cambria/metabroker/Topic.java b/src/main/java/com/att/dmf/mr/metabroker/Topic.java
index b53736d..422a2cc 100644
--- a/src/main/java/com/att/nsa/cambria/metabroker/Topic.java
+++ b/src/main/java/com/att/dmf/mr/metabroker/Topic.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metabroker;
+package com.att.dmf.mr.metabroker;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.security.NsaAcl;
@@ -28,14 +28,14 @@ import com.att.nsa.security.ReadWriteSecuredResource;
/**
* This is the interface for topic and all the topic related operations
* get topic name, owner, description, transactionEnabled etc.
- * @author author
+ * @author nilanjana.maity
*
*/
public interface Topic extends ReadWriteSecuredResource
{
/**
* User-defined exception for access denied while accessing the topic for publisher and consumer
- * @author author
+ * @author nilanjana.maity
*
*//*
public class AccessDeniedException extends Exception
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java
index 7e01fac..45644b7 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaBatchingPublisher.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
import java.io.IOException;
import java.util.List;
@@ -29,7 +29,7 @@ import java.util.concurrent.TimeUnit;
* A Cambria batching publisher is a publisher with additional functionality
* for managing delayed sends.
*
- * @author author
+ * @author peter
*
*/
public interface CambriaBatchingPublisher extends CambriaPublisher
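A later hunk in this diff points callers at CambriaBatchingPublisher.close(long timeout, TimeUnit timeoutUnits) to recapture unsent messages. A hedged sketch of that shutdown pattern (the 20-second timeout is an illustrative value):

import java.util.List;
import java.util.concurrent.TimeUnit;

import com.att.dmf.mr.metrics.publisher.CambriaBatchingPublisher;

public class BatchCloseSketch {
    // Drain pending batched sends on shutdown; close(long, TimeUnit) returns
    // the messages that could not be sent before the timeout elapsed.
    static void shutdown(CambriaBatchingPublisher pub) throws Exception {
        List<?> unsent = pub.close(20, TimeUnit.SECONDS);
        if (!unsent.isEmpty()) {
            System.err.println(unsent.size() + " messages were not sent");
        }
    }
}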
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaClient.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java
index e80235e..0993aa6 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaClient.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
//import org.slf4j.Logger;
@@ -29,7 +29,7 @@ import com.att.eelf.configuration.EELFLogger;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public interface CambriaClient {
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaConsumer.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java
index f7c5f89..4a6ca81 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaConsumer.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java
@@ -8,24 +8,24 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
import java.io.IOException;
/**
* This interface provides the fetch mechanism for a consumer
- * @author author
+ * @author nilanjana.maity
*
*/
public interface CambriaConsumer extends CambriaClient
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java
index 1873f7f..4020a6d 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisher.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
import java.io.IOException;
import java.util.Collection;
@@ -27,7 +27,7 @@ import java.util.Collection;
/**
* A Cambria publishing interface.
*
- * @author author
+ * @author peter
*
*/
public interface CambriaPublisher extends CambriaClient {
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java
index 511053e..1510c32 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/CambriaPublisherUtility.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
@@ -30,7 +30,7 @@ import java.util.List;
import org.apache.http.HttpHost;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class CambriaPublisherUtility
@@ -53,7 +53,7 @@ public class CambriaPublisherUtility
}
catch ( UnsupportedEncodingException e )
{
- throw new AssertionError ( e );
+ throw new RuntimeException ( e );
}
}
/**
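The AssertionError-to-RuntimeException change above sits in a catch of UnsupportedEncodingException, which the imports suggest guards a URLEncoder.encode call. A self-contained sketch of that idiom; the "UTF-8" charset name is an assumption, since the encode call itself is outside the hunk:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class EncodeSketch {
    // Same idiom as the hunk above: URLEncoder.encode declares a checked
    // UnsupportedEncodingException, but "UTF-8" is a required charset on every
    // JVM, so the catch branch cannot realistically fire; rethrow unchecked.
    static String encode(String s) {
        try {
            return URLEncoder.encode(s, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }

    public static void main(String[] args) {
        System.out.println(encode("a b&c")); // a+b%26c
    }
}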
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java b/src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java
index 98e16f7..d02438f 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/DMaaPCambriaClientFactory.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java
@@ -8,26 +8,26 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher;
+package com.att.dmf.mr.metrics.publisher;
import java.net.MalformedURLException;
import java.util.Collection;
import java.util.TreeSet;
import java.util.UUID;
-import com.att.nsa.cambria.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
-import com.att.nsa.cambria.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
+import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
+import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
/**
* A factory for Cambria clients.<br/>
@@ -42,7 +42,7 @@ import com.att.nsa.cambria.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublis
* <br/>
* Publishers
*
- * @author author
+ * @author peter
*/
public class DMaaPCambriaClientFactory {
/**
@@ -52,7 +52,8 @@ public class DMaaPCambriaClientFactory {
*
* @param hostList
* A comma separated list of hosts to use to connect to Cambria.
- * You can include port numbers (3904 is the default).
+ * You can include port numbers (3904 is the default). For
+ * example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com"
*
* @param topic
* The topic to consume
@@ -168,7 +169,8 @@ public class DMaaPCambriaClientFactory {
*
* @param hostList
* A comma separated list of hosts to use to connect to Cambria.
- * You can include port numbers (3904 is the default).
+ * You can include port numbers (3904 is the default). For
+ * example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com"
* @param topic
* The topic to consume
* @param consumerGroup
@@ -239,11 +241,11 @@ public class DMaaPCambriaClientFactory {
if (sfMock != null)
return sfMock;
try {
- return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
- consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
- } catch (MalformedURLException e) {
- throw new RuntimeException(e);
- }
+ return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
+ consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
+ } catch (MalformedURLException e) {
+ throw new RuntimeException(e);
+ }
}
/*************************************************************************/
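The javadoc above fixes the host-list convention: comma-separated entries, port optional, 3904 by default. A sketch of normalizing such a list into explicit host:port pairs — the helper is hypothetical, not the factory's actual parsing code:

import java.util.ArrayList;
import java.util.List;

public class HostListSketch {
    static final int DEFAULT_PORT = 3904; // default noted in the javadoc above

    // Hypothetical helper: normalize "hostA:8080,hostB" to explicit host:port pairs.
    static List<String> normalize(String hostList) {
        List<String> out = new ArrayList<>();
        for (String h : hostList.split(",")) {
            h = h.trim();
            out.add(h.contains(":") ? h : h + ":" + DEFAULT_PORT);
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(normalize("ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com"));
        // [ueb01hydc.it.att.com:8080, ueb02hydc.it.att.com:3904]
    }
}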
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java
index bd480e3..08b2fd1 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/CambriaBaseClient.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher.impl;
+package com.att.dmf.mr.metrics.publisher.impl;
import java.net.MalformedURLException;
import java.util.Collection;
@@ -29,30 +29,32 @@ import java.util.concurrent.TimeUnit;
import org.json.JSONArray;
import org.json.JSONException;
+
+import com.att.dmf.mr.constants.CambriaConstants;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import com.att.nsa.apiClient.http.CacheUse;
import com.att.nsa.apiClient.http.HttpClient;
-import com.att.nsa.cambria.constants.CambriaConstants;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
-public class CambriaBaseClient extends HttpClient implements com.att.nsa.cambria.metrics.publisher.CambriaClient
+public class CambriaBaseClient extends HttpClient implements com.att.dmf.mr.metrics.publisher.CambriaClient
{
protected CambriaBaseClient ( Collection<String> hosts ) throws MalformedURLException
{
this ( hosts, null );
}
- protected CambriaBaseClient ( Collection<String> hosts, String clientSignature ) throws MalformedURLException
+ public CambriaBaseClient ( Collection<String> hosts, String clientSignature ) throws MalformedURLException
{
-// super ( hosts, CambriaConstants.kStdCambriaServicePort, clientSignature,
-// CacheUse.NONE, 1, 1, TimeUnit.MILLISECONDS );
+ /*super ( hosts, CambriaConstants.kStdCambriaServicePort, clientSignature,
+ CacheUse.NONE, 1, 1, TimeUnit.MILLISECONDS );*/
+
super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000);
//fLog = LoggerFactory.getLogger ( this.getClass().getName () );
@@ -65,7 +67,7 @@ public class CambriaBaseClient extends HttpClient implements com.att.nsa.cambria
{
}
- protected Set<String> jsonArrayToSet ( JSONArray a ) throws JSONException
+ public Set<String> jsonArrayToSet ( JSONArray a ) throws JSONException
{
if ( a == null ) return null;
@@ -86,7 +88,7 @@ public class CambriaBaseClient extends HttpClient implements com.att.nsa.cambria
//replaceLogger ( log );
}
- protected EELFLogger getLog ()
+ public EELFLogger getLog ()
{
return fLog;
}
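jsonArrayToSet, widened to public above, returns null for null input per the guard shown; the rest of the method is elided. A sketch of the behavior that guard implies, under the assumption that the remainder simply copies each element into the set:

import java.util.HashSet;
import java.util.Set;

import org.json.JSONArray;
import org.json.JSONException;

public class JsonArrayToSetSketch {
    // Assumption: null in, null out (per the visible guard); otherwise each
    // array element becomes a set member. The real method body is elided.
    static Set<String> jsonArrayToSet(JSONArray a) throws JSONException {
        if (a == null) return null;
        Set<String> set = new HashSet<>();
        for (int i = 0; i < a.length(); i++) {
            set.add(a.getString(i));
        }
        return set;
    }
}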
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/Clock.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java
index 1702ec8..7463700 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/Clock.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java
@@ -8,23 +8,23 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher.impl;
+package com.att.dmf.mr.metrics.publisher.impl;
/**
*
* This class maintains the system clock
- * @author author
+ * @author nilanjana.maity
*
*/
public class Clock
@@ -53,7 +53,7 @@ public class Clock
* Get current time in milliseconds
* @return current time in ms
*/
- protected long nowImpl ()
+ public long nowImpl ()
{
return System.currentTimeMillis ();
}
@@ -61,13 +61,13 @@ public class Clock
/**
* Initialize constructor
*/
- protected Clock ()
+ public Clock ()
{
}
private static Clock sfClock = null;
- protected synchronized static void register ( Clock testClock )
+ public synchronized static void register ( Clock testClock )
{
sfClock = testClock;
}
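Making Clock's constructor, nowImpl, and register public turns the class into a test seam. A hedged sketch of swapping in a deterministic clock; it assumes the elided Clock.now() delegates to the registered instance's nowImpl:

import com.att.dmf.mr.metrics.publisher.impl.Clock;

public class FixedClockSketch extends Clock {
    private final long fixedMs;

    public FixedClockSketch(long fixedMs) {
        this.fixedMs = fixedMs;
    }

    @Override
    public long nowImpl() {
        return fixedMs; // deterministic time for tests
    }

    public static void main(String[] args) {
        Clock.register(new FixedClockSketch(1_500_000_000_000L));
        // code under test that reads the clock now sees the fixed timestamp
    }
}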
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
index adff2a7..ee56213 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher.impl;
+package com.att.dmf.mr.metrics.publisher.impl;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
@@ -29,23 +29,23 @@ import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
-import jline.internal.Log;
-
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
+import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility;
import com.att.nsa.apiClient.http.HttpException;
import com.att.nsa.apiClient.http.HttpObjectNotFoundException;
-import com.att.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
+
+import jline.internal.Log;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
- implements com.att.nsa.cambria.metrics.publisher.CambriaConsumer {
+ implements com.att.dmf.mr.metrics.publisher.CambriaConsumer {
private final String fTopic;
private final String fGroup;
private final String fId;
@@ -64,7 +64,6 @@ public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
* @param filter
* @param apiKey
* @param apiSecret
- * @throws MalformedURLException
*/
public DMaaPCambriaConsumerImpl(Collection<String> hostPart, final String topic, final String consumerGroup,
final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException {
@@ -138,7 +137,7 @@ public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
return msgs;
}
- protected String createUrlPath(int timeoutMs, int limit) {
+ public String createUrlPath(int timeoutMs, int limit) {
final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId));
final StringBuilder adds = new StringBuilder();
if (timeoutMs > -1) {
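createUrlPath, now public, appends query parameters only when the caller set them (the timeoutMs > -1 guard is visible; the rest of the method is elided). A hypothetical reconstruction of that shape — the parameter names "timeout" and "limit" are assumptions:

public class ConsumerUrlSketch {
    // Hypothetical reconstruction: append query parameters only when supplied
    // (> -1). Parameter names are assumptions; the hunk cuts off before them.
    static String createUrlPath(String base, int timeoutMs, int limit) {
        StringBuilder url = new StringBuilder(base);
        StringBuilder adds = new StringBuilder();
        if (timeoutMs > -1) adds.append("timeout=").append(timeoutMs);
        if (limit > -1) {
            if (adds.length() > 0) adds.append("&");
            adds.append("limit=").append(limit);
        }
        if (adds.length() > 0) url.append("?").append(adds);
        return url.toString();
    }

    public static void main(String[] args) {
        System.out.println(createUrlPath("/events/topic/group/id", 15000, 100));
        // /events/topic/group/id?timeout=15000&limit=100
    }
}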
diff --git a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
index d7e6816..d8d8799 100644
--- a/src/main/java/com/att/nsa/cambria/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
+++ b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.metrics.publisher.impl;
+package com.att.dmf.mr.metrics.publisher.impl;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -40,24 +40,24 @@ import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.metrics.publisher.CambriaPublisherUtility;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility;
/**
*
* class DMaaPCambriaSimplerBatchPublisher used to publish messages
* in batches
*
- * @author author
+ * @author anowarul.islam
*
*/
public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
- implements com.att.nsa.cambria.metrics.publisher.CambriaBatchingPublisher {
+ implements com.att.dmf.mr.metrics.publisher.CambriaBatchingPublisher {
/**
*
* static inner class that is initialized with urls, topic, and batchSize
*
- * @author author
+ * @author anowarul.islam
*
*/
public static class Builder {
@@ -118,12 +118,13 @@ public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
*
* @return
*/
- public DMaaPCambriaSimplerBatchPublisher build() {
+ public DMaaPCambriaSimplerBatchPublisher build() {
+
try {
- return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
- } catch (MalformedURLException e) {
- throw new RuntimeException(e);
- }
+ return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
+ } catch (MalformedURLException e) {
+ throw new RuntimeException(e);
+ }
}
private Collection<String> fUrls;
@@ -190,8 +191,7 @@ public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
+ "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close.");
}
} catch (InterruptedException e) {
- getLog().info(" Interruption Exception is caught here : " + e.getMessage());
- Thread.currentThread().interrupt();
+ getLog().warn("Possible message loss. " + e.getMessage(), e);
} catch (IOException e) {
getLog().warn("Possible message loss. " + e.getMessage(), e);
}
@@ -374,7 +374,6 @@ public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
* @param maxBatchSize
* @param maxBatchAgeMs
* @param compress
- * @throws MalformedURLException
*/
private DMaaPCambriaSimplerBatchPublisher(Collection<String> hosts, String topic, int maxBatchSize,
long maxBatchAgeMs, boolean compress) throws MalformedURLException {
@@ -406,7 +405,7 @@ public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
/**
*
*
- * @author author
+ * @author anowarul.islam
*
*/
private static class TimestampedMessage extends message {
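The reindented build() above keeps the same idiom: the private constructor throws the checked MalformedURLException, and build() rethrows it unchecked so callers stay simple. A self-contained sketch of that builder idiom (not the project's class):

import java.net.MalformedURLException;
import java.net.URL;

public class BuilderWrapSketch {
    private String url;

    public BuilderWrapSketch to(String url) {
        this.url = url;
        return this;
    }

    // Same idiom as the build() above: convert the checked MalformedURLException
    // into an unchecked RuntimeException at the builder boundary.
    public URL build() {
        try {
            return new URL(url);
        } catch (MalformedURLException e) {
            throw new RuntimeException(e);
        }
    }

    public static void main(String[] args) {
        URL u = new BuilderWrapSketch().to("http://localhost:3904/events").build();
        System.out.println(u);
    }
}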
diff --git a/src/main/java/com/att/nsa/cambria/resources/CambriaEventSet.java b/src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java
index 85cc902..4565d3a 100644
--- a/src/main/java/com/att/nsa/cambria/resources/CambriaEventSet.java
+++ b/src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.resources;
+package com.att.dmf.mr.resources;
import java.io.IOException;
import java.io.InputStream;
@@ -27,19 +27,19 @@ import java.util.zip.GZIPInputStream;
import javax.servlet.http.HttpServletResponse;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.resources.streamReaders.CambriaJsonStreamReader;
+import com.att.dmf.mr.resources.streamReaders.CambriaRawStreamReader;
+import com.att.dmf.mr.resources.streamReaders.CambriaStreamReader;
+import com.att.dmf.mr.resources.streamReaders.CambriaTextStreamReader;
import com.att.nsa.apiServer.streams.ChunkedInputStream;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.resources.streamReaders.CambriaJsonStreamReader;
-import com.att.nsa.cambria.resources.streamReaders.CambriaRawStreamReader;
-import com.att.nsa.cambria.resources.streamReaders.CambriaStreamReader;
-import com.att.nsa.cambria.resources.streamReaders.CambriaTextStreamReader;
import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
/**
* An inbound event set.
*
- * @author author
+ * @author peter
*/
public class CambriaEventSet {
private final reader fReader;
@@ -99,7 +99,7 @@ public class CambriaEventSet {
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public interface reader {
diff --git a/src/main/java/com/att/nsa/cambria/resources/CambriaOutboundEventStream.java b/src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java
index 7366dde..837c956 100644
--- a/src/main/java/com/att/nsa/cambria/resources/CambriaOutboundEventStream.java
+++ b/src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java
@@ -8,49 +8,57 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.resources;
+package com.att.dmf.mr.resources;
import java.io.IOException;
import java.io.OutputStream;
+import java.util.ArrayList;
import java.util.Date;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.Consumer.Message;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder.StreamWriter;
-import com.att.nsa.cambria.utils.Utils;
-
-import jline.internal.Log;
-
-
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder.StreamWriter;
+import com.att.dmf.mr.utils.Utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+//import com.att.nsa.drumlin.till.nv.rrNvReadable;
+/*import com.att.sa.highlandPark.config.HpConfigContext;
+import com.att.sa.highlandPark.config.HpReaderException;
+import com.att.sa.highlandPark.events.HpJsonEvent;
+import com.att.sa.highlandPark.events.HpJsonEventFactory;
+import com.att.sa.highlandPark.processor.HpAlarmFilter;
+import com.att.sa.highlandPark.processor.HpEvent;
+import com.att.sa.highlandPark.processor.HpProcessingEngine;
+import com.att.sa.highlandPark.processor.HpProcessingEngine.EventFactory;
+*/
/**
* class used to write the consumed messages
*
- * @author author
+ * @author anowarul.islam
*
*/
public class CambriaOutboundEventStream implements StreamWriter {
@@ -61,16 +69,16 @@ public class CambriaOutboundEventStream implements StreamWriter {
* static inner class that takes all the input parameters for the kafka consumer,
* like limit, timeout, meta, pretty
*
- * @author author
+ * @author anowarul.islam
*
*/
public static class Builder {
// Required
private final Consumer fConsumer;
- //private final rrNvReadable fSettings; // used during write to tweak
- // format, decide to explicitly
- // close stream or not
+ // private final rrNvReadable fSettings; // used during write to tweak
+ // format, decide to explicitly
+ // close stream or not
// Optional
private int fLimit;
@@ -78,6 +86,7 @@ public class CambriaOutboundEventStream implements StreamWriter {
private String fTopicFilter;
private boolean fPretty;
private boolean fWithMeta;
+ ArrayList<Consumer> fKafkaConsumerList;
// private int fOffset;
/**
@@ -88,13 +97,14 @@ public class CambriaOutboundEventStream implements StreamWriter {
*/
public Builder(Consumer c) {
this.fConsumer = c;
- //this.fSettings = settings;
+ // this.fSettings = settings;
fLimit = CambriaConstants.kNoTimeout;
fTimeoutMs = CambriaConstants.kNoLimit;
fTopicFilter = CambriaConstants.kNoFilter;
fPretty = false;
fWithMeta = false;
+ //this.fKafkaConsumerList = consList;
// fOffset = CambriaEvents.kNextOffset;
}
@@ -187,31 +197,31 @@ public class CambriaOutboundEventStream implements StreamWriter {
fConsumer = builder.fConsumer;
fLimit = builder.fLimit;
fTimeoutMs = builder.fTimeoutMs;
- //fSettings = builder.fSettings;
+ // fSettings = builder.fSettings;
fSent = 0;
fPretty = builder.fPretty;
fWithMeta = builder.fWithMeta;
-
-// if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) {
-// fHpAlarmFilter = null;
-// fHppe = null;
-// } else {
-// try {
-// final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter));
-// HpConfigContext<HpEvent> cc = new HpConfigContext<HpEvent>();
-// fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter);
-// final EventFactory<HpJsonEvent> ef = new HpJsonEventFactory();
-// fHppe = new HpProcessingEngine<HpJsonEvent>(ef);
-// } catch (HpReaderException e) {
-// // JSON was okay, but the filter engine says it's bogus
-// throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
-// "Couldn't create filter: " + e.getMessage());
-// } catch (JSONException e) {
-// // user sent a bogus JSON object
-// throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
-// "Couldn't parse JSON: " + e.getMessage());
-// }
-// }
+ fKafkaConsumerList = builder.fKafkaConsumerList;
+ /* if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) {
+ fHpAlarmFilter = null;
+ fHppe = null;
+ } else {
+ try {
+ final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter));
+ HpConfigContext<HpEvent> cc = new HpConfigContext<HpEvent>();
+ fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter);
+ final EventFactory<HpJsonEvent> ef = new HpJsonEventFactory();
+ fHppe = new HpProcessingEngine<HpJsonEvent>(ef);
+ } catch (HpReaderException e) {
+ // JSON was okay, but the filter engine says it's bogus
+ throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+ "Couldn't create filter: " + e.getMessage());
+ } catch (JSONException e) {
+ // user sent a bogus JSON object
+ throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
+ "Couldn't parse JSON: " + e.getMessage());
+ }
+ }*/
}
/**
@@ -222,16 +232,20 @@ public class CambriaOutboundEventStream implements StreamWriter {
public interface operation {
/**
* Call thread.sleep
+ *
* @throws IOException
*/
void onWait() throws IOException;
-/**
- * provides the output based in the consumer paramter
- * @param count
- * @param msg
- * @throws IOException
- */
- void onMessage(int count, Message msg) throws IOException;
+
+ /**
+ * provides the output based on the consumer parameter
+ *
+ * @param count
+ * @param msg
+ * @throws IOException
+ */
+ // void onMessage(int count, Message msg) throws IOException;
+ void onMessage(int count, String msg, String transId, long offSet) throws IOException, JSONException;
}
/**
@@ -246,74 +260,63 @@ public class CambriaOutboundEventStream implements StreamWriter {
/**
*
* @param os
- * throws IOException
+ * throws IOException
*/
public void write(final OutputStream os) throws IOException {
- //final boolean transactionEnabled = topic.isTransactionEnabled();
- //final boolean transactionEnabled = isTransEnabled();
- final boolean transactionEnabled = istransEnable;
+ // final boolean transactionEnabled = topic.isTransactionEnabled();
+ // final boolean transactionEnabled = isTransEnabled();
+ // final boolean transactionEnabled = istransEnable;
+ // synchronized(this){
os.write('[');
-
fSent = forEachMessage(new operation() {
@Override
- public void onMessage(int count, Message msg) throws IOException, JSONException {
-
- String message = "";
- JSONObject jsonMessage = null;
- if (transactionEnabled) {
- jsonMessage = new JSONObject(msg.getMessage());
- message = jsonMessage.getString("message");
- }
+ public void onMessage(int count, String msg, String transId, long offSet)
+ throws IOException, JSONException {
if (count > 0) {
os.write(',');
}
-
if (fWithMeta) {
final JSONObject entry = new JSONObject();
- entry.put("offset", msg.getOffset());
- entry.put("message", message);
+ entry.put("offset", offSet);
+ entry.put("message", msg);
os.write(entry.toString().getBytes());
} else {
- //os.write(message.getBytes());
- String jsonString = "";
- if(transactionEnabled){
- jsonString= JSONObject.valueToString(message);
- }else{
- jsonString = JSONObject.valueToString (msg.getMessage());
- }
- os.write ( jsonString.getBytes () );
+ // os.write(message.getBytes());
+ String jsonString = JSONObject.valueToString(msg);
+ os.write(jsonString.getBytes());
}
if (fPretty) {
os.write('\n');
}
-
- String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
- if (null==metricTopicname)
- metricTopicname="msgrtr.apinode.metrics.dmaap";
-
- if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
- if (transactionEnabled) {
- final String transactionId = jsonMessage.getString("transactionId");
- responseTransactionId = transactionId;
-
- StringBuilder consumerInfo = new StringBuilder();
- if (null != dmaapContext && null != dmaapContext.getRequest()) {
- final HttpServletRequest request = dmaapContext.getRequest();
- consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
- consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
- consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
- consumerInfo.append(
- "consumerGroup= \"" + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
- consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
+ String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap
+ .getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic");
+ if (null == metricTopicname)
+ metricTopicname = "msgrtr.apinode.metrics.dmaap";
+ if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
+ try {
+ if (istransEnable && istransType) {
+ // final String transactionId =
+ // jsonMessage.getString("transactionId");
+ // responseTransactionId = transId;
+ StringBuilder consumerInfo = new StringBuilder();
+ if (null != dmaapContext && null != dmaapContext.getRequest()) {
+ final HttpServletRequest request = dmaapContext.getRequest();
+ consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
+ consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
+ consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
+ consumerInfo.append("consumerGroup= \""
+ + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
+ consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
+ }
+ log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transId
+ + "\",messageLength= \"" + msg.length() + "\",topic= \"" + topic.getName() + "\"]");
+ }
+ } catch (Exception e) {
}
-
- log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transactionId
- + "\",messageLength= \"" + message.length() + "\",topic= \"" + topic.getName() + "\"]");
}
- }
}
@@ -321,6 +324,7 @@ public class CambriaOutboundEventStream implements StreamWriter {
/**
*
* It makes the thread wait
+ *
* @throws IOException
*/
public void onWait() throws IOException {
@@ -329,15 +333,14 @@ public class CambriaOutboundEventStream implements StreamWriter {
// FIXME: would be good to wait/signal
Thread.sleep(100);
} catch (InterruptedException e) {
- Log.error(e.toString());
- Thread.currentThread().interrupt();
+ // ignore
}
}
});
- //if (null != dmaapContext && isTransactionEnabled()) {
- if (null != dmaapContext && istransEnable) {
-
+ // if (null != dmaapContext && isTransactionEnabled()) {
+ if (null != dmaapContext && istransEnable && istransType) {
+
dmaapContext.getResponse().setHeader("transactionId",
Utils.getResponseTransactionId(responseTransactionId));
}
@@ -346,12 +349,14 @@ public class CambriaOutboundEventStream implements StreamWriter {
os.flush();
boolean close_out_stream = true;
- String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"close.output.stream");
- if(null!=strclose_out_stream)close_out_stream=Boolean.parseBoolean(strclose_out_stream);
-
- //if (fSettings.getBoolean("close.output.stream", true)) {
- if (close_out_stream) {
+ String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "close.output.stream");
+ if (null != strclose_out_stream)
+ close_out_stream = Boolean.parseBoolean(strclose_out_stream);
+
+ // if (fSettings.getBoolean("close.output.stream", true)) {
+ if (close_out_stream) {
os.close();
+ // }
}
}
@@ -371,21 +376,22 @@ public class CambriaOutboundEventStream implements StreamWriter {
}
return null;
}
-/**
- *
- * @param op
- * @return
- * @throws IOException
- * @throws JSONException
- */
+
+ /**
+ *
+ * @param op
+ * @return
+ * @throws IOException
+ * @throws JSONException
+ */
public int forEachMessage(operation op) throws IOException, JSONException {
final int effectiveLimit = (fLimit == 0 ? kTopLimit : fLimit);
int count = 0;
boolean firstPing = true;
-
+ // boolean isTransType=false;
final long startMs = System.currentTimeMillis();
- final long timeoutMs = fTimeoutMs + startMs;
+ final long timeoutMs = fTimeoutMs + startMs -500; //500 ms used in poll
while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) {
if (!firstPing) {
@@ -393,23 +399,63 @@ public class CambriaOutboundEventStream implements StreamWriter {
}
firstPing = false;
- Consumer.Message msg = null;
- while (count < effectiveLimit && (msg = fConsumer.nextMessage()) != null) {
+
+ Consumer.Message msgRecord = null;
+ while (count < effectiveLimit && (msgRecord =
+ fConsumer.nextMessage()) != null) {
-
String message = "";
- // if (topic.isTransactionEnabled() || true) {
- if (istransEnable) {
- // As part of DMaaP changes we are wrapping the original
- // message into a json object
- // and then this json object is further wrapped into message
- // object before publishing,
- // so extracting the original message from the message
- // object for matching with filter.
- final JSONObject jsonMessage = new JSONObject(msg.getMessage());
- message = jsonMessage.getString("message");
- } else {
- message = msg.getMessage();
+ String transactionid = "";
+ try {
+ // String msgRecord = msg;
+ JSONObject jsonMessage = new JSONObject(msgRecord);
+ String[] keys = JSONObject.getNames(jsonMessage);
+ boolean wrapheader1 = false;
+ boolean wrapheader2 = false;
+ boolean found_attr3 = false;
+ String wrapElement1 = "message";
+ String wrapElement2 = "msgWrapMR";
+ String transIdElement = "transactionId";
+ if (null != keys) {
+ for (String key : keys) {
+ if (key.equals(wrapElement1)) {
+ wrapheader1 = true;
+ } else if (key.equals(wrapElement2)) {
+ wrapheader2 = true;
+ } else if (key.equals(transIdElement)) {
+ found_attr3 = true;
+ transactionid = jsonMessage.getString(key);
+ }
+ }
+ }
+
+ // returns contents of attribute 1 if both attributes
+ // present, otherwise
+ // the whole msg
+ if (wrapheader2 && found_attr3) {
+ message = jsonMessage.getString(wrapElement2);
+ } else if (wrapheader1 && found_attr3) {
+ message = jsonMessage.getString(wrapElement1);
+ } else {
+ message = msgRecord.getMessage();
+ }
+ // jsonMessage = extractMessage(jsonMessage ,
+ // "message","msgWrapMR","transactionId");
+ istransType = true;
+ } catch (JSONException e) { // This check is required for the
+ // message sent by MR AAF flow but
+ // consumed by UEB ACL flow which
+ // wont expect transaction id in
+ // cambria client api
+ // Ignore
+ log.info("JSON Exception logged when the message is non JSON Format");
+ } catch (Exception exp) {
+ log.info("****Some Exception occured for writing messages in topic" + topic.getName()
+ + " Exception" + exp);
+ }
+ if (message == null || message.equals("")) {
+ istransType = false;
+ message = msgRecord.getMessage();
}
// If filters are enabled/set, message should be in JSON format
@@ -417,23 +463,26 @@ public class CambriaOutboundEventStream implements StreamWriter {
// otherwise filter will automatically ignore message in
// non-json format.
if (filterMatches(message)) {
- op.onMessage(count, msg);
+ op.onMessage(count, message, transactionid, msgRecord.getOffset());
count++;
+
}
+
}
}
-
return count;
}
+
+
/**
*
* Checks whether filter is initialized
*/
-// private boolean isFilterInitialized() {
-// return (fHpAlarmFilter != null && fHppe != null);
-// }
-
+ /*private boolean isFilterInitialized() {
+ return (fHpAlarmFilter != null && fHppe != null);
+ }
+*/
/**
*
* @param msg
@@ -441,18 +490,18 @@ public class CambriaOutboundEventStream implements StreamWriter {
*/
private boolean filterMatches(String msg) {
boolean result = true;
-// if (isFilterInitialized()) {
-// try {
-// final HpJsonEvent e = new HpJsonEvent("e", new JSONObject(msg));
-// result = fHpAlarmFilter.matches(fHppe, e);
-// } catch (JSONException x) {
-// // the msg may not be JSON
-// result = false;
-// log.error("Failed due to " + x.getMessage());
-// } catch (Exception x) {
-// log.error("Error using filter: " + x.getMessage(), x);
-// }
-// }
+ /*if (isFilterInitialized()) {
+ try {
+ final HpJsonEvent e = new HpJsonEvent("e", new JSONObject(msg));
+ result = fHpAlarmFilter.matches(fHppe, e);
+ } catch (JSONException x) {
+ // the msg may not be JSON
+ result = false;
+ log.error("Failed due to " + x.getMessage());
+ } catch (Exception x) {
+ log.error("Error using filter: " + x.getMessage(), x);
+ }
+ }*/
return result;
}
@@ -472,48 +521,34 @@ public class CambriaOutboundEventStream implements StreamWriter {
public void setTopic(Topic topic) {
this.topic = topic;
}
-
+
public void setTopicStyle(boolean aaftopic) {
this.isAAFTopic = aaftopic;
}
-
- public void setTransEnabled ( boolean transEnable) {
+
+ public void setTransEnabled(boolean transEnable) {
this.istransEnable = transEnable;
}
- /*private boolean isTransactionEnabled() {
- //return topic.isTransactionEnabled();
- return true; // let metrics creates for all the topics
- }*/
-
- private boolean isTransEnabled() {
- String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"transidUEBtopicreqd");
- boolean istransidreqd=false;
- if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) || isAAFTopic){
- istransidreqd = true;
- }
-
- return istransidreqd;
-
- }
private final Consumer fConsumer;
private final int fLimit;
private final int fTimeoutMs;
- //private final rrNvReadable fSettings;
+ // private final rrNvReadable fSettings;
private final boolean fPretty;
private final boolean fWithMeta;
private int fSent;
// private final HpAlarmFilter<HpJsonEvent> fHpAlarmFilter;
-// private final HpProcessingEngine<HpJsonEvent> fHppe;
+ //private final HpProcessingEngine<HpJsonEvent> fHppe;
private DMaaPContext dmaapContext;
private String responseTransactionId;
private Topic topic;
private boolean isAAFTopic = false;
private boolean istransEnable = false;
-
+ private ArrayList<Consumer> fKafkaConsumerList;
+ private boolean istransType = true;
+ // private static final Logger log =
+ // Logger.getLogger(CambriaOutboundEventStream.class);
- //private static final Logger log = Logger.getLogger(CambriaOutboundEventStream.class);
-
private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class);
} \ No newline at end of file
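The rewritten forEachMessage above replaces the transaction-only unwrap with attribute sniffing: a record that parses as JSON and carries a transactionId plus a wrapper attribute yields the wrapped payload, with "msgWrapMR" taking precedence over "message"; anything else passes through raw. A standalone sketch of exactly those rules:

import org.json.JSONException;
import org.json.JSONObject;

public class UnwrapSketch {
    // Restatement of the unwrap rules added above: JSON record with a
    // transactionId and a wrapper attribute ("msgWrapMR" preferred over
    // "message") yields the wrapped payload; otherwise the raw record.
    static String unwrap(String record) {
        try {
            JSONObject json = new JSONObject(record);
            if (json.has("transactionId")) {
                if (json.has("msgWrapMR")) return json.getString("msgWrapMR");
                if (json.has("message")) return json.getString("message");
            }
        } catch (JSONException e) {
            // non-JSON payload (e.g. the UEB ACL flow) -- fall through to raw
        }
        return record;
    }

    public static void main(String[] args) {
        System.out.println(unwrap("{\"transactionId\":\"t1\",\"message\":\"hello\"}")); // hello
        System.out.println(unwrap("plain text event")); // plain text event
    }
}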
diff --git a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java
index c8172a9..98ddb50 100644
--- a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaJsonStreamReader.java
+++ b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java
@@ -8,21 +8,20 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.resources.streamReaders;
+package com.att.dmf.mr.resources.streamReaders;
import java.io.InputStream;
-import java.util.logging.Logger;
import javax.servlet.http.HttpServletResponse;
@@ -30,22 +29,19 @@ import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.resources.CambriaEventSet.reader;
-
-import jline.internal.Log;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.resources.CambriaEventSet.reader;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public class CambriaJsonStreamReader implements reader {
- private static final Logger LOG = Logger.getLogger(CambriaJsonStreamReader.class.toString());
private final JSONTokener fTokens;
- private final boolean fIsList;
+ private final boolean fIsList;
private long fCount;
private final String fDefPart;
public static final String kKeyField = "cambria.partition";
@@ -72,7 +68,6 @@ public class CambriaJsonStreamReader implements reader {
throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object.");
}
} catch (JSONException e) {
- Log.error(e);
throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
}
}
@@ -114,7 +109,6 @@ public class CambriaJsonStreamReader implements reader {
fCount++;
return new msg(o);
} catch (JSONException e) {
- Log.error(e.toString());
throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
}
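For illustration, a minimal sketch of the array-vs-object dispatch this reader performs on construction. The standalone helper name isJsonList is hypothetical; the peek-and-push-back pattern and the error message follow the code above.

    import java.io.InputStream;
    import java.io.InputStreamReader;
    import javax.servlet.http.HttpServletResponse;
    import org.json.JSONTokener;
    import com.att.dmf.mr.CambriaApiException;

    // Peek at the first non-whitespace character to decide between a JSON
    // array (a list of events) and a single JSON object (one event), then
    // push it back so the tokener can re-read it during parsing.
    static boolean isJsonList(InputStream is) throws CambriaApiException {
        final JSONTokener tokens = new JSONTokener(new InputStreamReader(is));
        final char first = tokens.nextClean(); // skips leading whitespace
        tokens.back();                         // un-read it for later parsing
        if (first == '[') return true;         // array of events
        if (first == '{') return false;        // single event object
        throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
                "Expecting an array or an object.");
    }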
diff --git a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java
index d562360..376d140 100644
--- a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaRawStreamReader.java
+++ b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java
@@ -8,34 +8,34 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.resources.streamReaders;
+package com.att.dmf.mr.resources.streamReaders;
import java.io.IOException;
import java.io.InputStream;
import javax.servlet.http.HttpServletResponse;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.resources.CambriaEventSet.reader;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.resources.CambriaEventSet.reader;
import com.att.nsa.util.StreamTools;
/**
*
* This stream reader reads raw bytes creating a single message.
- * @author author
+ * @author peter
*
*/
public class CambriaRawStreamReader implements reader
@@ -130,7 +130,7 @@ public class CambriaRawStreamReader implements reader
}
catch ( IOException e )
{
- throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.toString());
+ throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
}
}
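For illustration, a minimal sketch of the "whole request body becomes one message" behaviour this raw reader implements. The helper name readWholeBody is hypothetical, and plain java.io is used rather than guessing at the com.att.nsa.util.StreamTools API imported above.

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    // Drain the request body into a byte array; the result becomes the
    // payload of exactly one message.
    static byte[] readWholeBody(InputStream is) throws IOException {
        final ByteArrayOutputStream buf = new ByteArrayOutputStream();
        final byte[] chunk = new byte[4096];
        int n;
        while ((n = is.read(chunk)) != -1) {
            buf.write(chunk, 0, n);
        }
        return buf.toByteArray();
    }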
diff --git a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java
index 38359f0..3dbf339 100644
--- a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaStreamReader.java
+++ b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java
@@ -8,28 +8,28 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.resources.streamReaders;
+package com.att.dmf.mr.resources.streamReaders;
import java.io.IOException;
import java.io.InputStream;
import javax.servlet.http.HttpServletResponse;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.resources.CambriaEventSet.reader;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.resources.CambriaEventSet.reader;
/**
* Read an optionally chunked stream in the Cambria app format. This format
@@ -41,7 +41,7 @@ import com.att.nsa.cambria.resources.CambriaEventSet.reader;
* Whitespace before/after each entry is ignored, so messages can be delivered
* with newlines between them, or not.
*
- * @author author
+ * @author peter
*
*/
public class CambriaStreamReader implements reader {
diff --git a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java
index 41b9275..b06e17a 100644
--- a/src/main/java/com/att/nsa/cambria/resources/streamReaders/CambriaTextStreamReader.java
+++ b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java
@@ -8,44 +8,40 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.resources.streamReaders;
+package com.att.dmf.mr.resources.streamReaders;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
-import java.util.logging.Logger;
import javax.servlet.http.HttpServletResponse;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.resources.CambriaEventSet.reader;
-
-import jline.internal.Log;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.resources.CambriaEventSet.reader;
/**
* This stream reader just pulls single lines. It uses the default partition if provided. If
* not, the key is the current time, which does not guarantee ordering.
*
- * @author author
+ * @author peter
*
*/
public class CambriaTextStreamReader implements reader
{
- private Logger log = Logger.getLogger(CambriaTextStreamReader.class.toString());
/**
* This is the constructor for Cambria Text Reader format
* @param is
@@ -135,7 +131,6 @@ public class CambriaTextStreamReader implements reader
}
catch ( IOException e )
{
- Log.error(e);
throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
}
}
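For illustration, a sketch of the line-per-message loop described in the javadoc above, including the time-based fallback key; the names readLines and defaultPartition are hypothetical.

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;

    // One message per line; when no default partition is configured the key
    // is the current time, which does not guarantee ordering.
    static void readLines(InputStream is, String defaultPartition) throws IOException {
        final BufferedReader in = new BufferedReader(new InputStreamReader(is));
        String line;
        while ((line = in.readLine()) != null) {
            final String key = (defaultPartition != null)
                    ? defaultPartition
                    : Long.toString(System.currentTimeMillis());
            // hand (key, line) off as a single message
        }
    }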
diff --git a/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticator.java b/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java
index fa4fe17..59196d2 100644
--- a/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticator.java
+++ b/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java
@@ -8,29 +8,29 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.security;
+package com.att.dmf.mr.security;
import javax.servlet.http.HttpServletRequest;
-import com.att.nsa.cambria.CambriaApiException;
+import com.att.dmf.mr.CambriaApiException;
/**
*
- * @author author
+ * @author sneha.d.desai
*
*/
public interface DMaaPAAFAuthenticator {
diff --git a/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java b/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java
index e8f697e..b550373 100644
--- a/src/main/java/com/att/nsa/cambria/security/DMaaPAAFAuthenticatorImpl.java
+++ b/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java
@@ -8,30 +8,28 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.security;
-
+package com.att.dmf.mr.security;
import javax.servlet.http.HttpServletRequest;
-
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.constants.CambriaConstants;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.constants.CambriaConstants;
/**
*
- * @author author
+ * @author sneha.d.desai
*
*/
public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
@@ -48,6 +46,7 @@ public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
auth = true;
}
+ //System.out.println("role " +role +" user: "+ req.getRemoteUser() +" : auth="+auth);
return auth;
}
@@ -55,9 +54,28 @@ public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
public String aafPermissionString(String topicName, String action) throws CambriaApiException {
- String mrNameS = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
- "msgRtr.namespace.aaf");
- String permission = mrNameS+"|:topic."+topicName+"|"+action;
+ String permission = "";
+ String nameSpace ="";
+ if(topicName.contains(".") && topicName.contains("com.att")) {
+ //String topic = topicName.substring(topicName.lastIndexOf(".")+1);
+ nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
+ }
+ else {
+ nameSpace = null;
+ nameSpace= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"defaultNSforUEB");
+
+ if(null==nameSpace)nameSpace="com.att.dmaap.mr.ueb";
+
+
+ /*ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.TOPIC_NOT_IN_AAF.getResponseCode(), "Topic does not exist in AAF"
+ , null, Utils.getFormattedDate(new Date()), topicName,
+ null, null, null, null);
+
+ throw new CambriaApiException(errRes);*/
+ }
+
+ permission = nameSpace + ".mr.topic|:topic." + topicName + "|" + action;
return permission;
}
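For illustration, how the rewritten aafPermissionString behaves; the expected values are worked out from the branches above, and the demo method plus the assumption that defaultNSforUEB is unset are hypothetical.

    // Topic under com.att: the namespace is everything before the last dot.
    //   aafPermissionString("com.att.ecomp.myTopic", "sub")
    //     -> "com.att.ecomp.mr.topic|:topic.com.att.ecomp.myTopic|sub"
    // Topic with no dots: the namespace falls back to the defaultNSforUEB
    // property, or "com.att.dmaap.mr.ueb" when that property is unset.
    //   aafPermissionString("myTopic", "pub")
    //     -> "com.att.dmaap.mr.ueb.mr.topic|:topic.myTopic|pub"
    static void demo() throws CambriaApiException {
        final DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
        System.out.println(aaf.aafPermissionString("com.att.ecomp.myTopic", "sub"));
        System.out.println(aaf.aafPermissionString("myTopic", "pub"));
    }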
diff --git a/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticator.java b/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java
index 5e7073d..848d4cc 100644
--- a/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticator.java
+++ b/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java
@@ -8,28 +8,28 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.security;
+package com.att.dmf.mr.security;
import javax.servlet.http.HttpServletRequest;
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
import com.att.nsa.security.NsaApiKey;
/**
* An interface for authenticating an inbound request.
- * @author author
+ * @author nilanjana.maity
*
* @param <K> NsaApiKey
*/
diff --git a/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImpl.java b/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java
index 733daf7..1641901 100644
--- a/src/main/java/com/att/nsa/cambria/security/DMaaPAuthenticatorImpl.java
+++ b/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java
@@ -8,34 +8,32 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.security;
+package com.att.dmf.mr.security;
import java.util.LinkedList;
import javax.servlet.http.HttpServletRequest;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.NsaAuthenticator;
-import com.att.nsa.security.authenticators.OriginalUebAuthenticator;
import com.att.nsa.security.db.NsaApiDb;
import com.att.nsa.security.db.simple.NsaSimpleApiKey;
/**
*
- * @author author
+ * @author anowarul.islam
*
* @param <K>
*/
@@ -48,7 +46,7 @@ public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthent
// Setting timeout to a large value for testing purpose.
// private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10; //
// 10 minutes
- private static final long kDefaultRequestTimeWindow = (long)1000 * 60 * 10 * 10 * 10 * 10 * 10;
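+ // computed in long arithmetic; the same product evaluated in int would overflow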
+ private static final long kDefaultRequestTimeWindow = 1000L * 60 * 10 * 10 * 10 * 10 * 10;
/**
* Construct the security manager against an API key database
@@ -131,6 +129,5 @@ public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthent
{
this.fAuthenticators.add(a);
}
-
}
diff --git a/src/main/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java b/src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java
index c2a41cc..e9f28ae 100644
--- a/src/main/java/com/att/nsa/cambria/security/impl/DMaaPMechIdAuthenticator.java
+++ b/src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java
@@ -8,35 +8,33 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.security.impl;
+package com.att.dmf.mr.security.impl;
import javax.servlet.http.HttpServletRequest;
-
-
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.security.DMaaPAuthenticator;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.authenticators.MechIdAuthenticator;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.security.DMaaPAuthenticator;
//import com.att.nsa.security.db.NsaApiDb;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.authenticators.MechIdAuthenticator;
/**
* An authenticator for AT&T MechIds.
*
- * @author author
+ * @author peter
*
* @param <K>
*/
diff --git a/src/main/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java b/src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java
index fdcf7c1..a26c9e7 100644
--- a/src/main/java/com/att/nsa/cambria/security/impl/DMaaPOriginalUebAuthenticator.java
+++ b/src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java
@@ -8,27 +8,29 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.security.impl;
+package com.att.dmf.mr.security.impl;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
+
import javax.servlet.http.HttpServletRequest;
+
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.security.DMaaPAuthenticator;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.security.DMaaPAuthenticator;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.drumlin.till.data.sha1HmacSigner;
import com.att.nsa.security.NsaApiKey;
@@ -38,7 +40,7 @@ import com.att.nsa.security.db.NsaApiDb;
* This authenticator handles an AWS-like authentication, originally used by the
* Cambria server (the API server for UEB).
*
- * @author author
+ * @author peter
*
* @param <K>
*/
diff --git a/src/main/java/com/att/nsa/cambria/service/AdminService.java b/src/main/java/com/att/dmf/mr/service/AdminService.java
index 6f0d9cf..aaf7c0b 100644
--- a/src/main/java/com/att/nsa/cambria/service/AdminService.java
+++ b/src/main/java/com/att/dmf/mr/service/AdminService.java
@@ -8,29 +8,29 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
import java.io.IOException;
import org.json.JSONException;
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
/**
- * @author author
+ * @author muzainulhaque.qazi
*
*/
public interface AdminService {
diff --git a/src/main/java/com/att/nsa/cambria/service/ApiKeysService.java b/src/main/java/com/att/dmf/mr/service/ApiKeysService.java
index 6fc9c0d..57fc8be 100644
--- a/src/main/java/com/att/nsa/cambria/service/ApiKeysService.java
+++ b/src/main/java/com/att/dmf/mr/service/ApiKeysService.java
@@ -8,25 +8,25 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
import java.io.IOException;
-import com.att.nsa.cambria.beans.ApiKeyBean;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.dmf.mr.beans.ApiKeyBean;
+import com.att.dmf.mr.beans.DMaaPContext;
import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
/**
diff --git a/src/main/java/com/att/nsa/cambria/service/EventsService.java b/src/main/java/com/att/dmf/mr/service/EventsService.java
index 477538d..2f89bd2 100644
--- a/src/main/java/com/att/nsa/cambria/service/EventsService.java
+++ b/src/main/java/com/att/dmf/mr/service/EventsService.java
@@ -8,33 +8,33 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
import java.io.IOException;
import java.io.InputStream;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public interface EventsService {
diff --git a/src/main/java/com/att/nsa/cambria/service/MMService.java b/src/main/java/com/att/dmf/mr/service/MMService.java
index 5c14674..ae01bbf 100644
--- a/src/main/java/com/att/nsa/cambria/service/MMService.java
+++ b/src/main/java/com/att/dmf/mr/service/MMService.java
@@ -8,28 +8,26 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
import java.io.IOException;
import java.io.InputStream;
-import org.json.JSONException;
-
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
@@ -37,7 +35,7 @@ import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
/**
* Contains the logic for executing calls to the Mirror Maker agent tool.
*
- * @author <a href="mailto:"></a>
+ * @author <a href="mailto:kawsar.jahan@att.com">Kawsar Jahan</a>
*
* @since May 25, 2016
*/
diff --git a/src/main/java/com/att/nsa/cambria/service/MetricsService.java b/src/main/java/com/att/dmf/mr/service/MetricsService.java
index 6b11682..b6cc60d 100644
--- a/src/main/java/com/att/nsa/cambria/service/MetricsService.java
+++ b/src/main/java/com/att/dmf/mr/service/MetricsService.java
@@ -8,31 +8,31 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
/**
- * @author
+ * @author amol.ramesh.dalne
*
*/
import java.io.IOException;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.beans.DMaaPContext;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public interface MetricsService {
diff --git a/src/main/java/com/att/nsa/cambria/service/TopicService.java b/src/main/java/com/att/dmf/mr/service/TopicService.java
index 9ed39af..b42d9c9 100644
--- a/src/main/java/com/att/nsa/cambria/service/TopicService.java
+++ b/src/main/java/com/att/dmf/mr/service/TopicService.java
@@ -8,34 +8,34 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
import java.io.IOException;
import org.json.JSONException;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.TopicBean;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.TopicBean;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
/**
* interface provide all the topic related operations
*
- * @author author
+ * @author anowarul.islam
*
*/
public interface TopicService {
diff --git a/src/main/java/com/att/nsa/cambria/service/TransactionService.java b/src/main/java/com/att/dmf/mr/service/TransactionService.java
index 109b2c8..f2763a6 100644
--- a/src/main/java/com/att/nsa/cambria/service/TransactionService.java
+++ b/src/main/java/com/att/dmf/mr/service/TransactionService.java
@@ -8,29 +8,29 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
import java.io.IOException;
import com.att.aft.dme2.internal.jettison.json.JSONException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.transaction.TransactionObj;
import com.att.nsa.configs.ConfigDbException;
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
public interface TransactionService {
diff --git a/src/main/java/com/att/nsa/cambria/service/UIService.java b/src/main/java/com/att/dmf/mr/service/UIService.java
index b6555fe..c62b3ff 100644
--- a/src/main/java/com/att/nsa/cambria/service/UIService.java
+++ b/src/main/java/com/att/dmf/mr/service/UIService.java
@@ -1,3 +1,6 @@
+/**
+ *
+ */
/*******************************************************************************
* ============LICENSE_START=======================================================
* org.onap.dmaap
@@ -8,31 +11,27 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-/**
- *
- */
-package com.att.nsa.cambria.service;
+package com.att.dmf.mr.service;
import java.io.IOException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.configs.ConfigDbException;
-
-import kafka.common.TopicExistsException;
+import org.apache.kafka.common.errors.TopicExistsException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.nsa.configs.ConfigDbException;
/**
- * @author author
+ * @author muzainulhaque.qazi
*
*/
public interface UIService {
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/AdminServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java
index 8b651f5..110970f 100644
--- a/src/main/java/com/att/nsa/cambria/service/impl/AdminServiceImpl.java
+++ b/src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
import java.io.IOException;
import java.util.Collection;
@@ -30,21 +30,22 @@ import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.stereotype.Component;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
+import com.att.dmf.mr.service.AdminService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import com.att.nsa.cambria.service.AdminService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.limits.Blacklist;
import com.att.nsa.security.NsaApiKey;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+//import com.att.sa.highlandPark.util.HpJsonUtil;
/**
- * @author author
+ * @author muzainulhaque.qazi
*
*/
@Component
@@ -125,7 +126,26 @@ public class AdminServiceImpl implements AdminService {
adminAuthenticate ( dMaaPContext );
DMaaPResponseBuilder.respondOk ( dMaaPContext,
- new JSONObject().put ( "blacklist", setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) );
+ new JSONObject().put ( "blacklist",
+ setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) );
+ }
+
+ public static JSONArray setToJsonArray ( Set<?> fields )
+ {
+ return collectionToJsonArray ( fields );
+ }
+
+ public static JSONArray collectionToJsonArray ( Collection<?> fields )
+ {
+ final JSONArray a = new JSONArray ();
+ if ( fields != null )
+ {
+ for ( Object o : fields )
+ {
+ a.put ( o );
+ }
+ }
+ return a;
}
/**
@@ -161,28 +181,10 @@ public class AdminServiceImpl implements AdminService {
{
final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext);
- if ( (user == null || !user.getKey ().equals ( "admin" )) )
+ if ( user == null || !user.getKey ().equals ( "admin" ) )
{
throw new AccessDeniedException ();
}
}
-
- public static JSONArray setToJsonArray ( Set<?> fields )
- {
- return collectionToJsonArray ( fields );
- }
-
- public static JSONArray collectionToJsonArray ( Collection<?> fields )
- {
- final JSONArray a = new JSONArray ();
- if ( fields != null )
- {
- for ( Object o : fields )
- {
- a.put ( o );
- }
- }
- return a;
- }
}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java
index 637d2fb..c818f88 100644
--- a/src/main/java/com/att/nsa/cambria/service/impl/ApiKeysServiceImpl.java
+++ b/src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java
@@ -8,36 +8,35 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
import java.io.IOException;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.stereotype.Service;
-//import com.att.nsa.apiServer.util.Emailer;
-import com.att.nsa.cambria.utils.Emailer;
-import com.att.nsa.cambria.beans.ApiKeyBean;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import com.att.nsa.cambria.service.ApiKeysService;
-import com.att.nsa.cambria.utils.ConfigurationReader;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.beans.ApiKeyBean;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
+import com.att.dmf.mr.service.ApiKeysService;
+import com.att.dmf.mr.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.utils.Emailer;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
import com.att.nsa.security.NsaApiKey;
@@ -50,7 +49,7 @@ import com.att.nsa.security.db.simple.NsaSimpleApiKey;
* Implementation of the ApiKeysService, this will provide the below operations,
* getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey
*
- * @author author
+ * @author nilanjana.maity
*/
@Service
public class ApiKeysServiceImpl implements ApiKeysService {
@@ -135,7 +134,6 @@ public class ApiKeysServiceImpl implements ApiKeysService {
log.debug("TopicService: : createApiKey....");
-
String contactEmail = nsaApiKey.getEmail();
final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ;
String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous");
@@ -148,15 +146,12 @@ public class ApiKeysServiceImpl implements ApiKeysService {
return;
}
-
-
-
+
final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
String apiKey = nsaApiKey.getKey();
String sharedSecret = nsaApiKey.getSharedSecret();
final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey,
sharedSecret);
-
if (null != key) {
if (null != nsaApiKey.getEmail()) {
@@ -170,6 +165,7 @@ public class ApiKeysServiceImpl implements ApiKeysService {
log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : "
+ key.toString() + "=====");
apiKeyDb.saveApiKey(key);
+ // System.out.println("here4");
// email out the secret to validate the email address
if ( emailProvided )
{
@@ -200,7 +196,7 @@ public class ApiKeysServiceImpl implements ApiKeysService {
);
DMaaPResponseBuilder.respondOk(dmaapContext,
o);
- /*o.put("secret", "Emailed to " + contactEmail + ".");
+ /*o.put("secret", "Emailed to " + contactEmail + ".");
DMaaPResponseBuilder.respondOk(dmaapContext,
o); */
return;
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImpl.java b/src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java
index cdbf57b..a847f5f 100644
--- a/src/main/java/com/att/nsa/cambria/service/impl/BaseTransactionDbImpl.java
+++ b/src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java
@@ -8,26 +8,26 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
import java.util.Set;
import java.util.TreeSet;
-import com.att.nsa.cambria.transaction.DMaaPTransactionFactory;
-import com.att.nsa.cambria.transaction.DMaaPTransactionObj;
-import com.att.nsa.cambria.transaction.DMaaPTransactionObjDB;
-import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.dmf.mr.transaction.DMaaPTransactionFactory;
+import com.att.dmf.mr.transaction.DMaaPTransactionObj;
+import com.att.dmf.mr.transaction.DMaaPTransactionObjDB;
+import com.att.dmf.mr.transaction.TransactionObj;
import com.att.nsa.configs.ConfigDb;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.configs.ConfigPath;
@@ -35,7 +35,7 @@ import com.att.nsa.configs.ConfigPath;
/**
* Persistent storage for Transaction objects built over an abstract config db.
*
- * @author author
+ * @author anowarul.islam
*
* @param <K>
*/
diff --git a/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java
new file mode 100644
index 0000000..4ca6446
--- /dev/null
+++ b/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java
@@ -0,0 +1,867 @@
+/*******************************************************************************
+ * ============LICENSE_START=======================================================
+ * org.onap.dmaap
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+*
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ *
+ *******************************************************************************/
+package com.att.dmf.mr.service.impl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.Properties;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.http.HttpStatus;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.errors.TopicExistsException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Service;
+
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
+import com.att.dmf.mr.beans.DMaaPCambriaLimiter;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPAccessDeniedException;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.resources.CambriaEventSet;
+import com.att.dmf.mr.resources.CambriaOutboundEventStream;
+import com.att.dmf.mr.security.DMaaPAAFAuthenticator;
+import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
+import com.att.dmf.mr.service.EventsService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.utils.Utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.att.nsa.configs.ConfigDbException;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.att.nsa.security.NsaApiKey;
+import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
+import com.att.nsa.util.rrConvertor;
+
+/**
+ * This class provides the functionality to publish and subscribe messages to
+ * Kafka.
+ *
+ * @author Ramkumar Sembaiyam
+ *
+ */
+@Service
+public class EventsServiceImpl implements EventsService {
+ // private static final Logger LOG =
+ // Logger.getLogger(EventsServiceImpl.class);
+ private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
+
+ private static final String BATCH_LENGTH = "event.batch.length";
+ private static final String TRANSFER_ENCODING = "Transfer-Encoding";
+ @Autowired
+ private DMaaPErrorMessages errorMessages;
+
+ //@Autowired
+ //KafkaLiveLockAvoider2 kafkaLiveLockAvoider;
+
+ // @Value("${metrics.send.cambria.topic}")
+ // private String metricsTopic;
+
+ public DMaaPErrorMessages getErrorMessages() {
+ return errorMessages;
+ }
+
+ public void setErrorMessages(DMaaPErrorMessages errorMessages) {
+ this.errorMessages = errorMessages;
+ }
+
+ /**
+ * @param ctx
+ * @param topic
+ * @param consumerGroup
+ * @param clientId
+ * @throws ConfigDbException,
+ * TopicExistsException, AccessDeniedException,
+ * UnavailableException, CambriaApiException, IOException
+ *
+ *
+ */
+ @Override
+ public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
+ throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
+ CambriaApiException, IOException, DMaaPAccessDeniedException {
+ final long startTime = System.currentTimeMillis();
+ final HttpServletRequest req = ctx.getRequest();
+ //System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"+kafkaLiveLockAvoider);
+ boolean isAAFTopic = false;
+ // was this host blacklisted?
+ final String remoteAddr = Utils.getRemoteAddress(ctx);
+ if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+ null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+ ctx.getRequest().getRemoteHost(), null, null);
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+
+ int limit = CambriaConstants.kNoLimit;
+ if (req.getParameter("limit") != null) {
+ limit = Integer.parseInt(req.getParameter("limit"));
+ }
+
+ int timeoutMs = CambriaConstants.kNoTimeout;
+ String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");
+ if (strtimeoutMS != null)
+ timeoutMs = Integer.parseInt(strtimeoutMS);
+ // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout",
+ // CambriaConstants.kNoTimeout);
+ if (req.getParameter("timeout") != null) {
+ timeoutMs = Integer.parseInt(req.getParameter("timeout"));
+ }
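+ // precedence: the "timeout" request parameter overrides the configured timeout property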
+
+ // By default no filter is applied if filter is not passed as a
+ // parameter in the request URI
+ String topicFilter = CambriaConstants.kNoFilter;
+ if (null != req.getParameter("filter")) {
+ topicFilter = req.getParameter("filter");
+ }
+ // pretty: print the messages on new lines
+ String prettyval = "0";
+ String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty");
+ if (null != strPretty)
+ prettyval = strPretty;
+
+ String metaval = "0";
+ String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta");
+ if (null != strmeta)
+ metaval = strmeta;
+
+ final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval);
+ // withMeta to print offset along with message
+ final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval);
+
+ final LogWrap logger = new LogWrap(topic, consumerGroup, clientId);
+ logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter + " from Remote host "+ctx.getRequest().getRemoteHost());
+
+ // is this user allowed to read this topic?
+ final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+ final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+
+ if (metatopic == null) {
+ // no such topic.
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
+ DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
+ errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()),
+ topic, null, null, consumerGroup + "/" + clientId, ctx.getRequest().getRemoteHost());
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "metrics.send.cambria.topic");
+ if (null == metricTopicname)
+ metricTopicname = "msgrtr.apinode.metrics.dmaap";
+
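+ // Without an Authorization header, only the metrics topic is exempt from
+ // the read check; any other topic with an owner goes through checkUserRead.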
+ if (null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) {
+ if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))) {
+ // check permissions
+ metatopic.checkUserRead(user);
+ }
+ }
+ // if headers are not provided then user will be null
+ if (user == null && null != ctx.getRequest().getHeader("Authorization")) {
+ // the topic name will be sent by the client
+ // String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"sub";
+ DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+ String permission = aaf.aafPermissionString(topic, "sub");
+ if (!aaf.aafAuthentication(ctx.getRequest(), permission)) {
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ errorMessages.getNotPermitted1() + " read " + errorMessages.getNotPermitted2() + topic + " on "
+ + permission,
+ null, Utils.getFormattedDate(new Date()), topic, null, null, consumerGroup + "/" + clientId,
+ ctx.getRequest().getRemoteHost());
+ LOG.info(errRes.toString());
+ throw new DMaaPAccessDeniedException(errRes);
+
+ }
+ isAAFTopic = true;
+ }
+ final long elapsedMs1 = System.currentTimeMillis() - startTime;
+ logger.info("Time taken in getEvents Authorization " + elapsedMs1 + " ms for " + topic + " " + consumerGroup
+ + " " + clientId);
+ Consumer c = null;
+ // String localclientId = clientId;
+ String lhostId = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "clusterhostid");
+ if (null == lhostId) {
+ try {
+ lhostId = InetAddress.getLocalHost().getCanonicalHostName();
+ } catch (UnknownHostException e) {
+ LOG.info("Unknown Host Exception error occured while getting getting hostid");
+ }
+
+ }
+ CambriaOutboundEventStream coes = null;
+ try {
+ final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+ final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
+ rl.onCall(topic, consumerGroup, clientId, ctx.getRequest().getRemoteHost());
+ c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,
+ ctx.getRequest().getRemoteHost());
+ coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
+ .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
+ coes.setDmaapContext(ctx);
+ coes.setTopic(metatopic);
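+ // transaction metadata is enabled when the transidUEBtopicreqd property
+ // is true or the topic is AAF-protected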
+ if (isTransEnabled() || isAAFTopic) {
+ coes.setTransEnabled(true);
+ } else {
+ coes.setTransEnabled(false);
+ }
+ coes.setTopicStyle(isAAFTopic);
+ final long elapsedMs2 = System.currentTimeMillis() - startTime;
+ logger.info("Time taken in getEvents getConsumerFor " + elapsedMs2 + " ms for " + topic + " "
+ + consumerGroup + " " + clientId);
+
+ DMaaPResponseBuilder.setNoCacheHeadings(ctx);
+
+ DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
+ // No IOException thrown during respondOkWithStream, so commit the
+ // new offsets to all the brokers
+ c.commitOffsets();
+ final int sent = coes.getSentCount();
+
+ metricsSet.consumeTick(sent);
+ rl.onSend(topic, consumerGroup, clientId, sent);
+ final long elapsedMs = System.currentTimeMillis() - startTime;
+ logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset() + " for "
+ + topic + " " + consumerGroup + " " + clientId + " on to the server "
+ + ctx.getRequest().getRemoteHost());
+
+ } catch (UnavailableException excp) {
+ logger.warn(excp.getMessage(), excp);
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+ DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+ errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
+ null, null, consumerGroup + "-" + clientId, ctx.getRequest().getRemoteHost());
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ } catch (java.util.ConcurrentModificationException excp1) {
+			LOG.info(excp1.getMessage() + " on " + topic + " " + consumerGroup + " ****** " + clientId + " from remote host " + ctx.getRequest().getRemoteHost());
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT,
+ DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
+ "Couldn't respond to client, possible of consumer requests from more than one server. Please contact MR team if you see this issue occurs continously", null,
+ Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+ logger.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ } catch (CambriaApiException excp) {
+			LOG.info(excp.getMessage() + " on " + topic + " " + consumerGroup + " ****** " + clientId);
+
+ throw excp;
+ }
+		catch (Exception excp) {
+			logger.info("Couldn't respond to client; closing cambria consumer for " + topic + " " + consumerGroup
+					+ " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + " ****** " + excp);
+
+ ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
+
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
+ DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
+ "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null,
+ Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
+ logger.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ } finally {
+ // If no cache, close the consumer now that we're done with it.
+			boolean cacheEnabled = ConsumerFactory.kDefault_IsCacheEnabled;
+			String cacheEnabledSetting = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+					ConsumerFactory.kSetting_EnableCache);
+			if (null != cacheEnabledSetting)
+				cacheEnabled = Boolean.parseBoolean(cacheEnabledSetting);
+			if (!cacheEnabled && (c != null)) {
+ try {
+ c.close();
+ } catch (Exception e) {
+ logger.info("***Exception occured in getEvents finaly block while closing the consumer " + " "
+ + topic + " " + consumerGroup + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE
+ + " " + e);
+ }
+ }
+ }
+ }
+
+	/**
+	 * Publishes events to the given topic, routing through the
+	 * transaction-aware path when the topic is AAF-protected or transaction
+	 * ids are required.
+	 *
+	 * @throws missingReqdSetting
+	 */
+ @Override
+ public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
+ final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
+ CambriaApiException, IOException, missingReqdSetting, DMaaPAccessDeniedException {
+
+ // is this user allowed to write to this topic?
+ final long startMs = System.currentTimeMillis();
+ final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
+ final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
+ boolean isAAFTopic = false;
+
+ // was this host blacklisted?
+ final String remoteAddr = Utils.getRemoteAddress(ctx);
+
+ if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
+ null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+ ctx.getRequest().getRemoteHost(), null, null);
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+
+		String topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
+				"enforced.topic.name.AAF");
+ String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "metrics.send.cambria.topic");
+ if (null == metricTopicname)
+ metricTopicname = "msgrtr.apinode.metrics.dmaap";
+ boolean topicNameEnforced = false;
+ if (null != topicNameStd && topic.startsWith(topicNameStd)) {
+ topicNameEnforced = true;
+ }
+
+		// Check whether the user may publish on this topic. This path applies
+		// when no auth header is present or when UEB API-key authentication is
+		// used; checkUserWrite(user) throws when there is no Authorization
+		// header or the user lacks publish rights.
+
+ if (null != metatopic && null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))
+ && null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) {
+ metatopic.checkUserWrite(user);
+ }
+
+ // if headers are not provided then user will be null
+ if (topicNameEnforced || (user == null && null != ctx.getRequest().getHeader("Authorization")
+ && !topic.equalsIgnoreCase(metricTopicname))) {
+ // the topic name will be sent by the client
+ // String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"pub";
+ DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+ String permission = aaf.aafPermissionString(topic, "pub");
+ if (!aaf.aafAuthentication(ctx.getRequest(), permission)) {
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ errorMessages.getNotPermitted1() + " publish " + errorMessages.getNotPermitted2() + topic
+ + " on " + permission,
+ null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+ ctx.getRequest().getRemoteHost(), null, null);
+ LOG.info(errRes.toString());
+ throw new DMaaPAccessDeniedException(errRes);
+ }
+ isAAFTopic = true;
+ }
+
+ final HttpServletRequest req = ctx.getRequest();
+
+ // check for chunked input
+ boolean chunked = false;
+ if (null != req.getHeader(TRANSFER_ENCODING)) {
+ chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
+ }
+ // get the media type, or set it to a generic value if it wasn't
+ // provided
+ String mediaType = req.getContentType();
+ if (mediaType == null || mediaType.length() == 0) {
+ mediaType = MimeTypes.kAppGenericBinary;
+ }
+
+ if (mediaType.contains("charset=UTF-8")) {
+ mediaType = mediaType.replace("; charset=UTF-8", "").trim();
+ }
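+		// note: only the exact "; charset=UTF-8" suffix is stripped; other charset spellings pass through unchanged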
+
+		final boolean istransidreqd = isTransEnabled();
+
+ if (isAAFTopic || istransidreqd) {
+ pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
+ } else {
+ pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
+ }
+ final long endMs = System.currentTimeMillis();
+ final long totalMs = endMs - startMs;
+
+ LOG.info("Overall Response time - Published " + " msgs in " + totalMs + " ms for topic " + topic);
+
+ }
+
+ /**
+ *
+ * @param ctx
+ * @param topic
+ * @param msg
+ * @param defaultPartition
+ * @param chunked
+ * @param mediaType
+ * @throws ConfigDbException
+ * @throws AccessDeniedException
+ * @throws TopicExistsException
+ * @throws CambriaApiException
+ * @throws IOException
+ */
+ private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
+ String mediaType)
+ throws ConfigDbException, AccessDeniedException, TopicExistsException, CambriaApiException, IOException {
+ final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+ // setup the event set
+ final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
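+		// CambriaEventSet parses the request body into individual messages, honoring the media type and chunking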
+
+ // start processing, building a batch to push to the backend
+ final long startMs = System.currentTimeMillis();
+ long count = 0;
+ long maxEventBatch = 1024 * 16;
+ String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+ if (null != batchlen)
+ maxEventBatch = Long.parseLong(batchlen);
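+		// flush threshold defaults to 16 * 1024 messages unless overridden by the BATCH_LENGTH property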
+ final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+ final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
+ try {
+ // for each message...
+ Publisher.message m = null;
+ while ((m = events.next()) != null) {
+ // add the message to the batch
+ batch.add(m);
+ final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+ m.getMessage());
+
+ pms.add(data);
+ // check if the batch is full
+ final int sizeNow = batch.size();
+ if (sizeNow > maxEventBatch) {
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ pms.clear();
+ batch.clear();
+ metricsSet.publishTick(sizeNow);
+ count += sizeNow;
+ }
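+				// note: this non-transactional path flushes only once the batch exceeds maxEventBatch (>),
+				// while pushEventsWithTransaction below flushes as soon as it reaches it (>=)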
+ }
+
+ // send the pending batch
+ final int sizeNow = batch.size();
+ if (sizeNow > 0) {
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ pms.clear();
+ batch.clear();
+ metricsSet.publishTick(sizeNow);
+ count += sizeNow;
+ }
+
+ final long endMs = System.currentTimeMillis();
+ final long totalMs = endMs - startMs;
+
+ LOG.info("Published " + count + " msgs in " + totalMs + " ms for topic " + topic + " from server "
+ + ctx.getRequest().getRemoteHost());
+
+			// build a response
+ final JSONObject response = new JSONObject();
+ response.put("count", count);
+ response.put("serverTimeMs", totalMs);
+ DMaaPResponseBuilder.respondOk(ctx, response);
+
+ } catch (Exception excp) {
+ int status = HttpStatus.SC_NOT_FOUND;
+ String errorMsg = null;
+ if (excp instanceof CambriaApiException) {
+ status = ((CambriaApiException) excp).getStatus();
+ JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+ JSONObject errObject = new JSONObject(jsonTokener);
+ errorMsg = (String) errObject.get("message");
+
+ }
+ ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+ errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
+ + "." + errorMsg,
+ null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
+ null);
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ }
+ }
+
+ /**
+ *
+ * @param ctx
+ * @param inputStream
+ * @param topic
+ * @param partitionKey
+ * @param requestTime
+ * @param chunked
+ * @param mediaType
+ * @throws ConfigDbException
+ * @throws AccessDeniedException
+ * @throws TopicExistsException
+ * @throws IOException
+ * @throws CambriaApiException
+ */
+ private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
+ final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
+ throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException, CambriaApiException {
+
+ final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
+
+ // setup the event set
+ final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
+
+ // start processing, building a batch to push to the backend
+ final long startMs = System.currentTimeMillis();
+ long count = 0;
+ long maxEventBatch = 1024 * 16;
+ String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
+ if (null != evenlen)
+ maxEventBatch = Long.parseLong(evenlen);
+ final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
+ final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
+ Publisher.message m = null;
+ int messageSequence = 1;
+ Long batchId = 1L;
+ final boolean transactionEnabled = true;
+ int publishBatchCount = 0;
+ SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
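+		// SimpleDateFormat is not thread-safe; keeping this instance method-local avoids sharing it across requests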
+
+ // LOG.warn("Batch Start Id: " +
+ // Utils.getFromattedBatchSequenceId(batchId));
+ try {
+ // for each message...
+ batchId = DMaaPContext.getBatchID();
+
+ String responseTransactionId = null;
+
+ while ((m = events.next()) != null) {
+
+ // LOG.warn("Batch Start Id: " +
+ // Utils.getFromattedBatchSequenceId(batchId));
+
+ addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
+ transactionEnabled);
+ messageSequence++;
+
+ // add the message to the batch
+ batch.add(m);
+
+ responseTransactionId = m.getLogDetails().getTransactionId();
+
+ JSONObject jsonObject = new JSONObject();
+ jsonObject.put("msgWrapMR", m.getMessage());
+ jsonObject.put("transactionId", responseTransactionId);
+ final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+ m.getMessage());
+
+ pms.add(data);
+ // check if the batch is full
+ final int sizeNow = batch.size();
+ if (sizeNow >= maxEventBatch) {
+ String startTime = sdf.format(new Date());
+ LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+ + batchId + "]");
+ try {
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ for (message msg : batch) {
+ LogDetails logDetails = msg.getLogDetails();
+ LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+ }
+ } catch (Exception excp) {
+
+ int status = HttpStatus.SC_NOT_FOUND;
+ String errorMsg = null;
+ if (excp instanceof CambriaApiException) {
+ status = ((CambriaApiException) excp).getStatus();
+ JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+ JSONObject errObject = new JSONObject(jsonTokener);
+ errorMsg = (String) errObject.get("message");
+ }
+ ErrorResponse errRes = new ErrorResponse(status,
+ DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+ "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+ + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+ null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+ ctx.getRequest().getRemoteHost(), null, null);
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ pms.clear();
+ batch.clear();
+ metricsSet.publishTick(sizeNow);
+ publishBatchCount = sizeNow;
+ count += sizeNow;
+ String endTime = sdf.format(new Date());
+ LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
+ + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
+ + ",Batch End Time=" + endTime + "]");
+ batchId = DMaaPContext.getBatchID();
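+					// fetch a fresh batch id for the next batch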
+ }
+ }
+
+ // send the pending batch
+ final int sizeNow = batch.size();
+ if (sizeNow > 0) {
+ String startTime = sdf.format(new Date());
+ LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+ + batchId + "]");
+ try {
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ for (message msg : batch) {
+ LogDetails logDetails = msg.getLogDetails();
+ LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
+ }
+ } catch (Exception excp) {
+ int status = HttpStatus.SC_NOT_FOUND;
+ String errorMsg = null;
+ if (excp instanceof CambriaApiException) {
+ status = ((CambriaApiException) excp).getStatus();
+ JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+ JSONObject errObject = new JSONObject(jsonTokener);
+ errorMsg = (String) errObject.get("message");
+ }
+
+ ErrorResponse errRes = new ErrorResponse(status,
+ DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+ "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+ + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+ null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+ ctx.getRequest().getRemoteHost(), null, null);
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ pms.clear();
+ metricsSet.publishTick(sizeNow);
+ count += sizeNow;
+ String endTime = sdf.format(new Date());
+ publishBatchCount = sizeNow;
+ LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
+ + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
+ + endTime + "]");
+ }
+
+ final long endMs = System.currentTimeMillis();
+ final long totalMs = endMs - startMs;
+
+ LOG.info("Published " + count + " msgs(with transaction id) in " + totalMs + " ms for topic " + topic);
+
+ if (null != responseTransactionId) {
+ ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
+ }
+
+ // build a response
+ final JSONObject response = new JSONObject();
+ response.put("count", count);
+ response.put("serverTimeMs", totalMs);
+ DMaaPResponseBuilder.respondOk(ctx, response);
+
+ } catch (Exception excp) {
+ int status = HttpStatus.SC_NOT_FOUND;
+ String errorMsg = null;
+ if (excp instanceof CambriaApiException) {
+ status = ((CambriaApiException) excp).getStatus();
+ JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
+ JSONObject errObject = new JSONObject(jsonTokener);
+ errorMsg = (String) errObject.get("message");
+ }
+
+ ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
+ "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
+ + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
+ null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
+ ctx.getRequest().getRemoteHost(), null, null);
+ LOG.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+ }
+ }
+
+ /**
+ *
+ * @param msg
+ * @param topic
+ * @param request
+ * @param messageCreationTime
+ * @param messageSequence
+ * @param batchId
+ * @param transactionEnabled
+ */
+ private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
+ final String messageCreationTime, final int messageSequence, final Long batchId,
+ final boolean transactionEnabled) {
+ LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
+ transactionEnabled);
+ logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
+ msg.setTransactionEnabled(transactionEnabled);
+ msg.setLogDetails(logDetails);
+ }
+
+ /**
+ *
+ * @author anowarul.islam
+ *
+ */
+ private static class LogWrap {
+ private final String fId;
+
+ /**
+ * constructor initialization
+ *
+ * @param topic
+ * @param cgroup
+ * @param cid
+ */
+ public LogWrap(String topic, String cgroup, String cid) {
+ fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
+ }
+
+ /**
+ *
+ * @param msg
+ */
+ public void info(String msg) {
+ LOG.info(fId + msg);
+ }
+
+ /**
+ *
+ * @param msg
+ * @param t
+ */
+ public void warn(String msg, Exception t) {
+ LOG.warn(fId + msg, t);
+ }
+
+ }
+
+	public boolean isTransEnabled() {
+		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+				"transidUEBtopicreqd");
+		return (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true"));
+	}
+
+ private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
+ final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
+ LogDetails logDetails = new LogDetails();
+ logDetails.setTopicId(topicName);
+ logDetails.setMessageTimestamp(messageTimestamp);
+ logDetails.setPublisherId(Utils.getUserApiKey(request));
+ logDetails.setPublisherIp(request.getRemoteHost());
+ logDetails.setMessageBatchId(batchId);
+ logDetails.setMessageSequence(String.valueOf(messageSequence));
+ logDetails.setTransactionEnabled(transactionEnabled);
+ logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
+ logDetails.setServerIp(request.getLocalAddr());
+ return logDetails;
+ }
+
+} \ No newline at end of file
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/MMServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java
index 04cc51c..da8cb16 100644
--- a/src/main/java/com/att/nsa/cambria/service/impl/MMServiceImpl.java
+++ b/src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -34,9 +34,7 @@ import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.Context;
import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
+import org.apache.kafka.clients.producer.ProducerRecord;
import org.json.JSONObject;
import org.json.JSONTokener;
import org.springframework.beans.factory.annotation.Autowired;
@@ -44,39 +42,36 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.backends.Publisher;
-
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.resources.CambriaEventSet;
-import com.att.nsa.cambria.resources.CambriaOutboundEventStream;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import com.att.nsa.cambria.service.MMService;
-import com.att.nsa.cambria.utils.ConfigurationReader;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
-import com.att.nsa.cambria.utils.Utils;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.Consumer;
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.backends.Publisher.message;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.LogDetails;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.resources.CambriaEventSet;
+import com.att.dmf.mr.resources.CambriaOutboundEventStream;
+import com.att.dmf.mr.service.MMService;
+import com.att.dmf.mr.utils.ConfigurationReader;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.utils.Utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.drumlin.service.standards.MimeTypes;
import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
import com.att.nsa.util.rrConvertor;
-import kafka.producer.KeyedMessage;
+
@Service
public class MMServiceImpl implements MMService {
@@ -202,7 +197,7 @@ public class MMServiceImpl implements MMService {
try {
final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
- c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
+ c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,ctx.getRequest().getRemoteHost());
final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
.limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
@@ -214,7 +209,7 @@ public class MMServiceImpl implements MMService {
try {
coes.write(baos);
} catch (Exception ex) {
- LOG.error("Exception at subscribe(.......): " + ex);
+					LOG.error("Exception at subscribe: " + ex);
}
c.commitOffsets();
@@ -223,7 +218,7 @@ public class MMServiceImpl implements MMService {
metricsSet.consumeTick(sent);
} catch (UnavailableException excp) {
- LOG.error(excp.toString());
+
ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
@@ -235,7 +230,6 @@ public class MMServiceImpl implements MMService {
throw excp;
} catch (Exception excp) {
- LOG.error(excp.toString());
ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
@@ -363,7 +357,8 @@ public class MMServiceImpl implements MMService {
// long maxEventBatch =
// ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
- final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+ final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
try {
// for each message...
@@ -371,14 +366,13 @@ public class MMServiceImpl implements MMService {
while ((m = events.next()) != null) {
// add the message to the batch
batch.add(m);
- final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
+ final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
m.getMessage());
- kms.add(data);
// check if the batch is full
final int sizeNow = batch.size();
if (sizeNow > maxEventBatch) {
- ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
- kms.clear();
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ pms.clear();
batch.clear();
metricsSet.publishTick(sizeNow);
count += sizeNow;
@@ -388,8 +382,8 @@ public class MMServiceImpl implements MMService {
// send the pending batch
final int sizeNow = batch.size();
if (sizeNow > 0) {
- ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
- kms.clear();
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
+ pms.clear();
batch.clear();
metricsSet.publishTick(sizeNow);
count += sizeNow;
@@ -447,7 +441,7 @@ public class MMServiceImpl implements MMService {
maxEventBatch = Long.parseLong(evenlen);
final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
- final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
+ final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
Publisher.message m = null;
int messageSequence = 1;
@@ -481,9 +475,9 @@ public class MMServiceImpl implements MMService {
JSONObject jsonObject = new JSONObject();
jsonObject.put("message", m.getMessage());
jsonObject.put("transactionId", responseTransactionId);
- final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
- jsonObject.toString());
- kms.add(data);
+ final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
+ m.getMessage());
+ pms.add(data);
// check if the batch is full
final int sizeNow = batch.size();
@@ -492,7 +486,7 @@ public class MMServiceImpl implements MMService {
LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+ batchId + "]");
try {
- ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
// transactionLogs(batch);
for (message msg : batch) {
LogDetails logDetails = msg.getLogDetails();
@@ -517,7 +511,7 @@ public class MMServiceImpl implements MMService {
LOG.info(errRes.toString());
throw new CambriaApiException(errRes);
}
- kms.clear();
+ pms.clear();
batch.clear();
metricsSet.publishTick(sizeNow);
publishBatchCount = sizeNow;
@@ -538,7 +532,7 @@ public class MMServiceImpl implements MMService {
LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
+ batchId + "]");
try {
- ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
+ ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
// transactionLogs(batch);
for (message msg : batch) {
LogDetails logDetails = msg.getLogDetails();
@@ -563,7 +557,7 @@ public class MMServiceImpl implements MMService {
LOG.info(errRes.toString());
throw new CambriaApiException(errRes);
}
- kms.clear();
+ pms.clear();
metricsSet.publishTick(sizeNow);
count += sizeNow;
// batchId++;
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/MetricsServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java
index 1a1baf5..83b3770 100644
--- a/src/main/java/com/att/nsa/cambria/service/impl/MetricsServiceImpl.java
+++ b/src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java
@@ -8,31 +8,31 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
import java.io.IOException;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
import org.json.JSONObject;
import org.springframework.stereotype.Component;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.service.MetricsService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.service.MetricsService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import com.att.nsa.metrics.CdmMeasuredItem;
/**
@@ -42,7 +42,7 @@ import com.att.nsa.metrics.CdmMeasuredItem;
* get metrics details
*
*
- * @author author
+ * @author nilanjana.maity
*
*
*/
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/TopicServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java
index bfd005e..01ed1cc 100644
--- a/src/main/java/com/att/nsa/cambria/service/impl/TopicServiceImpl.java
+++ b/src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java
@@ -1,3 +1,6 @@
+/**
+ *
+ */
/*******************************************************************************
* ============LICENSE_START=======================================================
* org.onap.dmaap
@@ -8,27 +11,22 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-/**
- *
- */
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
import java.io.IOException;
import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
@@ -36,46 +34,51 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;
-import com.att.nsa.cambria.beans.TopicBean;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
-import com.att.nsa.cambria.metabroker.Broker;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import com.att.nsa.cambria.service.TopicService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import com.att.dmf.mr.beans.TopicBean;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.exception.DMaaPAccessDeniedException;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
+import com.att.dmf.mr.metabroker.Broker1;
+//import com.att.dmf.mr.metabroker.Broker1;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.security.DMaaPAAFAuthenticator;
+import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
+import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
+import com.att.dmf.mr.service.TopicService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.security.NsaAcl;
import com.att.nsa.security.NsaApiKey;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
/**
- * @author author
+ * @author muzainulhaque.qazi
*
*/
@Service
public class TopicServiceImpl implements TopicService {
- //private static final Logger LOGGER = Logger.getLogger(TopicServiceImpl.class);
+ // private static final Logger LOGGER =
+ // Logger.getLogger(TopicServiceImpl.class);
private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class);
@Autowired
private DMaaPErrorMessages errorMessages;
-
-
-
- //@Value("${msgRtr.topicfactory.aaf}")
- //private String mrFactory;
-
-
+
+ // @Value("${msgRtr.topicfactory.aaf}")
+ // private String mrFactory;
+
+ public DMaaPErrorMessages getErrorMessages() {
+ return errorMessages;
+ }
+
public void setErrorMessages(DMaaPErrorMessages errorMessages) {
this.errorMessages = errorMessages;
}
@@ -89,7 +92,6 @@ public class TopicServiceImpl implements TopicService {
*/
@Override
public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
-
LOGGER.info("Fetching list of all the topics.");
JSONObject json = new JSONObject();
@@ -123,7 +125,7 @@ public class TopicServiceImpl implements TopicService {
for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
JSONObject obj = new JSONObject();
obj.put("topicName", topic.getName());
- //obj.put("description", topic.getDescription());
+ // obj.put("description", topic.getDescription());
obj.put("owner", topic.getOwner());
obj.put("txenabled", topic.isTransactionEnabled());
topicsList.put(obj);
@@ -136,7 +138,6 @@ public class TopicServiceImpl implements TopicService {
}
-
/**
* @param dmaapContext
* @param topicName
@@ -157,24 +158,21 @@ public class TopicServiceImpl implements TopicService {
}
JSONObject o = new JSONObject();
- o.put ( "name", t.getName () );
- o.put ( "description", t.getDescription () );
-
- if (null!=t.getOwners ())
- o.put ( "owner", t.getOwners ().iterator ().next () );
- if(null!=t.getReaderAcl ())
- {
- o.put ( "readerAcl", aclToJson ( t.getReaderAcl () ) );
- }
- if(null!=t.getWriterAcl ())
- o.put ( "writerAcl", aclToJson ( t.getWriterAcl () ) );
-
+ o.put("name", t.getName());
+ o.put("description", t.getDescription());
+
+ if (null != t.getOwners())
+ o.put("owner", t.getOwners().iterator().next());
+ if (null != t.getReaderAcl())
+ o.put("readerAcl", aclToJson(t.getReaderAcl()));
+ if (null != t.getWriterAcl())
+ o.put("writerAcl", aclToJson(t.getWriterAcl()));
+
LOGGER.info("Returning details of topic " + topicName);
DMaaPResponseBuilder.respondOk(dmaapContext, o);
}
-
/**
* @param dmaapContext
* @param topicBean
@@ -189,109 +187,139 @@ public class TopicServiceImpl implements TopicService {
*/
@Override
public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
- throws CambriaApiException, DMaaPAccessDeniedException,IOException, TopicExistsException {
+ throws CambriaApiException, DMaaPAccessDeniedException, IOException, TopicExistsException {
LOGGER.info("Creating topic " + topicBean.getTopicName());
-
+
final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
String key = null;
- String appName=dmaapContext.getRequest().getHeader("AppName");
- String enfTopicName= com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,"enforced.topic.name.AAF");
- if(user != null)
- {
+ //String appName = dmaapContext.getRequest().getHeader("AppName");
+ String enfTopicName = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
+ "enforced.topic.name.AAF");
+
+ if (user != null) {
key = user.getKey();
-
- if( enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >=0 ) {
-
- LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+
+ if (enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >= 0) {
+
+ LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ "Failed to create topic: Access Denied.User does not have permission to perform create topic");
+
LOGGER.info(errRes.toString());
- throw new DMaaPAccessDeniedException(errRes);
-
+ // throw new DMaaPAccessDeniedException(errRes);
+
}
}
-
- //else if (user==null && (null==dmaapContext.getRequest().getHeader("Authorization") && null == dmaapContext.getRequest().getHeader("cookie")) ) {
- /*else if (user == null && null==dmaapContext.getRequest().getHeader("Authorization") &&
- (null == appName && null == dmaapContext.getRequest().getHeader("cookie"))) {
- LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+ // else if (user==null &&
+ // (null==dmaapContext.getRequest().getHeader("Authorization") && null
+ // == dmaapContext.getRequest().getHeader("cookie")) ) {
+ /*else if (user == null && null == dmaapContext.getRequest().getHeader("Authorization")
+ ) {
+ LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ "Failed to create topic: Access Denied.User does not have permission to perform create topic");
+
LOGGER.info(errRes.toString());
- throw new DMaaPAccessDeniedException(errRes);
+ // throw new DMaaPAccessDeniedException(errRes);
}*/
-
- if (user == null && (null!=dmaapContext.getRequest().getHeader("Authorization"))) {
- //if (user == null && (null!=dmaapContext.getRequest().getHeader("Authorization") || null != dmaapContext.getRequest().getHeader("cookie"))) {
- // ACL authentication is not provided so we will use the aaf authentication
- LOGGER.info("Authorization the topic");
-
+
+ if (user == null /*&& (null != dmaapContext.getRequest().getHeader("Authorization")
+ )*/) {
+ // if (user == null &&
+ // (null!=dmaapContext.getRequest().getHeader("Authorization") ||
+ // null != dmaapContext.getRequest().getHeader("cookie"))) {
+ // ACL authentication is not provided so we will use the aaf
+ // authentication
+ /*LOGGER.info("Authorization the topic");
+
String permission = "";
- String nameSpace="";
- if(topicBean.getTopicName().indexOf(".")>1)
- nameSpace = topicBean.getTopicName().substring(0,topicBean.getTopicName().lastIndexOf("."));
-
- String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
-
- //AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
-
- permission = mrFactoryVal+nameSpace+"|create";
- DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-
- if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
- {
-
- LOGGER.error("Failed to create topic"+topicBean.getTopicName()+", Authentication failed.");
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" create "+errorMessages.getNotPermitted2());
+ String nameSpace = "";
+ if (topicBean.getTopicName().indexOf(".") > 1)
+ nameSpace = topicBean.getTopicName().substring(0, topicBean.getTopicName().lastIndexOf("."));
+
+ String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "msgRtr.topicfactory.aaf");
+
+ // AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
+
+ permission = mrFactoryVal + nameSpace + "|create";
+ DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();*/
+
+ //if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
+ if (false) {
+ LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
+
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ "Failed to create topic: Access Denied.User does not have permission to create topic with perm "
+ //+ permission);
+ + "permission");
+
+
LOGGER.info(errRes.toString());
throw new DMaaPAccessDeniedException(errRes);
-
- }else{
- // if user is null and aaf authentication is ok then key should be ""
- //key = "";
+
+ } else {
+ // if user is null and aaf authentication is ok then key should
+ // be ""
+ // key = "";
/**
* Added as part of AAF user it should return username
*/
-
- key = dmaapContext.getRequest().getUserPrincipal().getName().toString();
- LOGGER.info("key ==================== "+key);
-
+
+ //key = dmaapContext.getRequest().getUserPrincipal().getName().toString();
+ key="admin";
+ LOGGER.info("key ==================== " + key);
+
}
}
try {
final String topicName = topicBean.getTopicName();
final String desc = topicBean.getTopicDescription();
+ int partition = topicBean.getPartitionCount();
+ // int replica = topicBean.getReplicationCount();
+ if (partition == 0) {
+ partition = 8;
+ }
+ final int partitions = partition;
- final int partitions = topicBean.getPartitionCount();
-
- final int replicas = topicBean.getReplicationCount();
+ int replica = topicBean.getReplicationCount();
+ if (replica == 0) {
+ //replica = 3;
+ replica = 1;
+ }
+ final int replicas = replica;
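+			// defaults when the request leaves counts unset: 8 partitions, 1 replica (the commented
+			// line above shows 3 replicas as an alternative default)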
boolean transactionEnabled = topicBean.isTransactionEnabled();
-
- final Broker metabroker = getMetaBroker(dmaapContext);
- final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas,
- transactionEnabled);
+ final Broker1 metabroker = getMetaBroker(dmaapContext);
+ final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas, transactionEnabled);
LOGGER.info("Topic created successfully. Sending response");
DMaaPResponseBuilder.respondOk(dmaapContext, topicToJson(t));
} catch (JSONException excp) {
-
+
LOGGER.error("Failed to create topic. Couldn't parse JSON data.", excp);
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
- DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),
- errorMessages.getIncorrectJson());
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
+ DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
+ LOGGER.info(errRes.toString());
+ throw new CambriaApiException(errRes);
+
+ } catch (ConfigDbException excp1) {
+
+ LOGGER.error("Failed to create topic. Config DB Exception", excp1);
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
+ DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
LOGGER.info(errRes.toString());
throw new CambriaApiException(errRes);
-
+ } catch (com.att.dmf.mr.metabroker.Broker1.TopicExistsException e) {
+			LOGGER.error("Failed to create topic. Topic [" + topicBean.getTopicName() + "] already exists.", e);
+			throw new CambriaApiException(HttpStatus.SC_CONFLICT, "Failed to create topic: topic already exists");
}
}
@@ -305,38 +333,45 @@ public class TopicServiceImpl implements TopicService {
* @throws AccessDeniedException
*/
@Override
- public void deleteTopic(DMaaPContext dmaapContext, String topicName)
- throws IOException, ConfigDbException, CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
+ public void deleteTopic(DMaaPContext dmaapContext, String topicName) throws IOException, ConfigDbException,
+ CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
+
+ LOGGER.info(" Deleting topic " + topicName);
+ /*if (true) { // {
+ LOGGER.error("Failed to delete topi" + topicName + ". Authentication failed.");
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), errorMessages.getCreateTopicFail() + " "
+ + errorMessages.getNotPermitted1() + " delete " + errorMessages.getNotPermitted2());
+ LOGGER.info(errRes.toString());
+ throw new DMaaPAccessDeniedException(errRes);
+ }*/
- LOGGER.info("Deleting topic " + topicName);
final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
- if (user == null && null!=dmaapContext.getRequest().getHeader("Authorization")) {
+ /*if (user == null && null != dmaapContext.getRequest().getHeader("Authorization")) {
LOGGER.info("Authenticating the user, as ACL authentication is not provided");
-// String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+ // String permission =
+ // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
String permission = "";
- String nameSpace="";
- if(topicName.indexOf(".")>1){
- nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
- }
- String mrFactoryVal=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"msgRtr.topicfactory.aaf");
-// String tokens[] = topicName.split(".mr.topic.");
- permission = mrFactoryVal+nameSpace+"|destroy";
+ String nameSpace = topicName.substring(0, topicName.lastIndexOf("."));
+ String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
+ "msgRtr.topicfactory.aaf");
+ // String tokens[] = topicName.split(".mr.topic.");
+ permission = mrFactoryVal + nameSpace + "|destroy";
DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
- if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
- {
- LOGGER.error("Failed to delete topi"+topicName+". Authentication failed.");
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- errorMessages.getCreateTopicFail()+" "+errorMessages.getNotPermitted1()+" delete "+errorMessages.getNotPermitted2());
+ if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
+ LOGGER.error("Failed to delete topi" + topicName + ". Authentication failed.");
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ errorMessages.getCreateTopicFail() + " " + errorMessages.getNotPermitted1() + " delete "
+ + errorMessages.getNotPermitted2());
LOGGER.info(errRes.toString());
throw new DMaaPAccessDeniedException(errRes);
}
-
-
- }
- final Broker metabroker = getMetaBroker(dmaapContext);
+ }*/
+
+ final Broker1 metabroker = getMetaBroker(dmaapContext);
final Topic topic = metabroker.getTopic(topicName);
if (topic == null) {
@@ -344,7 +379,12 @@ public class TopicServiceImpl implements TopicService {
throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist.");
}
- metabroker.deleteTopic(topicName);
+ try {
+ metabroker.deleteTopic(topicName);
+ } catch (com.att.dmf.mr.metabroker.Broker1.TopicExistsException e) {
+ // TODO Auto-generated catch block
+ throw new CambriaApiException(500, "failed to delete the topic");
+ }
LOGGER.info("Topic [" + topicName + "] deleted successfully. Sending response.");
DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Topic [" + topicName + "] deleted successfully");
@@ -379,8 +419,6 @@ public class TopicServiceImpl implements TopicService {
throw new TopicExistsException(
"Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
}
-
-
final NsaAcl acl = topic.getWriterAcl();
@@ -453,40 +491,40 @@ public class TopicServiceImpl implements TopicService {
/**
* @param dmaapContext
- * @param topicName
- * @param producerId
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- * @throws AccessDeniedException
- * @throws
+	 * @param topicName
+	 * @param producerId
+	 * @throws ConfigDbException
+	 * @throws IOException
+	 * @throws TopicExistsException
+	 * @throws AccessDeniedException
*
*/
@Override
public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException {
+ throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, CambriaApiException {
LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName);
final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-
-// if (user == null) {
-//
-// LOGGER.info("Authenticating the user, as ACL authentication is not provided");
-//// String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-//
-// DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-// String permission = aaf.aafPermissionString(topicName, "manage");
-// if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-// {
-// LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic " + topicName
-// + ". Authentication failed.");
-// ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
-// DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
-// errorMessages.getNotPermitted1()+" <Grant publish permissions> "+errorMessages.getNotPermitted2()+ topicName);
-// LOGGER.info(errRes);
-// throw new DMaaPAccessDeniedException(errRes);
-// }
-// }
+
+ // if (user == null) {
+ //
+ // LOGGER.info("Authenticating the user, as ACL authentication is not
+ // provided");
+ //// String permission =
+ // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+ //
+ // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+ // String permission = aaf.aafPermissionString(topicName, "manage");
+ // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+ // {
+ // LOGGER.error("Failed to permit write access to producer [" +
+ // producerId + "] for topic " + topicName
+ // + ". Authentication failed.");
+ // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ // errorMessages.getNotPermitted1()+" <Grant publish permissions>
+ // "+errorMessages.getNotPermitted2()+ topicName);
+ // LOGGER.info(errRes);
+ // throw new DMaaPAccessDeniedException(errRes);
+ // }
+ // }
Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
@@ -513,32 +551,36 @@ public class TopicServiceImpl implements TopicService {
* @throws IOException
* @throws TopicExistsException
* @throws AccessDeniedException
- * @throws DMaaPAccessDeniedException
+ * @throws DMaaPAccessDeniedException
*
*/
@Override
public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+ throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+ DMaaPAccessDeniedException {
LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName);
final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-// if (user == null) {
-//
-//// String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-// DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-// String permission = aaf.aafPermissionString(topicName, "manage");
-// if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-// {
-// LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic " + topicName
-// + ". Authentication failed.");
-// ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
-// DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
-// errorMessages.getNotPermitted1()+" <Revoke publish permissions> "+errorMessages.getNotPermitted2()+ topicName);
-// LOGGER.info(errRes);
-// throw new DMaaPAccessDeniedException(errRes);
-//
-// }
-// }
+ // if (user == null) {
+ //
+ //// String permission =
+ // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+ // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+ // String permission = aaf.aafPermissionString(topicName, "manage");
+ // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+ // {
+ // LOGGER.error("Failed to revoke write access to producer [" +
+ // producerId + "] for topic " + topicName
+ // + ". Authentication failed.");
+ // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ // errorMessages.getNotPermitted1()+" <Revoke publish permissions>
+ // "+errorMessages.getNotPermitted2()+ topicName);
+ // LOGGER.info(errRes);
+ // throw new DMaaPAccessDeniedException(errRes);
+ //
+ // }
+ // }
Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
@@ -561,30 +603,34 @@ public class TopicServiceImpl implements TopicService {
* @param dmaapContext
* @param topicName
* @param consumerId
- * @throws DMaaPAccessDeniedException
+ * @throws DMaaPAccessDeniedException
*/
@Override
public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+ throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+ DMaaPAccessDeniedException {
LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName);
final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-// if (user == null) {
-//
-//// String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-// DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-// String permission = aaf.aafPermissionString(topicName, "manage");
-// if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-// {
-// LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic " + topicName
-// + ". Authentication failed.");
-// ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
-// DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
-// errorMessages.getNotPermitted1()+" <Grant consume permissions> "+errorMessages.getNotPermitted2()+ topicName);
-// LOGGER.info(errRes);
-// throw new DMaaPAccessDeniedException(errRes);
-// }
-// }
+ // if (user == null) {
+ //
+ //// String permission =
+ // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+ // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+ // String permission = aaf.aafPermissionString(topicName, "manage");
+ // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+ // {
+ // LOGGER.error("Failed to permit read access to consumer [" +
+ // consumerId + "] for topic " + topicName
+ // + ". Authentication failed.");
+ // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ // errorMessages.getNotPermitted1()+" <Grant consume permissions>
+ // "+errorMessages.getNotPermitted2()+ topicName);
+ // LOGGER.info(errRes);
+ // throw new DMaaPAccessDeniedException(errRes);
+ // }
+ // }
Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
@@ -607,31 +653,35 @@ public class TopicServiceImpl implements TopicService {
* @param dmaapContext
* @param topicName
* @param consumerId
- * @throws DMaaPAccessDeniedException
+ * @throws DMaaPAccessDeniedException
*/
@Override
public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, DMaaPAccessDeniedException {
+ throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
+ DMaaPAccessDeniedException {
LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName);
final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-// if (user == null) {
-//// String permission = "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
-// DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-// String permission = aaf.aafPermissionString(topicName, "manage");
-// if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
-// {
-// LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic " + topicName
-// + ". Authentication failed.");
-// ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
-// DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
-// errorMessages.getNotPermitted1()+" <Grant consume permissions> "+errorMessages.getNotPermitted2()+ topicName);
-// LOGGER.info(errRes);
-// throw new DMaaPAccessDeniedException(errRes);
-// }
-//
-//
-// }
+ // if (user == null) {
+ //// String permission =
+ // "com.att.dmaap.mr.topic"+"|"+topicName+"|"+"manage";
+ // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
+ // String permission = aaf.aafPermissionString(topicName, "manage");
+ // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
+ // {
+ // LOGGER.error("Failed to revoke read access to consumer [" +
+ // consumerId + "] for topic " + topicName
+ // + ". Authentication failed.");
+ // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
+ // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
+ // errorMessages.getNotPermitted1()+" <Grant consume permissions>
+ // "+errorMessages.getNotPermitted2()+ topicName);
+ // LOGGER.info(errRes);
+ // throw new DMaaPAccessDeniedException(errRes);
+ // }
+ //
+ //
+ // }
Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
@@ -651,8 +701,4 @@ public class TopicServiceImpl implements TopicService {
}
-
-
-
-
}
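
The four permit/deny methods above all carry the same commented-out AAF guard. If it were re-enabled, a minimal sketch of a shared helper inside TopicServiceImpl would look like the following (the helper name and error text are illustrative; the classes and calls are the ones named in the commented code, not part of this change):

    private void checkManagePermission(DMaaPContext dmaapContext, String topicName)
            throws DMaaPAccessDeniedException {
        // Reconstructed from the commented-out blocks above; hypothetical only.
        DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
        // Builds a permission of the form "com.att.dmaap.mr.topic|<topicName>|manage"
        String permission = aaf.aafPermissionString(topicName, "manage");
        if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
            ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
                    DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
                    "Operation not permitted on topic " + topicName);
            LOGGER.info(errRes.toString());
            throw new DMaaPAccessDeniedException(errRes);
        }
    }

Each of the four methods could then delegate to this helper before touching the topic, instead of repeating the same disabled block.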
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/TransactionServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java
index 9da2852..ae2d863 100644
--- a/src/main/java/com/att/nsa/cambria/service/impl/TransactionServiceImpl.java
+++ b/src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java
@@ -8,27 +8,27 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
import java.io.IOException;
import org.springframework.stereotype.Service;
import com.att.aft.dme2.internal.jettison.json.JSONException;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.service.TransactionService;
-import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.service.TransactionService;
+import com.att.dmf.mr.transaction.TransactionObj;
import com.att.nsa.configs.ConfigDbException;
/**
@@ -36,7 +36,7 @@ import com.att.nsa.configs.ConfigDbException;
 * the transaction details, such as fetching all transactional objects or
 * getting the details of a particular transaction object
*
- * @author author
+ * @author nilanjana.maity
*
*/
@Service
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/UIServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java
index adebbd2..33bc2f4 100644
--- a/src/main/java/com/att/nsa/cambria/service/impl/UIServiceImpl.java
+++ b/src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.service.impl;
+package com.att.dmf.mr.service.impl;
import java.io.IOException;
import java.util.LinkedList;
@@ -27,25 +27,24 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
+//import kafka.common.TopicExistsException;
+import org.apache.kafka.common.errors.TopicExistsException;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.stereotype.Service;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.DMaaPKafkaMetaBroker;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.service.UIService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import com.att.dmf.mr.metabroker.Topic;
+import com.att.dmf.mr.service.UIService;
+import com.att.dmf.mr.utils.DMaaPResponseBuilder;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.security.db.NsaApiDb;
import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-import kafka.common.TopicExistsException;
-
/**
- * @author author
+ * @author muzainulhaque.qazi
*
*/
@Service
@@ -115,7 +114,7 @@ public class UIServiceImpl implements UIService {
DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
} else {
LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
- throw new IOException("Key [" + apiKey + "] not found.");
+ throw new Exception("Key [" + apiKey + "] not found.");
}
}
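
For context, the branch above sits at the tail of an API-key lookup roughly like the following (the db field and the loadApiKey call are assumptions based on the NsaApiDb/NsaSimpleApiKey imports earlier in this file). Note that widening IOException to Exception means the enclosing method must now declare throws Exception:

    // Assumed surrounding lookup; only the else-branch is what this hunk changes.
    NsaSimpleApiKey key = apiKeyDb.loadApiKey(apiKey);
    if (key != null) {
        DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
    } else {
        LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
        throw new Exception("Key [" + apiKey + "] not found.");
    }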
diff --git a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionFactory.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java
index ea276f9..8ae4c12 100644
--- a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionFactory.java
+++ b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java
@@ -8,21 +8,21 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
/**
*
- * @author author
+ * @author anowarul.islam
*
* @param <K>
*/
diff --git a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObj.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java
index 1a9ae0e..7f5dd3a 100644
--- a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObj.java
+++ b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java
@@ -8,23 +8,23 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
import org.json.JSONObject;
/**
 * This is the interface for the DMaaP transactional logging object.
- * @author author
+ * @author nilanjana.maity
*
*/
public interface DMaaPTransactionObj {
diff --git a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObjDB.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java
index ab5393a..abebaba 100644
--- a/src/main/java/com/att/nsa/cambria/transaction/DMaaPTransactionObjDB.java
+++ b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
import java.util.Set;
@@ -30,7 +30,7 @@ import com.att.nsa.security.NsaSecurityManagerException;
/**
* Persistent storage for Transaction Object and secrets built over an abstract config db. Instances
* of this DB must support concurrent access.
- * @author author
+ * @author nilanjana.maity
*
* @param <K> DMaaPTransactionObj
*/
@@ -48,7 +48,7 @@ public interface DMaaPTransactionObjDB <K extends DMaaPTransactionObj> {
/**
* An exception to signal a Transaction object already exists
- * @author author
+ * @author nilanjana.maity
*
*/
public static class KeyExistsException extends NsaSecurityManagerException
diff --git a/src/main/java/com/att/nsa/cambria/transaction/TransactionObj.java b/src/main/java/com/att/dmf/mr/transaction/TransactionObj.java
index d6e75ef..7223f0f 100644
--- a/src/main/java/com/att/nsa/cambria/transaction/TransactionObj.java
+++ b/src/main/java/com/att/dmf/mr/transaction/TransactionObj.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
import org.json.JSONObject;
@@ -27,7 +27,7 @@ import org.json.JSONObject;
 * This class holds the transaction-enabled logging object
 * details.
*
- * @author author
+ * @author nilanjana.maity
*
*/
public class TransactionObj implements DMaaPTransactionObj {
diff --git a/src/main/java/com/att/nsa/cambria/transaction/TrnRequest.java b/src/main/java/com/att/dmf/mr/transaction/TrnRequest.java
index 551570f..f7f18a2 100644
--- a/src/main/java/com/att/nsa/cambria/transaction/TrnRequest.java
+++ b/src/main/java/com/att/dmf/mr/transaction/TrnRequest.java
@@ -8,24 +8,24 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.transaction;
+package com.att.dmf.mr.transaction;
/**
 * Created to hold transaction-enabled logging details; this is a simple bean
 * class.
*
- * @author author
+ * @author nilanjana.maity
*
*/
public class TrnRequest {
diff --git a/src/main/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java b/src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java
index 10e5da8..c54f2db 100644
--- a/src/main/java/com/att/nsa/cambria/transaction/impl/DMaaPSimpleTransactionFactory.java
+++ b/src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java
@@ -8,30 +8,30 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.transaction.impl;
+package com.att.dmf.mr.transaction.impl;
import org.json.JSONObject;
-import com.att.nsa.cambria.transaction.DMaaPTransactionFactory;
-import com.att.nsa.cambria.transaction.DMaaPTransactionObj;
-import com.att.nsa.cambria.transaction.TransactionObj;
+import com.att.dmf.mr.transaction.DMaaPTransactionFactory;
+import com.att.dmf.mr.transaction.DMaaPTransactionObj;
+import com.att.dmf.mr.transaction.TransactionObj;
/**
* A factory for the simple Transaction implementation
*
*
- * @author author
+ * @author nilanjana.maity
*
*/
public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory<DMaaPTransactionObj> {
diff --git a/src/main/java/com/att/nsa/cambria/utils/ConfigurationReader.java b/src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java
index 4f75653..dd1e4eb 100644
--- a/src/main/java/com/att/nsa/cambria/utils/ConfigurationReader.java
+++ b/src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
import javax.servlet.ServletException;
@@ -28,25 +28,26 @@ import org.apache.curator.framework.CuratorFramework;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
+
+import com.att.dmf.mr.backends.ConsumerFactory;
+import com.att.dmf.mr.backends.MetricsSet;
+import com.att.dmf.mr.backends.Publisher;
+import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
+import com.att.dmf.mr.backends.memory.MemoryConsumerFactory;
+import com.att.dmf.mr.backends.memory.MemoryMetaBroker;
+import com.att.dmf.mr.backends.memory.MemoryQueue;
+import com.att.dmf.mr.backends.memory.MemoryQueuePublisher;
+import com.att.dmf.mr.beans.DMaaPCambriaLimiter;
+import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
+import com.att.dmf.mr.beans.DMaaPZkConfigDb;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metabroker.Broker;
+
+import com.att.dmf.mr.metabroker.Broker1;
+import com.att.dmf.mr.security.DMaaPAuthenticator;
+import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-//import com.att.nsa.apiServer.util.Emailer;
-import com.att.nsa.cambria.utils.Emailer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.backends.Publisher;
-import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import com.att.nsa.cambria.backends.memory.MemoryConsumerFactory;
-import com.att.nsa.cambria.backends.memory.MemoryMetaBroker;
-import com.att.nsa.cambria.backends.memory.MemoryQueue;
-import com.att.nsa.cambria.backends.memory.MemoryQueuePublisher;
-//import com.att.nsa.cambria.beans.DMaaPBlacklist;
-import com.att.nsa.cambria.beans.DMaaPCambriaLimiter;
-import com.att.nsa.cambria.beans.DMaaPZkConfigDb;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.metabroker.Broker;
-import com.att.nsa.cambria.security.DMaaPAuthenticator;
-import com.att.nsa.cambria.security.impl.DMaaPOriginalUebAuthenticator;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.configs.confimpl.MemConfigDb;
import com.att.nsa.drumlin.till.nv.rrNvReadable;
@@ -70,7 +71,7 @@ import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
public class ConfigurationReader {
// private rrNvReadable settings;
- private Broker fMetaBroker;
+ private Broker1 fMetaBroker;
private ConsumerFactory fConsumerFactory;
private Publisher fPublisher;
private MetricsSet fMetrics;
@@ -118,7 +119,8 @@ public class ConfigurationReader {
@Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher,
@Qualifier("curator") CuratorFramework curator,
@Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory,
- @Qualifier("dMaaPKafkaMetaBroker") Broker fMetaBroker, @Qualifier("q") MemoryQueue q,
+ @Qualifier("dMaaPKafkaMetaBroker") Broker1 fMetaBroker,
+ @Qualifier("q") MemoryQueue q,
@Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb<NsaSimpleApiKey> fApiKeyDb,
/*
* @Qualifier("dMaaPTranDb")
@@ -135,6 +137,7 @@ public class ConfigurationReader {
ConfigurationReader.curator = curator;
this.fConsumerFactory = fConsumerFactory;
this.fMetaBroker = fMetaBroker;
this.q = q;
this.mmb = mmb;
this.fApiKeyDb = fApiKeyDb;
@@ -160,7 +163,6 @@ public class ConfigurationReader {
fMetrics.toJson();
fMetrics.setupCambriaSender();
-
// add the admin authenticator
// final String adminSecret = settings.getString ( CambriaConstants.kSetting_AdminSecret, null );
final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret);
@@ -175,33 +177,31 @@ public class ConfigurationReader {
//nsaSecurityManager.addAuthenticator ( new OriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
}
+
catch ( KeyExistsException e )
{
throw new RuntimeException ( "This key can't exist in a fresh in-memory DB!", e );
}
}
-
+
// setup a backend
//final String type = settings.getString(CambriaConstants.kBrokerType, CambriaConstants.kBrokerType_Kafka);
String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType);
if (type==null) type = CambriaConstants.kBrokerType_Kafka;
if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) {
log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka);
-
} else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) {
log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory);
-
fPublisher = new MemoryQueuePublisher(q, mmb);
- fMetaBroker = mmb;
+			// fMetaBroker = mmb; // disabled: fMetaBroker is now typed as Broker1
fConsumerFactory = new MemoryConsumerFactory(q);
} else {
throw new IllegalArgumentException(
"Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + ".");
}
-
fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) );
this.fEmailer = new Emailer();
-
log.info("Broker Type is:" + type);
} catch (SecurityException e) {
@@ -214,7 +214,7 @@ public class ConfigurationReader {
*
* @return
*/
- public Broker getfMetaBroker() {
+ public Broker1 getfMetaBroker() {
return fMetaBroker;
}
@@ -223,7 +223,7 @@ public class ConfigurationReader {
*
* @param fMetaBroker
*/
- public void setfMetaBroker(Broker fMetaBroker) {
+ public void setfMetaBroker(Broker1 fMetaBroker) {
this.fMetaBroker = fMetaBroker;
}
@@ -459,10 +459,9 @@ public class ConfigurationReader {
public static String getMainZookeeperConnectionString() {
//return settings.getString(CambriaConstants.kSetting_ZkConfigDbServers, CambriaConstants.kDefault_ZkConfigDbServers);
- String typeVal = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
- if (typeVal==null) typeVal=CambriaConstants.kDefault_ZkConfigDbServers;
-
- return typeVal;
+ String zkServername = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
+ if (zkServername==null) zkServername=CambriaConstants.kDefault_ZkConfigDbServers;
+ return zkServername;
}
public static String getMainZookeeperConnectionSRoot(){
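
Both getters above follow the same read-or-default idiom that recurs throughout ConfigurationReader; a small sketch of the pattern (the helper name is illustrative, not part of this change):

    // Read a key from the AJSC property map for the message-router property
    // file, falling back to a default when the key is absent.
    private static String getPropertyOrDefault(String key, String defaultValue) {
        String value = com.att.ajsc.filemonitor.AJSCPropertiesMap
                .getProperty(CambriaConstants.msgRtr_prop, key);
        return (value == null) ? defaultValue : value;
    }

With it, getMainZookeeperConnectionString() reduces to a single call with kSetting_ZkConfigDbServers and kDefault_ZkConfigDbServers.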
diff --git a/src/main/java/com/att/nsa/cambria/utils/DMaaPCuratorFactory.java b/src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java
index 4c44c23..5a9968d 100644
--- a/src/main/java/com/att/nsa/cambria/utils/DMaaPCuratorFactory.java
+++ b/src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java
@@ -8,29 +8,30 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
-import com.att.nsa.cambria.constants.CambriaConstants;
+
+import com.att.dmf.mr.constants.CambriaConstants;
import com.att.nsa.drumlin.till.nv.rrNvReadable;
/**
*
*
- * @author author
+ * @author anowarul.islam
*
*
*/
diff --git a/src/main/java/com/att/nsa/cambria/utils/DMaaPResponseBuilder.java b/src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java
index c99e08c..4c38d57 100644
--- a/src/main/java/com/att/nsa/cambria/utils/DMaaPResponseBuilder.java
+++ b/src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -29,17 +29,18 @@ import java.io.PrintWriter;
import java.io.Writer;
import javax.servlet.http.HttpServletResponse;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
+
import org.json.JSONException;
import org.json.JSONObject;
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
/**
 * This class builds the response object that is returned to the user.
*
- * @author author
+ * @author nilanjana.maity
*
*/
@@ -128,11 +129,12 @@ public class DMaaPResponseBuilder {
* @throws IOException
*/
public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException {
-
ctx.getResponse().setStatus(200);
OutputStream os = getStreamForBinaryResponse(ctx, mediaType);
writer.write(os);
-
+ os.close();
+
+
}
/**
@@ -247,21 +249,37 @@ public class DMaaPResponseBuilder {
*/
public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException {
ctx.getResponse().setContentType(contentType);
+
boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
OutputStream os = null;
+		try {
 		if (fResponseEntityAllowed) {
 			os = ctx.getResponse().getOutputStream();
+			return os;
 		} else {
 			os = new NullStream();
+			return os;
+		}
+		// Do not close the stream here: the caller writes the response body
+		// and then closes it (see respondOkWithStream above); closing it in
+		// a finally block would hand every caller an already-closed stream.
+		} catch (Exception e) {
+			throw new IOException("Unable to obtain response output stream", e);
}
- return os;
}
/**
*
- * @author author
+ * @author anowarul.islam
*
*/
private static class NullStream extends OutputStream {
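
With respondOkWithStream now closing the stream itself, StreamWriter implementations should only write the body. A minimal caller sketch, assuming StreamWriter is the single-method callback invoked above:

    // Sketch of a caller; the writer must not close the stream itself.
    DMaaPResponseBuilder.respondOkWithStream(ctx, "application/json", new StreamWriter() {
        @Override
        public void write(OutputStream os) throws IOException {
            os.write("{\"status\":\"ok\"}".getBytes());
        }
    });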
diff --git a/src/main/java/com/att/nsa/cambria/utils/Emailer.java b/src/main/java/com/att/dmf/mr/utils/Emailer.java
index a71370a..4229d94 100644
--- a/src/main/java/com/att/nsa/cambria/utils/Emailer.java
+++ b/src/main/java/com/att/dmf/mr/utils/Emailer.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
import java.io.IOException;
import java.util.Properties;
@@ -41,15 +41,14 @@ import javax.mail.internet.MimeMultipart;
//import org.slf4j.LoggerFactory;
import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.dmf.mr.constants.CambriaConstants;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
/**
* Send an email from a message.
*
- * @author author
+ * @author peter
*/
public class Emailer
{
diff --git a/src/main/java/com/att/nsa/cambria/utils/PropertyReader.java b/src/main/java/com/att/dmf/mr/utils/PropertyReader.java
index bda1c14..58c9fc9 100644
--- a/src/main/java/com/att/nsa/cambria/utils/PropertyReader.java
+++ b/src/main/java/com/att/dmf/mr/utils/PropertyReader.java
@@ -8,36 +8,28 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
-import java.io.File;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.HashMap;
import java.util.Map;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;
import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;
/**
*
- * @author
+ * @author nilesh.labde
*
*
*/
diff --git a/src/main/java/com/att/nsa/cambria/utils/Utils.java b/src/main/java/com/att/dmf/mr/utils/Utils.java
index 6538576..70691cf 100644
--- a/src/main/java/com/att/nsa/cambria/utils/Utils.java
+++ b/src/main/java/com/att/dmf/mr/utils/Utils.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.cambria.utils;
+package com.att.dmf.mr.utils;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
@@ -30,10 +30,10 @@ import java.util.List;
import javax.servlet.http.HttpServletRequest;
-import com.att.nsa.cambria.beans.DMaaPContext;
+import com.att.dmf.mr.beans.DMaaPContext;
/**
 * This is a utility class for various formatting operations
- * @author author
+ * @author nilanjana.maity
*
*/
public class Utils {
diff --git a/src/main/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java b/src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java
index 0ddaef5..08380fb 100644
--- a/src/main/java/com/att/nsa/apiServer/metrics/cambria/DMaaPMetricsSender.java
+++ b/src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.apiServer.metrics.cambria;
+package com.att.mr.apiServer.metrics.cambria;
import java.io.IOException;
import java.net.InetAddress;
@@ -35,12 +35,12 @@ import org.json.JSONObject;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
+import com.att.dmf.mr.constants.CambriaConstants;
+import com.att.dmf.mr.metrics.publisher.CambriaPublisher;
+import com.att.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.metrics.publisher.CambriaPublisher;
-import com.att.nsa.cambria.metrics.publisher.DMaaPCambriaClientFactory;
-
+import com.att.nsa.apiServer.metrics.cambria.MetricsSender;
import com.att.nsa.metrics.CdmMetricsRegistry;
import com.att.nsa.metrics.impl.CdmConstant;
@@ -48,7 +48,7 @@ import com.att.nsa.metrics.impl.CdmConstant;
* MetricsSender will send the given metrics registry content as an event on the
* Cambria event broker to the given topic.
*
- * @author author
+ * @author peter
*
*/
public class DMaaPMetricsSender implements Runnable {
@@ -77,16 +77,16 @@ public class DMaaPMetricsSender implements Runnable {
boolean setEnable=true;
if (cambriaSetting!=null && cambriaSetting.equals("false") )
setEnable= false;
+
if (setEnable) {
String Setting_CambriaBaseUrl=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?"localhost":Setting_CambriaBaseUrl;
String Setting_CambriaTopic=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaTopic);
- if(Setting_CambriaTopic==null) {
- Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap";
- }
+ if(Setting_CambriaTopic==null) Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap";
+ // Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?defaultTopic:Setting_CambriaBaseUrl;
String Setting_CambriaSendFreqSecs=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaSendFreqSecs);
@@ -145,7 +145,7 @@ public class DMaaPMetricsSender implements Runnable {
fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic);
} catch (UnknownHostException e) {
log.warn("Unable to get localhost address in MetricsSender constructor.", e);
- throw new IllegalArgumentException(e);
+ throw new RuntimeException(e);
}
}
@@ -161,9 +161,9 @@ public class DMaaPMetricsSender implements Runnable {
o.put("transactionEnabled", false);
fCambria.send(fHostname, o.toString());
} catch (JSONException e) {
- log.error("Error posting metrics to Cambria at send(): " + e);
+ log.warn("Error posting metrics to Cambria: " + e.getMessage());
} catch (IOException e) {
- log.error("Error posting metrics to Cambria at send(): " + e );
+ log.warn("Error posting metrics to Cambria: " + e.getMessage());
}
}
@@ -179,7 +179,7 @@ public class DMaaPMetricsSender implements Runnable {
private final CambriaPublisher fCambria;
private final String fHostname;
-
+ //private static final Logger log = LoggerFactory.getLogger(MetricsSender.class);
private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsSender.class);
/**
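
Since DMaaPMetricsSender is a Runnable, the periodic send is driven by a scheduler. A hypothetical wiring sketch (the constructor arguments are assumed, "metrics" is a CdmMetricsRegistry created elsewhere, and the real frequency is read from kSetting_CambriaSendFreqSecs):

    java.util.concurrent.ScheduledExecutorService scheduler =
            java.util.concurrent.Executors.newSingleThreadScheduledExecutor();
    Runnable sender = new DMaaPMetricsSender(metrics, "localhost",
            "msgrtr.apinode.metrics.dmaap"); // assumed constructor shape
    scheduler.scheduleAtFixedRate(sender, 30L, 30L, java.util.concurrent.TimeUnit.SECONDS);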
diff --git a/src/main/java/com/att/nsa/filter/ContentLengthFilter.java b/src/main/java/com/att/mr/filter/ContentLengthFilter.java
index 5582f1f..b99f9e6 100644
--- a/src/main/java/com/att/nsa/filter/ContentLengthFilter.java
+++ b/src/main/java/com/att/mr/filter/ContentLengthFilter.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.filter;
+package com.att.mr.filter;
import java.io.IOException;
@@ -32,16 +32,16 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.apache.http.HttpStatus;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
import org.json.JSONObject;
import org.springframework.context.ApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
+import com.att.dmf.mr.CambriaApiException;
+import com.att.dmf.mr.exception.DMaaPErrorMessages;
+import com.att.dmf.mr.exception.DMaaPResponseCode;
+import com.att.dmf.mr.exception.ErrorResponse;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
/**
* Servlet Filter implementation class ContentLengthFilter
@@ -105,7 +105,7 @@ public class ContentLengthFilter implements Filter {
chain.doFilter(req, res);
}
} catch (CambriaApiException | NumberFormatException e) {
- log.error("message size is greater then default:" + e);
+			log.error("message size is greater than default");
ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), errorMessages.getMsgSizeExceeds()
+ jsonObj.toString());
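
For context, the guard this filter applies ahead of the chain is roughly the following (the limit field is an assumption, populated from the DefaultLength bean; the real filter builds an ErrorResponse JSON body as shown above rather than calling sendError):

    public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
            throws IOException, ServletException {
        HttpServletRequest request = (HttpServletRequest) req;
        int contentLength = request.getContentLength(); // -1 when unknown
        if (contentLength > maxMessageBytes) { // maxMessageBytes: assumed config field
            ((HttpServletResponse) res).sendError(HttpStatus.SC_EXPECTATION_FAILED,
                    "message size is greater than the configured default");
            return;
        }
        chain.doFilter(req, res);
    }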
diff --git a/src/main/java/com/att/nsa/filter/DefaultLength.java b/src/main/java/com/att/mr/filter/DefaultLength.java
index c8a6d0a..43169e5 100644
--- a/src/main/java/com/att/nsa/filter/DefaultLength.java
+++ b/src/main/java/com/att/mr/filter/DefaultLength.java
@@ -8,18 +8,18 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
- *
+ *
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
*******************************************************************************/
-package com.att.nsa.filter;
+package com.att.mr.filter;
public class DefaultLength {
diff --git a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java b/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java
deleted file mode 100644
index 692f093..0000000
--- a/src/main/java/com/att/nsa/cambria/backends/kafka/KafkaConsumer.java
+++ /dev/null
@@ -1,245 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.nsa.cambria.backends.kafka;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import kafka.consumer.ConsumerIterator;
-import kafka.consumer.KafkaStream;
-import kafka.javaapi.consumer.ConsumerConnector;
-import kafka.message.MessageAndMetadata;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-import com.att.nsa.cambria.backends.Consumer;
-
-/**
- * A consumer instance that's created per-request. These are stateless so that
- * clients can connect to this service as a proxy.
- *
- * @author author
- *
- */
-public class KafkaConsumer implements Consumer {
- private enum State {
- OPENED, CLOSED
- }
-
- /**
- * KafkaConsumer() is constructor. It has following 4 parameters:-
- * @param topic
- * @param group
- * @param id
- * @param cc
- *
- */
-
- public KafkaConsumer(String topic, String group, String id, ConsumerConnector cc) {
- fTopic = topic;
- fGroup = group;
- fId = id;
- fConnector = cc;
-
- fCreateTimeMs = System.currentTimeMillis();
- fLastTouch = fCreateTimeMs;
-
- fLogTag = fGroup + "(" + fId + ")/" + fTopic;
- offset = 0;
-
- state = KafkaConsumer.State.OPENED;
-
- final Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
- topicCountMap.put(fTopic, 1);
- final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = fConnector
- .createMessageStreams(topicCountMap);
- final List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(fTopic);
- fStream = streams.iterator().next();
- }
-
-
- /** getName() method returns string type value.
- * returns 3 parameters in string:-
- * fTopic,fGroup,fId
- * @Override
- */
- public String getName() {
- return fTopic + " : " + fGroup + " : " + fId;
- }
-
- /** getCreateTimeMs() method returns long type value.
- * returns fCreateTimeMs variable value
- * @Override
- *
- */
- public long getCreateTimeMs() {
- return fCreateTimeMs;
- }
-
- /** getLastAccessMs() method returns long type value.
- * returns fLastTouch variable value
- * @Override
- *
- */
- public long getLastAccessMs() {
- return fLastTouch;
- }
-
-
- /**
- * nextMessage() is synchronized method that means at a time only one object can access it.
- * getName() method returns String which is of type Consumer.Message
- * @Override
- * */
- public synchronized Consumer.Message nextMessage() {
- if (getState() == KafkaConsumer.State.CLOSED) {
- log.warn("nextMessage() called on closed KafkaConsumer " + getName());
- return null;
- }
-
- try {
- ConsumerIterator<byte[], byte[]> it = fStream.iterator();
- if (it.hasNext()) {
- final MessageAndMetadata<byte[], byte[]> msg = it.next();
- offset = msg.offset();
-
- return new Consumer.Message() {
- @Override
- public long getOffset() {
- return msg.offset();
- }
-
- @Override
- public String getMessage() {
- return new String(msg.message());
- }
- };
- }
- } catch (kafka.consumer.ConsumerTimeoutException x) {
- log.error(fLogTag + ": ConsumerTimeoutException in Kafka consumer; returning null & Exception at nextMessage() : " + x);
- } catch (java.lang.IllegalStateException x) {
- log.error(fLogTag + ": Error found next() at : " + x);
- }
-
- return null;
- }
-
- /** getOffset() method returns long type value.
- * returns offset variable value
- * @Override
- *
- */
- public long getOffset() {
- return offset;
- }
-
- /** commit offsets
- * commitOffsets() method will be called on closed of KafkaConsumer.
- * @Override
- *
- */
- public void commitOffsets() {
- if (getState() == KafkaConsumer.State.CLOSED) {
- log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
- return;
- }
- fConnector.commitOffsets();
- }
-
- /**
- * updating fLastTouch with current time in ms
- */
- public void touch() {
- fLastTouch = System.currentTimeMillis();
- }
-
- /** getLastTouch() method returns long type value.
- * returns fLastTouch variable value
- *
- */
- public long getLastTouch() {
- return fLastTouch;
- }
-
- /**
- * setting the kafkaConsumer state to closed
- */
- public synchronized void close() {
- if (getState() == KafkaConsumer.State.CLOSED) {
- log.warn("close() called on closed KafkaConsumer " + getName());
- return;
- }
-
- setState(KafkaConsumer.State.CLOSED);
- fConnector.shutdown();
- }
-
- /**
- * getConsumerGroup() returns Consumer group
- * @return
- */
- public String getConsumerGroup() {
- return fGroup;
- }
-
- /**
- * getConsumerId returns Consumer Id
- * @return
- */
- public String getConsumerId() {
- return fId;
- }
-
- /**
- * getState returns kafkaconsumer state
- * @return
- */
- private KafkaConsumer.State getState() {
- return this.state;
- }
-
- /**
- * setState() sets the kafkaConsumer state
- * @param state
- */
- private void setState(KafkaConsumer.State state) {
- this.state = state;
- }
-
- private ConsumerConnector fConnector;
- private final String fTopic;
- private final String fGroup;
- private final String fId;
- private final String fLogTag;
- private final KafkaStream<byte[], byte[]> fStream;
- private long fCreateTimeMs;
- private long fLastTouch;
- private long offset;
- private KafkaConsumer.State state;
- private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
- //private static final Logger log = LoggerFactory.getLogger(KafkaConsumer.class);
-}
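
The deleted class above is built on the old high-level consumer API (ConsumerConnector/KafkaStream), which newer Kafka clients deprecate and remove. For comparison only — this is not the replacement class from this commit — the same next-message/commit/close flow on the modern client API looks roughly like this (broker address and property values are placeholders):

    import java.util.Collections;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    public class NewApiConsumerSketch {
        public static void pollOnce(String topic, String group, String id) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092"); // placeholder
            props.put("group.id", group + "--" + topic); // mirrors the old per-topic group naming
            props.put("client.id", id);
            props.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
            try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList(topic));
                ConsumerRecords<byte[], byte[]> records = consumer.poll(500); // poll(long) in the 0.11 client
                for (ConsumerRecord<byte[], byte[]> r : records) {
                    long offset = r.offset(); // what Consumer.Message.getOffset() returned
                    String message = new String(r.value()); // what getMessage() returned
                }
                consumer.commitSync(); // replaces ConsumerConnector.commitOffsets()
            } // close() replaces ConsumerConnector.shutdown()
        }
    }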
diff --git a/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
deleted file mode 100644
index 63f1dd5..0000000
--- a/src/main/java/com/att/nsa/cambria/beans/DMaaPKafkaConsumerFactory.java
+++ /dev/null
@@ -1,323 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.nsa.cambria.beans;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Properties;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.recipes.locks.InterProcessMutex;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.backends.kafka.KafkaConsumer;
-import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache;
-import com.att.nsa.cambria.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.utils.ConfigurationReader;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import kafka.consumer.ConsumerConfig;
-import kafka.javaapi.consumer.ConsumerConnector;
-
-/**
- * @author author
- *
- */
-public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
-
- //private static final Logger log = LoggerFactory .getLogger(DMaaPKafkaConsumerFactory.class);
- private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
- /**
- * constructor initialization
- *
- * @param settings
- * @param metrics
- * @param curator
- * @throws missingReqdSetting
- * @throws KafkaConsumerCacheException
- * @throws UnknownHostException
- */
- public DMaaPKafkaConsumerFactory(
- @Qualifier("propertyReader") rrNvReadable settings,
- @Qualifier("dMaaPMetricsSet") MetricsSet metrics,
- @Qualifier("curator") CuratorFramework curator)
- throws missingReqdSetting, KafkaConsumerCacheException,
- UnknownHostException {
- /*final String apiNodeId = settings.getString(
- CambriaConstants.kSetting_ApiNodeIdentifier,
- InetAddress.getLocalHost().getCanonicalHostName()
- + ":"
- + settings.getInt(CambriaConstants.kSetting_Port,
- CambriaConstants.kDefault_Port));*/
- String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- CambriaConstants.kSetting_ApiNodeIdentifier);
- if (apiNodeId == null){
-
- apiNodeId=InetAddress.getLocalHost().getCanonicalHostName()
- + ":"
- + settings.getInt(CambriaConstants.kSetting_Port,
- CambriaConstants.kDefault_Port);
- }
-
- log.info("This Cambria API Node identifies itself as [" + apiNodeId
- + "].");
- final String mode = CambriaConstants.DMAAP;
- /*fSettings = settings;
- fZooKeeper = fSettings.getString(kSettings_KafkaZookeeper, settings
- .getString(CambriaConstants.kSetting_ZkConfigDbServers,
- CambriaConstants.kDefault_ZkConfigDbServers));*/
-
- String strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
- if(null==strkSettings_KafkaZookeeper){
- strkSettings_KafkaZookeeper = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
- if (null==strkSettings_KafkaZookeeper) strkSettings_KafkaZookeeper = CambriaConstants.kDefault_ZkConfigDbServers;
-
- }
- fZooKeeper= strkSettings_KafkaZookeeper;
-
-		// Cache-enable flag: read ConsumerFactory.kSetting_EnableCache from the
-		// MsgRtr property file, falling back to kDefault_IsCacheEnabled. The
-		// local variable must not shadow the inherited kSetting_EnableCache
-		// constant, or the lookup below would use "true"/"false" as the key.
-		boolean cacheEnabled = kDefault_IsCacheEnabled;
-		String strCacheEnabled = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, kSetting_EnableCache);
-		if (null != strCacheEnabled) {
-			cacheEnabled = Boolean.parseBoolean(strCacheEnabled);
-		}
-
-		final boolean isCacheEnabled = cacheEnabled;
-
-
- fCache = (isCacheEnabled) ? new KafkaConsumerCache(apiNodeId,
- metrics) : null;
- if (fCache != null) {
- fCache.startCache(mode, curator);
- }
- }
-
- @Override
- public Consumer getConsumerFor(String topic, String consumerGroupName,
- String consumerId, int timeoutMs) throws UnavailableException {
- KafkaConsumer kc;
-
- try {
- kc = (fCache != null) ? fCache.getConsumerFor(topic,
- consumerGroupName, consumerId) : null;
- } catch (KafkaConsumerCacheException e) {
- throw new UnavailableException(e);
- }
-
- if (kc == null) {
-
- final InterProcessMutex ipLock = new InterProcessMutex( ConfigurationReader.getCurator(), "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
-// final InterProcessMutex fLock = new InterProcessMutex(
-// ConfigurationReader.getCurator(), "/consumerFactory/"
-// + topic + "/" + consumerGroupName + "/"
-// + consumerId);
- boolean locked = false;
- try {
-
- locked = ipLock.acquire(30, TimeUnit.SECONDS);
- if (!locked) {
- // FIXME: this seems to cause trouble in some cases. This exception
- // gets thrown routinely. Possibly a consumer trying multiple servers
- // at once, producing a never-ending cycle of overlapping locks?
- // The problem is that it throws and winds up sending a 503 to the
- // client, which would be incorrect if the client is causing trouble
- // by switching back and forth.
-
- throw new UnavailableException("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic + ", " + consumerGroupName + ", " + consumerId + ")");
- }
-
-// if (!fLock.acquire(30, TimeUnit.SECONDS)) {
-// throw new UnavailableException(
-// "Could not acquire lock in order to create (topic, group, consumer) = "
-// + "(" + topic + ", " + consumerGroupName
-// + ", " + consumerId + ")");
-// }
-
- fCache.signalOwnership(topic, consumerGroupName, consumerId);
-
- log.info("Creating Kafka consumer for group ["
- + consumerGroupName + "], consumer [" + consumerId
- + "], on topic [" + topic + "].");
-
- final String fakeGroupName = consumerGroupName + "--" + topic;
-
- final ConsumerConfig ccc = createConsumerConfig(fakeGroupName,
- consumerId);
- final ConsumerConnector cc = kafka.consumer.Consumer
- .createJavaConsumerConnector(ccc);
- kc = new KafkaConsumer(topic, consumerGroupName, consumerId, cc);
-
- if (fCache != null) {
- fCache.putConsumerFor(topic, consumerGroupName, consumerId,
- kc);
- }
- } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
-			log.error("ZooKeeper timeout in getConsumerFor(topic=" + topic + ", group=" + consumerGroupName
-					+ ", consumerId=" + consumerId + "): " + x);
- throw new UnavailableException("Couldn't connect to ZK.");
- } catch (KafkaConsumerCacheException e) {
- log.error("Failed to cache consumer (this may have performance implications): "
- + e.getMessage());
- } catch (Exception e) {
- throw new UnavailableException(
- "Error while acquiring consumer factory lock", e);
- } finally {
- if ( locked )
- {
- try {
- ipLock.release();
- } catch (Exception e) {
- throw new UnavailableException("Error while releasing consumer factory lock", e);
- }
- }
- }
- }
-
- return kc;
- }
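-	/*
-	 * Usage sketch (illustrative only; topic/group/client names are
-	 * hypothetical, and the Consumer API surface is assumed):
-	 *
-	 *   Consumer c = factory.getConsumerFor("sampleTopic", "group1", "client1", 30000);
-	 *   try {
-	 *       Consumer.Message m;
-	 *       while ((m = c.nextMessage()) != null) { ... }
-	 *       c.commitOffsets();
-	 *   } finally {
-	 *       c.close();
-	 *   }
-	 *
-	 * When the cache is enabled the returned consumer may be shared across
-	 * requests, so close() is normally deferred to the cache.
-	 */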
-
- @Override
- public synchronized void destroyConsumer(String topic,
- String consumerGroup, String clientId) {
- if (fCache != null) {
- fCache.dropConsumer(topic, consumerGroup, clientId);
- }
- }
-
-	@Override
-	public synchronized Collection<? extends Consumer> getConsumers() {
-		// guard against a disabled cache, as destroyConsumer() does above
-		return (fCache != null) ? fCache.getConsumers() : java.util.Collections.<Consumer>emptyList();
-	}
-
-	@Override
-	public synchronized void dropCache() {
-		if (fCache != null) {
-			fCache.dropAllConsumers();
-		}
-	}
-
- private ConsumerConfig createConsumerConfig(String groupId,
- String consumerId) {
- final Properties props = new Properties();
- props.put("zookeeper.connect", fZooKeeper);
- props.put("group.id", groupId);
- props.put("consumer.id", consumerId);
- //props.put("auto.commit.enable", "false");
- // additional settings: start with our defaults, then pull in configured
- // overrides
- props.putAll(KafkaInternalDefaults);
- for (String key : KafkaConsumerKeys) {
- transferSettingIfProvided(props, key, "kafka");
- }
-
- return new ConsumerConfig(props);
- }
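-	/*
-	 * Property layering example (hypothetical values): KafkaInternalDefaults
-	 * may set consumer.timeout.ms=100; if the MsgRtr property file also
-	 * supplies kafka.consumer.timeout.ms=500, transferSettingIfProvided()
-	 * strips the "kafka." prefix and the resulting ConsumerConfig sees
-	 * consumer.timeout.ms=500.
-	 */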
-
- //private final rrNvReadable fSettings;
- private final KafkaConsumerCache fCache;
-
- private String fZooKeeper;
-
- private static final String kSettings_KafkaZookeeper = "kafka.client.zookeeper";
-
- private static final HashMap<String, String> KafkaInternalDefaults = new HashMap<String, String>();
-
-	/**
-	 * Loads the Kafka internal defaults (consumer timeout, ZooKeeper
-	 * connection/session/sync timeouts, auto-commit settings, fetch size)
-	 * from the MsgRtr property file into KafkaInternalDefaults.
-	 */
-	public static void populateKafkaInternalDefaultsMap() {
-		try {
-			final HashMap<String, String> map1 = com.att.ajsc.filemonitor.AJSCPropertiesMap
-					.getProperties(CambriaConstants.msgRtr_prop);
-
-			final String[] internalKeys = { "consumer.timeout.ms", "zookeeper.connection.timeout.ms",
-					"zookeeper.session.timeout.ms", "zookeeper.sync.time.ms", "auto.commit.interval.ms",
-					"fetch.message.max.bytes", "auto.commit.enable" };
-			for (String key : internalKeys) {
-				KafkaInternalDefaults.put(key, map1.get(key));
-			}
-		} catch (Exception e) {
-			log.error("Failed to load Kafka Internal Properties.", e);
-		}
-	}
-
-	private static final String[] KafkaConsumerKeys = { "socket.timeout.ms",
-			"socket.receive.buffer.bytes", "fetch.message.max.bytes",
-			"auto.commit.interval.ms", "queued.max.message.chunks",
-			"rebalance.max.retries", "fetch.min.bytes", "fetch.wait.max.bytes",
-			"rebalance.backoff.ms", "refresh.leader.backoff.ms",
-			"auto.offset.reset", "consumer.timeout.ms",
-			"zookeeper.session.timeout.ms", "zookeeper.connection.timeout.ms",
-			"zookeeper.sync.time.ms" };
-
- private static String makeLongKey(String key, String prefix) {
- return prefix + "." + key;
- }
-
- private void transferSettingIfProvided(Properties target, String key,
- String prefix) {
- String keyVal= AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,makeLongKey(key, prefix));
-
- // if (fSettings.hasValueFor(makeLongKey(key, prefix))) {
- if (null!=keyVal) {
- // final String val = fSettings
- // .getString(makeLongKey(key, prefix), "");
- log.info("Setting [" + key + "] to " + keyVal + ".");
- target.put(key, keyVal);
- }
- }
-
- }
-
-
diff --git a/src/main/java/com/att/nsa/cambria/beans/ZkClientFactory.java b/src/main/java/com/att/nsa/cambria/beans/ZkClientFactory.java
deleted file mode 100644
index 2aedb95..0000000
--- a/src/main/java/com/att/nsa/cambria/beans/ZkClientFactory.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package com.att.nsa.cambria.beans;
-
-import org.I0Itec.zkclient.ZkClient;
-
-import com.att.nsa.cambria.utils.ConfigurationReader;
-
-import kafka.utils.ZKStringSerializer$;
-
-public class ZkClientFactory {
-
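-	/**
-	 * Creates a ZkClient against the main ZooKeeper connection string with
-	 * 10-second session and connection timeouts, serializing values with
-	 * Kafka's ZKStringSerializer so entries remain readable by Kafka's own
-	 * tooling.
-	 */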
- public static ZkClient createZkClient(){
- return new ZkClient(ConfigurationReader.getMainZookeeperConnectionString(), 10000, 10000,
- ZKStringSerializer$.MODULE$);
-
- }
-
-}
diff --git a/src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java b/src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java
deleted file mode 100644
index bb2effd..0000000
--- a/src/main/java/com/att/nsa/cambria/service/impl/EventsServiceImpl.java
+++ /dev/null
@@ -1,795 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.nsa.cambria.service.impl;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.LinkedList;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.http.HttpStatus;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.nsa.cambria.CambriaApiException;
-import com.att.nsa.cambria.backends.Consumer;
-import com.att.nsa.cambria.backends.ConsumerFactory;
-import com.att.nsa.cambria.backends.ConsumerFactory.UnavailableException;
-import com.att.nsa.cambria.backends.MetricsSet;
-import com.att.nsa.cambria.backends.Publisher;
-import com.att.nsa.cambria.backends.Publisher.message;
-import com.att.nsa.cambria.beans.DMaaPCambriaLimiter;
-import com.att.nsa.cambria.beans.DMaaPContext;
-import com.att.nsa.cambria.beans.LogDetails;
-import com.att.nsa.cambria.constants.CambriaConstants;
-import com.att.nsa.cambria.exception.DMaaPAccessDeniedException;
-import com.att.nsa.cambria.exception.DMaaPErrorMessages;
-import com.att.nsa.cambria.exception.DMaaPResponseCode;
-import com.att.nsa.cambria.exception.ErrorResponse;
-import com.att.nsa.cambria.metabroker.Broker.TopicExistsException;
-import com.att.nsa.cambria.metabroker.Topic;
-import com.att.nsa.cambria.resources.CambriaEventSet;
-import com.att.nsa.cambria.resources.CambriaOutboundEventStream;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticator;
-import com.att.nsa.cambria.security.DMaaPAAFAuthenticatorImpl;
-import com.att.nsa.cambria.security.DMaaPAuthenticatorImpl;
-import com.att.nsa.cambria.service.EventsService;
-import com.att.nsa.cambria.utils.DMaaPResponseBuilder;
-import com.att.nsa.cambria.utils.Utils;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.standards.MimeTypes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.util.rrConvertor;
-
-import kafka.producer.KeyedMessage;
-
-/**
- * This class provides the functionality to publish messages to and consume
- * messages from Kafka.
- *
- * @author author
- *
- */
-@Service
-public class EventsServiceImpl implements EventsService {
- //private static final Logger LOG = Logger.getLogger(EventsServiceImpl.class);
- private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
-
- private static final String BATCH_LENGTH = "event.batch.length";
- private static final String TRANSFER_ENCODING = "Transfer-Encoding";
- @Autowired
- private DMaaPErrorMessages errorMessages;
-
- //@Value("${metrics.send.cambria.topic}")
- //private String metricsTopic;
-
- public void setErrorMessages(DMaaPErrorMessages errorMessages) {
- this.errorMessages = errorMessages;
- }
-
- /**
- * @param ctx
- * @param topic
- * @param consumerGroup
- * @param clientId
- * @throws ConfigDbException,
- * TopicExistsException, AccessDeniedException,
- * UnavailableException, CambriaApiException, IOException
- *
- *
- */
- @Override
- public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
- throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
- CambriaApiException, IOException,DMaaPAccessDeniedException {
- final long startTime = System.currentTimeMillis();
- final HttpServletRequest req = ctx.getRequest();
-
-		if (clientId == null)
-			throw new NullPointerException("clientId must not be null");
-
- boolean isAAFTopic=false;
- // was this host blacklisted?
-		final String remoteAddr = Utils.getRemoteAddress(ctx);
- if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
- {
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
- "] is blacklisted. Please contact the cluster management team."
- ,null,Utils.getFormattedDate(new Date()),topic,
- Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
- null,null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
-
-
- int limit = CambriaConstants.kNoLimit;
- if (req.getParameter("limit") != null) {
- limit = Integer.parseInt(req.getParameter("limit"));
- }
-
- int timeoutMs= CambriaConstants.kNoTimeout;
- String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"timeout");
- if(strtimeoutMS!=null)timeoutMs=Integer.parseInt(strtimeoutMS);
- //int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", CambriaConstants.kNoTimeout);
- if (req.getParameter("timeout") != null) {
- timeoutMs = Integer.parseInt(req.getParameter("timeout"));
- }
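-		// Effective timeout precedence: the request's "timeout" query parameter
-		// overrides the configured "timeout" property, which overrides
-		// CambriaConstants.kNoTimeout. For example (hypothetical values),
-		// GET /events/<topic>/<group>/<id>?timeout=15000 long-polls up to 15s.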
-
- // By default no filter is applied if filter is not passed as a
- // parameter in the request URI
- String topicFilter = CambriaConstants.kNoFilter;
- if (null != req.getParameter("filter")) {
- topicFilter = req.getParameter("filter");
- }
-		// pretty: print each message on its own line
- String prettyval="0";
- String strPretty=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"pretty");
- if (null!=strPretty)prettyval=strPretty;
-
- String metaval="0";
- String strmeta=AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"meta");
- if (null!=strmeta)metaval=strmeta;
-
- final boolean pretty = rrConvertor
- .convertToBooleanBroad(prettyval);
- // withMeta to print offset along with message
- final boolean withMeta = rrConvertor
- .convertToBooleanBroad(metaval);
-
-
- /*final boolean pretty = rrConvertor
- .convertToBooleanBroad(ctx.getConfigReader().getSettings().getString("pretty", "0"));
- // withMeta to print offset along with message
- final boolean withMeta = rrConvertor
- .convertToBooleanBroad(ctx.getConfigReader().getSettings().getString("meta", "0"));
-*/
- final LogWrap logger = new LogWrap ( topic, consumerGroup, clientId);
- logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter);
-
- // is this user allowed to read this topic?
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
- final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-
- if (metatopic == null) {
- // no such topic.
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
- DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
- errorMessages.getTopicNotExist()+"-[" + topic + "]",null,Utils.getFormattedDate(new Date()),topic,null,null,
- clientId,ctx.getRequest().getRemoteHost());
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
- if (null==metricTopicname)
- metricTopicname="msgrtr.apinode.metrics.dmaap";
-
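-		// Authorization decision (as implemented below): requests without an
-		// Authorization header are checked against the topic owner's API key,
-		// except for the metrics topic; requests with an Authorization header
-		// but no authenticated API user must pass the AAF "sub" permission
-		// check for the topic.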
- if(null==ctx.getRequest().getHeader("Authorization")&& !topic.equalsIgnoreCase(metricTopicname))
- {
- if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))){
- // check permissions
- metatopic.checkUserRead(user);
- }
- }
- // if headers are not provided then user will be null
- if(user == null && null!=ctx.getRequest().getHeader("Authorization"))
- {
- // the topic name will be sent by the client
-// String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"sub";
- DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
- String permission = aaf.aafPermissionString(topic, "sub");
- if(!aaf.aafAuthentication(ctx.getRequest(), permission))
- {
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- errorMessages.getNotPermitted1()+" read "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,null,null,
- clientId,ctx.getRequest().getRemoteHost());
- LOG.info(errRes.toString());
- throw new DMaaPAccessDeniedException(errRes);
-
- }
- isAAFTopic = true;
- }
- Consumer c = null;
- try {
- final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
- final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
- rl.onCall(topic, consumerGroup, clientId);
-
- c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs);
-
- /* final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c,
- ctx.getConfigReader().getSettings()).timeout(timeoutMs).limit(limit).filter(topicFilter)
- .pretty(pretty).withMeta(withMeta)
- // .atOffset(topicOffset)
- .build();*/
- final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs).limit(limit).filter(topicFilter)
- .pretty(pretty).withMeta(withMeta).build();
- coes.setDmaapContext(ctx);
- coes.setTopic(metatopic);
-			coes.setTransEnabled(isTransEnabled() || isAAFTopic);
- coes.setTopicStyle(isAAFTopic);
-
- DMaaPResponseBuilder.setNoCacheHeadings(ctx);
-
- DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
-
- // No IOException thrown during respondOkWithStream, so commit the
- // new offsets to all the brokers
- c.commitOffsets();
- final int sent = coes.getSentCount();
-
- metricsSet.consumeTick(sent);
- rl.onSend(topic, consumerGroup, clientId, sent);
-
- final long elapsedMs = System.currentTimeMillis() - startTime;
- logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset());
-
- } catch (UnavailableException excp) {
- logger.warn(excp.getMessage(), excp);
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
- DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
- errorMessages.getServerUnav()+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
- clientId,ctx.getRequest().getRemoteHost());
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
-
- } catch (CambriaApiException excp) {
- logger.warn(excp.getMessage(), excp);
- throw excp;
- } catch (Exception excp) {
- logger.warn("Couldn't respond to client, closing cambria consumer", excp);
- ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
- DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
- "Couldn't respond to client, closing cambria consumer"+ excp.getMessage(),null,Utils.getFormattedDate(new Date()),topic,null,null,
- clientId,ctx.getRequest().getRemoteHost());
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- } finally {
-			// If the consumer cache is disabled, close the consumer now that
-			// we're done with it; otherwise the cache retains it for reuse.
-			boolean cacheEnabled = ConsumerFactory.kDefault_IsCacheEnabled;
-			String strCacheEnabled = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, ConsumerFactory.kSetting_EnableCache);
-			if (null != strCacheEnabled) {
-				cacheEnabled = Boolean.parseBoolean(strCacheEnabled);
-			}
-			if (!cacheEnabled && (c != null)) {
- c.close();
-
- }
- }
- }
-
- /**
- * @throws missingReqdSetting
- *
- */
- @Override
- public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
- final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
- CambriaApiException, IOException, missingReqdSetting,DMaaPAccessDeniedException {
-
- // is this user allowed to write to this topic?
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
- final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
- boolean isAAFTopic=false;
-
- // was this host blacklisted?
- final String remoteAddr = Utils.getRemoteAddress(ctx);
-
- if ( ctx.getConfigReader().getfIpBlackList().contains ( remoteAddr ) )
- {
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), "Source address [" + remoteAddr +
- "] is blacklisted. Please contact the cluster management team."
- ,null,Utils.getFormattedDate(new Date()),topic,
- Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
- null,null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
-
-		// enforced topic-name prefix for AAF-protected topics, if configured
-		final String topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
-				"enforced.topic.name.AAF");
- String metricTopicname= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
- if (null==metricTopicname)
- metricTopicname="msgrtr.apinode.metrics.dmaap";
- boolean topicNameEnforced=false;
- if (null != topicNameStd && topic.startsWith(topicNameStd) )
- {
- topicNameEnforced = true;
- }
-
-		// Check whether the user may publish to this topic. This path applies when
-		// no Authorization header is supplied or when UEB API-key authentication is
-		// in use; checkUserWrite(user) throws if the user lacks publish rights.
-
- if(null != metatopic && null != metatopic.getOwner() && !("".equals(metatopic.getOwner())) && null==ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname))
- {
- metatopic.checkUserWrite(user);
- }
-
-
-
- // if headers are not provided then user will be null
- if(topicNameEnforced || (user == null && null!=ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)))
- {
- // the topic name will be sent by the client
- // String permission = "com.att.dmaap.mr.topic"+"|"+topic+"|"+"pub";
- DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
- String permission = aaf.aafPermissionString(topic, "pub");
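-			// e.g. for a hypothetical topic "org.onap.sample", the permission
-			// string resembles "com.att.dmaap.mr.topic|org.onap.sample|pub"
-			// (format per the commented-out line above).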
- if(!aaf.aafAuthentication(ctx.getRequest(), permission))
- {
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- errorMessages.getNotPermitted1()+" publish "+errorMessages.getNotPermitted2()+topic,null,Utils.getFormattedDate(new Date()),topic,
- Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
- null,null);
- LOG.info(errRes.toString());
- throw new DMaaPAccessDeniedException(errRes);
- }
- isAAFTopic=true;
- }
-
- final HttpServletRequest req = ctx.getRequest();
-
- // check for chunked input
- boolean chunked = false;
- if (null != req.getHeader(TRANSFER_ENCODING)) {
- chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
- }
- // get the media type, or set it to a generic value if it wasn't
- // provided
- String mediaType = req.getContentType();
- if (mediaType == null || mediaType.length() == 0) {
- mediaType = MimeTypes.kAppGenericBinary;
- }
-
- if (mediaType.contains("charset=UTF-8")) {
- mediaType = mediaType.replace("; charset=UTF-8", "").trim();
- }
-
-		final boolean istransidreqd = isTransEnabled();
-
- if (isAAFTopic || istransidreqd ) {
- pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
- }
- else
- {
- pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
- }
-
-
- }
-
- /**
- *
- * @param ctx
- * @param topic
- * @param msg
- * @param defaultPartition
- * @param chunked
- * @param mediaType
- * @throws ConfigDbException
- * @throws AccessDeniedException
- * @throws TopicExistsException
- * @throws CambriaApiException
- * @throws IOException
- */
- private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition,
- boolean chunked, String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
- CambriaApiException, IOException {
- final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
- // setup the event set
- final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
-
- // start processing, building a batch to push to the backend
- final long startMs = System.currentTimeMillis();
- long count = 0;
-
- long maxEventBatch=(long)1024 * 16;
- String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
- if(null!=batchlen)maxEventBatch=Long.parseLong(batchlen);
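-		// Batch sizing: messages accumulate until the batch reaches
-		// maxEventBatch (16384 by default, overridable via the
-		// "event.batch.length" property) and are then flushed to Kafka in a
-		// single sendBatchMessage call.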
-
- // long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
- final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
- final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
- try {
- // for each message...
- Publisher.message m = null;
- while ((m = events.next()) != null) {
- // add the message to the batch
- batch.add(m);
- final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
- m.getMessage());
- kms.add(data);
-				// flush once the batch reaches the configured maximum
-				// (>= keeps this path consistent with the transactional one)
-				final int sizeNow = batch.size();
-				if (sizeNow >= maxEventBatch) {
- ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
- kms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- }
- }
-
- // send the pending batch
- final int sizeNow = batch.size();
- if (sizeNow > 0) {
- ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
- kms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- }
-
- final long endMs = System.currentTimeMillis();
- final long totalMs = endMs - startMs;
-
- LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
-			// build a response
- final JSONObject response = new JSONObject();
- response.put("count", count);
- response.put("serverTimeMs", totalMs);
- DMaaPResponseBuilder.respondOk(ctx, response);
-
- } catch (Exception excp) {
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg=null;
- if(excp instanceof CambriaApiException) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
-
- }
- ErrorResponse errRes = new ErrorResponse(status,
- DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
- null,ctx.getRequest().getRemoteHost(),
- null,null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
-
-
- }
- }
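-	/*
-	 * Example success response (illustrative values only):
-	 *   {"count": 3, "serverTimeMs": 12}
-	 */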
-
- /**
- *
- * @param ctx
- * @param inputStream
- * @param topic
- * @param partitionKey
- * @param requestTime
- * @param chunked
- * @param mediaType
- * @throws ConfigDbException
- * @throws AccessDeniedException
- * @throws TopicExistsException
- * @throws IOException
- * @throws CambriaApiException
- */
- private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
- final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
- throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
- CambriaApiException {
-
- final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
- // setup the event set
- final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
-
- // start processing, building a batch to push to the backend
- final long startMs = System.currentTimeMillis();
- long count = 0;
- long maxEventBatch = (long)1024 * 16;
- String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,BATCH_LENGTH);
- if(null!=evenlen)maxEventBatch=Long.parseLong(evenlen);
- //final long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
- final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
- final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
- Publisher.message m = null;
- int messageSequence = 1;
- Long batchId = 1L;
- final boolean transactionEnabled = true;
- int publishBatchCount=0;
- SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
-
- //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
- try {
- // for each message...
- batchId=DMaaPContext.getBatchID();
-
- String responseTransactionId = null;
-
- while ((m = events.next()) != null) {
-
- //LOG.warn("Batch Start Id: " + Utils.getFromattedBatchSequenceId(batchId));
-
-
- addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
- transactionEnabled);
- messageSequence++;
-
- // add the message to the batch
- batch.add(m);
-
- responseTransactionId = m.getLogDetails().getTransactionId();
-
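-				// Wrap each record in an envelope so subscribers can correlate
-				// it with the publish transaction; the produced payload looks
-				// like (illustrative): {"message": <original event>, "transactionId": <id>}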
- JSONObject jsonObject = new JSONObject();
- jsonObject.put("message", m.getMessage());
- jsonObject.put("transactionId", responseTransactionId);
- final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
- jsonObject.toString());
- kms.add(data);
-
- // check if the batch is full
- final int sizeNow = batch.size();
- if (sizeNow >= maxEventBatch) {
- String startTime = sdf.format(new Date());
- LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
- try {
- ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
- //transactionLogs(batch);
- for (message msg : batch) {
- LogDetails logDetails = msg.getLogDetails();
- LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
- }
- } catch (Exception excp) {
-
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg=null;
- if(excp instanceof CambriaApiException) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
- ErrorResponse errRes = new ErrorResponse(status,
- DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-"+errorMessages.getPublishMsgError()+":"+topic+ "."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,
- null,Utils.getFormattedDate(new Date()),topic,
- Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
- null,null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- kms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- publishBatchCount=sizeNow;
- count += sizeNow;
- //batchId++;
- String endTime = sdf.format(new Date());
- LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
- + ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
- batchId=DMaaPContext.getBatchID();
- }
- }
-
- // send the pending batch
- final int sizeNow = batch.size();
- if (sizeNow > 0) {
- String startTime = sdf.format(new Date());
- LOG.info("Batch Start Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch Start Id=" + batchId+"]");
- try {
- ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
- //transactionLogs(batch);
- for (message msg : batch) {
- LogDetails logDetails = msg.getLogDetails();
- LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
- }
- } catch (Exception excp) {
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg=null;
- if(excp instanceof CambriaApiException) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
-
- ErrorResponse errRes = new ErrorResponse(status,
- DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+ errorMessages.getPublishMsgCount()+count+"."+errorMsg,
- null,Utils.getFormattedDate(new Date()),topic,
- Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
- null,null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- kms.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- //batchId++;
- String endTime = sdf.format(new Date());
- publishBatchCount=sizeNow;
- LOG.info("Batch End Details:[serverIp="+ctx.getRequest().getLocalAddr()+",Batch End Id=" + batchId
- + ",Batch Total=" + publishBatchCount+",Batch Start Time="+startTime+",Batch End Time="+endTime+"]");
- }
-
- final long endMs = System.currentTimeMillis();
- final long totalMs = endMs - startMs;
-
- LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
- if (null != responseTransactionId) {
- ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
- }
-
- // build a response
- final JSONObject response = new JSONObject();
- response.put("count", count);
- response.put("serverTimeMs", totalMs);
- DMaaPResponseBuilder.respondOk(ctx, response);
-
- } catch (Exception excp) {
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg=null;
- if(excp instanceof CambriaApiException) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
-
- ErrorResponse errRes = new ErrorResponse(
- status,
- DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-"+errorMessages.getPublishMsgError()+":"+topic+"."+errorMessages.getPublishMsgCount()+count+"."+errorMsg,null,Utils.getFormattedDate(new Date()),topic,
- Utils.getUserApiKey(ctx.getRequest()),ctx.getRequest().getRemoteHost(),
- null,null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- }
-
- /**
- *
- * @param msg
- * @param topic
- * @param request
- * @param messageCreationTime
- * @param messageSequence
- * @param batchId
- * @param transactionEnabled
- */
- private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
- final String messageCreationTime, final int messageSequence, final Long batchId,
- final boolean transactionEnabled) {
- LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
- transactionEnabled);
- logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
- msg.setTransactionEnabled(transactionEnabled);
- msg.setLogDetails(logDetails);
- }
-
-
-
- /**
- *
- * @author author
- *
- */
- private static class LogWrap {
- private final String fId;
-
- /**
- * constructor initialization
- *
- * @param topic
- * @param cgroup
- * @param cid
- */
- public LogWrap(String topic, String cgroup, String cid) {
- fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
- }
-
- /**
- *
- * @param msg
- */
- public void info(String msg) {
- LOG.info(fId + msg);
- }
-
- /**
- *
- * @param msg
- * @param t
- */
- public void warn(String msg, Exception t) {
- LOG.warn(fId + msg, t);
- }
-
- }
-
-	private boolean isTransEnabled() {
-		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "transidUEBtopicreqd");
-		return (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true"));
-	}
-
- private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
- final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
- LogDetails logDetails = new LogDetails();
- logDetails.setTopicId(topicName);
- logDetails.setMessageTimestamp(messageTimestamp);
- logDetails.setPublisherId(Utils.getUserApiKey(request));
- logDetails.setPublisherIp(request.getRemoteHost());
- logDetails.setMessageBatchId(batchId);
- logDetails.setMessageSequence(String.valueOf(messageSequence));
- logDetails.setTransactionEnabled(transactionEnabled);
- logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
- logDetails.setServerIp(request.getLocalAddr());
- return logDetails;
- }
-
- /*public String getMetricsTopic() {
- return metricsTopic;
- }
-
- public void setMetricsTopic(String metricsTopic) {
- this.metricsTopic = metricsTopic;
- }*/
-
-} \ No newline at end of file