summaryrefslogtreecommitdiffstats
path: root/src/main/java/com
diff options
context:
space:
mode:
Diffstat (limited to 'src/main/java/com')
-rw-r--r--src/main/java/com/att/dmf/mr/CambriaApiException.java80
-rw-r--r--src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java88
-rw-r--r--src/main/java/com/att/dmf/mr/backends/Consumer.java105
-rw-r--r--src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java118
-rw-r--r--src/main/java/com/att/dmf/mr/backends/MetricsSet.java71
-rw-r--r--src/main/java/com/att/dmf/mr/backends/Publisher.java99
-rw-r--r--src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java397
-rw-r--r--src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java123
-rw-r--r--src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt386
-rw-r--r--src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java742
-rw-r--r--src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java159
-rw-r--r--src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java228
-rw-r--r--src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java45
-rw-r--r--src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java100
-rw-r--r--src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java184
-rw-r--r--src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java201
-rw-r--r--src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java207
-rw-r--r--src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java92
-rw-r--r--src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java109
-rw-r--r--src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java88
-rw-r--r--src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java288
-rw-r--r--src/main/java/com/att/dmf/mr/beans/DMaaPContext.java104
-rw-r--r--src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java361
-rw-r--r--src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java495
-rw-r--r--src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java231
-rw-r--r--src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java140
-rw-r--r--src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java45
-rw-r--r--src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java51
-rw-r--r--src/main/java/com/att/dmf/mr/beans/LogDetails.java214
-rw-r--r--src/main/java/com/att/dmf/mr/beans/TopicBean.java155
-rw-r--r--src/main/java/com/att/dmf/mr/constants/CambriaConstants.java126
-rw-r--r--src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java42
-rw-r--r--src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java94
-rw-r--r--src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java248
-rw-r--r--src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java93
-rw-r--r--src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java137
-rw-r--r--src/main/java/com/att/dmf/mr/exception/ErrorResponse.java135
-rw-r--r--src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java64
-rw-r--r--src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java123
-rw-r--r--src/main/java/com/att/dmf/mr/metabroker/Broker.java92
-rw-r--r--src/main/java/com/att/dmf/mr/metabroker/Broker1.java95
-rw-r--r--src/main/java/com/att/dmf/mr/metabroker/Topic.java133
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java52
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java89
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java52
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java101
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java146
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java420
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java100
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java74
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java169
-rw-r--r--src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java422
-rw-r--r--src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java114
-rw-r--r--src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java554
-rw-r--r--src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java169
-rw-r--r--src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java141
-rw-r--r--src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java229
-rw-r--r--src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java140
-rw-r--r--src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java39
-rw-r--r--src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java80
-rw-r--r--src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java61
-rw-r--r--src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java133
-rw-r--r--src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java87
-rw-r--r--src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java293
-rw-r--r--src/main/java/com/att/dmf/mr/service/AdminService.java83
-rw-r--r--src/main/java/com/att/dmf/mr/service/ApiKeysService.java105
-rw-r--r--src/main/java/com/att/dmf/mr/service/EventsService.java75
-rw-r--r--src/main/java/com/att/dmf/mr/service/MMService.java66
-rw-r--r--src/main/java/com/att/dmf/mr/service/MetricsService.java54
-rw-r--r--src/main/java/com/att/dmf/mr/service/TopicService.java176
-rw-r--r--src/main/java/com/att/dmf/mr/service/TransactionService.java61
-rw-r--r--src/main/java/com/att/dmf/mr/service/UIService.java92
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java190
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java320
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java153
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java867
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java600
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java115
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java694
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java100
-rw-r--r--src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java210
-rw-r--r--src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java44
-rw-r--r--src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java83
-rw-r--r--src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java86
-rw-r--r--src/main/java/com/att/dmf/mr/transaction/TransactionObj.java202
-rw-r--r--src/main/java/com/att/dmf/mr/transaction/TrnRequest.java183
-rw-r--r--src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java62
-rw-r--r--src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java492
-rw-r--r--src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java69
-rw-r--r--src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java370
-rw-r--r--src/main/java/com/att/dmf/mr/utils/Emailer.java211
-rw-r--r--src/main/java/com/att/dmf/mr/utils/PropertyReader.java125
-rw-r--r--src/main/java/com/att/dmf/mr/utils/Utils.java175
-rw-r--r--src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java197
-rw-r--r--src/main/java/com/att/mr/filter/ContentLengthFilter.java134
-rw-r--r--src/main/java/com/att/mr/filter/DefaultLength.java37
96 files changed, 0 insertions, 17184 deletions
diff --git a/src/main/java/com/att/dmf/mr/CambriaApiException.java b/src/main/java/com/att/dmf/mr/CambriaApiException.java
deleted file mode 100644
index cdf95ab..0000000
--- a/src/main/java/com/att/dmf/mr/CambriaApiException.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr;
-
-import org.json.JSONObject;
-
-import com.att.dmf.mr.exception.ErrorResponse;
-import com.att.nsa.apiServer.NsaAppException;
-
-public class CambriaApiException extends NsaAppException
-{
- /*
- * defined long type constant serialVersionUID
- */
- private static final long serialVersionUID = 1L;
-
- private transient ErrorResponse errRes;
- /**
- * Implements constructor CambriaApiException
- * @param jsonObject
- *
- */
- public CambriaApiException ( JSONObject jsonObject )
- {
- super ( jsonObject );
- }
-
- /**
- * Implements constructor CambriaApiException
- * @param status
- * @param msg
- */
- public CambriaApiException ( int status, String msg )
- {
- super ( status, msg );
- }
-
- /**
- * Implements constructor CambriaApiException
- * @param status
- * @param jsonObject
- */
- public CambriaApiException ( int status, JSONObject jsonObject )
- {
- super ( status, jsonObject );
- }
-
- public CambriaApiException (ErrorResponse errRes)
- {
- super(errRes.getHttpStatusCode(),errRes.getErrorMessage());
- this.errRes = errRes;
- }
-
- public ErrorResponse getErrRes() {
- return errRes;
- }
-
- public void setErrRes(ErrorResponse errRes) {
- this.errRes = errRes;
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java b/src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java
deleted file mode 100644
index f0c57b5..0000000
--- a/src/main/java/com/att/dmf/mr/CambriaApiVersionInfo.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-/**
- * CambriaApiVersionInfo will provide the version of cambria code
- *
- * @author peter
- *
- */
-public class CambriaApiVersionInfo {
-
- /**
- * 3 constants are defined:-
- * PROPS,VERSION and LOG
- */
-
- private static final Properties PROPS = new Properties();
- private static final String VERSION;
-
-
- private static final EELFLogger LOG = EELFManager.getInstance().getLogger(CambriaApiVersionInfo.class);
-
- /**
- * private constructor created with no argument
- * to avoid default constructor
- */
- private CambriaApiVersionInfo()
- {
-
- }
-
- /**
- * returns version of String type
- */
- public static String getVersion() {
- return VERSION;
- }
-
- /**
- *
- * defines static initialization method
- * It initializes VERSION Constant
- * it handles exception in try catch block
- * and throws IOException
- *
- */
-
- static {
- String use = null;
- try {
- final InputStream is = CambriaApiVersionInfo.class
- .getResourceAsStream("/cambriaApiVersion.properties");
- if (is != null) {
- PROPS.load(is);
- use = PROPS.getProperty("cambriaApiVersion", null);
- }
- } catch (IOException e) {
- LOG.error("Failed due to IO EXception:"+e);
- }
- VERSION = use;
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/Consumer.java b/src/main/java/com/att/dmf/mr/backends/Consumer.java
deleted file mode 100644
index f4a9a80..0000000
--- a/src/main/java/com/att/dmf/mr/backends/Consumer.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends;
-
-
-/**
- * A consumer interface. Consumers pull the next message from a given topic.
- * @author peter
- */
-public interface Consumer
-{
- /**
- * A message interface provide the offset and message
- * @author nilanjana.maity
- *
- */
- public interface Message
- {
- /**
- * returning the offset of that particular message
- * @return long
- */
- long getOffset ();
- /**
- * returning the message
- * @return message
- */
- String getMessage ();
- }
-
- /**
- * Get this consumer's name
- * @return name
- */
- String getName ();
-
- /**
- * Get creation time in ms
- * @return
- */
- long getCreateTimeMs ();
-
- /**
- * Get last access time in ms
- * @return
- */
- long getLastAccessMs ();
-
- /**
- * Get the next message from this source. This method must not block.
- * @return the next message, or null if none are waiting
- */
- Message nextMessage ();
-
- /**
- * Get the next message from this source. This method must not block.
- * @param atOffset start with the next message at or after atOffset. -1 means next from last request
- * @return the next message, or null if none are waiting
- */
-
-
-
- /**
- * Close/clean up this consumer
- * @return
- */
- boolean close();
-
- /**
- * Commit the offset of the last consumed message
- *
- */
- void commitOffsets();
-
- /**
- * Get the offset this consumer is currently at
- * @return offset
- */
- long getOffset();
-
- void setOffset(long offset);
-
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java b/src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java
deleted file mode 100644
index 55e0645..0000000
--- a/src/main/java/com/att/dmf/mr/backends/ConsumerFactory.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends;
-
-import java.util.Collection;
-import java.util.HashMap;
-
-import com.att.dmf.mr.CambriaApiException;
-
-/**
- * This is the factory class to instantiate the consumer
- *
- * @author nilanjana.maity
- *
- */
-
-public interface ConsumerFactory {
- public static final String kSetting_EnableCache = "cambria.consumer.cache.enabled";
- public static boolean kDefault_IsCacheEnabled = true;
-
- /**
- * User defined exception for Unavailable Exception
- *
- * @author nilanjana.maity
- *
- */
- public class UnavailableException extends Exception {
- /**
- * Unavailable Exception with message
- *
- * @param msg
- */
- public UnavailableException(String msg) {
- super(msg);
- }
-
- /**
- * Unavailable Exception with the throwable object
- *
- * @param t
- */
- public UnavailableException(Throwable t) {
- super(t);
- }
-
- /**
- * Unavailable Exception with the message and cause
- *
- * @param msg
- * @param cause
- */
- public UnavailableException(String msg, Throwable cause) {
- super(msg, cause);
- }
-
- private static final long serialVersionUID = 1L;
- }
-
- /**
- * For admin use, drop all cached consumers.
- */
- public void dropCache();
-
- /**
- * Get or create a consumer for the given set of info (topic, group, id)
- *
- * @param topic
- * @param consumerGroupId
- * @param clientId
- * @param timeoutMs
- * @return
- * @throws UnavailableException
- */
-
-
- /**
- * For factories that employ a caching mechanism, this allows callers to
- * explicitly destory a consumer that resides in the factory's cache.
- *
- * @param topic
- * @param consumerGroupId
- * @param clientId
- */
- public void destroyConsumer(String topic, String consumerGroupId,
- String clientId);
-
- /**
- * For admin/debug, we provide access to the consumers
- *
- * @return a collection of consumers
- */
- public Collection<? extends Consumer> getConsumers();
-
- public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException;
- public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs, String remotehost) throws UnavailableException, CambriaApiException;
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/MetricsSet.java b/src/main/java/com/att/dmf/mr/backends/MetricsSet.java
deleted file mode 100644
index de665b8..0000000
--- a/src/main/java/com/att/dmf/mr/backends/MetricsSet.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends;
-
-import com.att.nsa.metrics.CdmMetricsRegistry;
-/**
- * This interface will help to generate metrics
- * @author nilanjana.maity
- *
- */
-public interface MetricsSet extends CdmMetricsRegistry{
-
- /**
- * This method will setup cambria sender code
- */
- public void setupCambriaSender ();
- /**
- * This method will define on route complete
- * @param name
- * @param durationMs
- */
- public void onRouteComplete ( String name, long durationMs );
- /**
- * This method will help the kafka publisher while publishing the messages
- * @param amount
- */
- public void publishTick ( int amount );
- /**
- * This method will help the kafka consumer while consuming the messages
- * @param amount
- */
- public void consumeTick ( int amount );
- /**
- * This method will call if the kafka consumer cache missed
- */
- public void onKafkaConsumerCacheMiss ();
- /**
- * This method will call if the kafka consumer cache will be hit while publishing/consuming the messages
- */
- public void onKafkaConsumerCacheHit ();
- /**
- * This method will call if the kafka consumer cache claimed
- */
- public void onKafkaConsumerClaimed ();
- /**
- * This method will call if Kafka consumer is timed out
- */
- public void onKafkaConsumerTimeout ();
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/Publisher.java b/src/main/java/com/att/dmf/mr/backends/Publisher.java
deleted file mode 100644
index 25022e2..0000000
--- a/src/main/java/com/att/dmf/mr/backends/Publisher.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.kafka.clients.producer.ProducerRecord;
-
-import com.att.dmf.mr.beans.LogDetails;
-
-/**
- * A publisher interface. Publishers receive messages and post them to a topic.
- * @author peter
- */
-public interface Publisher
-{
- /**
- * A message interface. The message has a key and a body.
- * @author peter
- */
- public interface message
- {
- /**
- * Get the key for this message. The key is used to partition messages
- * into "sub-streams" that have guaranteed order. The key can be null,
- * which means the message can be processed without any concern for order.
- *
- * @return a key, possibly null
- */
- String getKey();
-
- /**
- * Get the message body.
- * @return a message body
- */
- String getMessage();
- /**
- * set the logging params for transaction enabled logging
- * @param logDetails
- */
- void setLogDetails (LogDetails logDetails);
- /**
- * Get the log details for transaction enabled logging
- * @return LogDetails
- */
- LogDetails getLogDetails ();
-
- /**
- * boolean transactionEnabled
- * @return true/false
- */
- boolean isTransactionEnabled();
- /**
- * Set the transaction enabled flag from prop file or topic based implementation
- * @param transactionEnabled
- */
- void setTransactionEnabled(boolean transactionEnabled);
- }
-
- /**
- * Send a single message to a topic. Equivalent to sendMessages with a list of size 1.
- * @param topic
- * @param msg
- * @throws IOException
- */
- public void sendMessage ( String topic, message msg ) throws IOException;
-
- /**
- * Send messages to a topic.
- * @param topic
- * @param msgs
- * @throws IOException
- */
- public void sendMessages ( String topic, List<? extends message> msgs ) throws IOException;
-
- public void sendBatchMessageNew(String topic ,ArrayList<ProducerRecord<String,String>> kms) throws IOException;
- public void sendMessagesNew( String topic, List<? extends message> msgs ) throws IOException;
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java
deleted file mode 100644
index 9be9073..0000000
--- a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011Consumer.java
+++ /dev/null
@@ -1,397 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.kafka;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.FutureTask;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.RunnableFuture;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.KafkaException;
-
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.constants.CambriaConstants;
-
-
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * A consumer instance that's created per-request. These are stateless so that
- * clients can connect to this service as a proxy.
- *
- * @author Ram
- *
- */
-public class Kafka011Consumer implements Consumer {
- private enum State {
- OPENED, CLOSED
- }
-
-
- /**
- * KafkaConsumer() is constructor. It has following 4 parameters:-
- *
- * @param topic
- * @param group
- * @param id
- * @param cc
- *
- */
-
- public Kafka011Consumer(String topic, String group, String id, KafkaConsumer<String, String> cc,
- KafkaLiveLockAvoider2 klla) throws Exception {
- fTopic = topic;
- fGroup = group;
- fId = id;
- fCreateTimeMs = System.currentTimeMillis();
- fLastTouch = fCreateTimeMs;
- fPendingMsgs = new LinkedBlockingQueue<ConsumerRecord<String, String>>();
- fLogTag = fGroup + "(" + fId + ")/" + fTopic;
- offset = 0;
- state = Kafka011Consumer.State.OPENED;
- kConsumer = cc;
- fKafkaLiveLockAvoider = klla;
- synchronized (kConsumer) {
- kConsumer.subscribe(Arrays.asList(topic));
- }
- }
-
- private Consumer.Message makeMessage(final ConsumerRecord<String, String> msg) {
- return new Consumer.Message() {
- @Override
- public long getOffset() {
- offset = msg.offset();
- return offset;
- }
-
- @Override
- public String getMessage() {
- return new String(msg.value());
- }
- };
- }
-
- @Override
- public synchronized Consumer.Message nextMessage() {
-
- try {
- if (fPendingMsgs.size() > 0) {
- return makeMessage(fPendingMsgs.take());
- }
- } catch (InterruptedException x) {
- log.warn("After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage() + ")",
- x);
- }
-
- Callable<Boolean> run = new Callable<Boolean>() {
- @Override
- public Boolean call() throws Exception {
- try {
- ConsumerRecords<String, String> records;
- synchronized (kConsumer) {
- records = kConsumer.poll(500);
- }
- for (ConsumerRecord<String, String> record : records) {
-
- fPendingMsgs.offer(record);
- }
-
- } catch (KafkaException x) {
- log.debug(fLogTag + ": KafkaException " + x.getMessage());
-
- } catch (java.lang.IllegalStateException | java.lang.IllegalArgumentException x) {
- log.error(fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. "
- + x.getMessage());
-
- }
-
-
- return true;
- }
- };
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- RunnableFuture future = new FutureTask(run);
- ExecutorService service = Executors.newSingleThreadExecutor();
- service.execute(future);
- try {
- future.get(5, TimeUnit.SECONDS); // wait 1
- // second
- } catch (TimeoutException ex) {
- // timed out. Try to stop the code if possible.
- String apiNodeId = null;
- try {
- apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
- } catch (UnknownHostException e1) {
- // TODO Auto-generated catch block
- log.error("unable to get the localhost address");
- }
-
- try {
- if (fKafkaLiveLockAvoider != null)
- fKafkaLiveLockAvoider.unlockConsumerGroup(apiNodeId, fTopic + "::" + fGroup);
- } catch (Exception e) {
- log.error("unlockConsumerGroup(" + apiNodeId + "," + fTopic + "::" + fGroup);
- }
-
- forcePollOnConsumer();
- future.cancel(true);
- } catch (Exception ex) {
- // timed out. Try to stop the code if possible.
- future.cancel(true);
- }
- service.shutdown();
-
- return null;
-
- }
-
- /**
- * getName() method returns string type value. returns 3 parameters in
- * string:- fTopic,fGroup,fId
- *
- * @Override
- */
- public String getName() {
- return fTopic + " : " + fGroup + " : " + fId;
- }
-
- /**
- * getCreateTimeMs() method returns long type value. returns fCreateTimeMs
- * variable value
- *
- * @Override
- *
- */
- public long getCreateTimeMs() {
- return fCreateTimeMs;
- }
-
- public org.apache.kafka.clients.consumer.KafkaConsumer<String, String> getConsumer() {
- return kConsumer;
- }
-
- /**
- * getLastAccessMs() method returns long type value. returns fLastTouch
- * variable value
- *
- * @Override
- *
- */
- public long getLastAccessMs() {
- return fLastTouch;
- }
-
- /**
- * getOffset() method returns long type value. returns offset variable value
- *
- * @Override
- *
- */
- public long getOffset() {
- return offset;
- }
-
- /**
- * commit offsets commitOffsets() method will be called on closed of
- * KafkaConsumer.
- *
- * @Override
- *
- *
- * public void commitOffsets() { if (getState() ==
- * KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called
- * on closed KafkaConsumer " + getName()); return; }
- * fConnector.commitOffsets(); }
- */
-
- /**
- * updating fLastTouch with current time in ms
- */
- public void touch() {
- fLastTouch = System.currentTimeMillis();
- }
-
- /**
- * getLastTouch() method returns long type value. returns fLastTouch
- * variable value
- *
- */
- public long getLastTouch() {
- return fLastTouch;
- }
-
- /**
- * setting the kafkaConsumer state to closed
- */
-
- public boolean close() {
- if (getState() == Kafka011Consumer.State.CLOSED) {
-
- log.error("close() called on closed KafkaConsumer " + getName());
- return true;
- }
-
-
- boolean retVal = kafkaConnectorshuttask();
- return retVal;
-
- }
-
- /* time out if the kafka shutdown fails for some reason */
-
- private boolean kafkaConnectorshuttask() {
- Callable<Boolean> run = new Callable<Boolean>() {
- @Override
- public Boolean call() throws Exception {
-
- try {
-
- kConsumer.close();
-
- } catch (Exception e) {
- log.info("@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
- throw new Exception("@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
-
- }
- log.info("Kafka connection closure with in 15 seconds by a Executors task");
-
- return true;
- }
- };
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- RunnableFuture future = new FutureTask(run);
- ExecutorService service = Executors.newSingleThreadExecutor();
- service.execute(future);
- try {
- future.get(200, TimeUnit.SECONDS); // wait 1
- // second
- } catch (TimeoutException ex) {
- // timed out. Try to stop the code if possible.
- log.info("Timeout Occured - Kafka connection closure with in 300 seconds by a Executors task");
- future.cancel(true);
- setState(Kafka011Consumer.State.OPENED);
- } catch (Exception ex) {
- // timed out. Try to stop the code if possible.
- log.error("Exception occured Occured - Kafka connection closure with in 300 seconds by a Executors task"
- + ex);
- future.cancel(true);
- setState(Kafka011Consumer.State.OPENED);
- return false;
- }
- service.shutdown();
- setState(Kafka011Consumer.State.CLOSED);
- return true;
- }
-
- public void forcePollOnConsumer() {
- Kafka011ConsumerUtil.forcePollOnConsumer(fTopic, fGroup, fId);
-
- }
-
- /**
- * getConsumerGroup() returns Consumer group
- *
- * @return
- */
- public String getConsumerGroup() {
- return fGroup;
- }
-
- /**
- * getConsumerId returns Consumer Id
- *
- * @return
- */
- public String getConsumerId() {
- return fId;
- }
-
- /**
- * getState returns kafkaconsumer state
- *
- * @return
- */
- private Kafka011Consumer.State getState() {
- return this.state;
- }
-
- /**
- * setState() sets the kafkaConsumer state
- *
- * @param state
- */
- private void setState(Kafka011Consumer.State state) {
- this.state = state;
- }
-
-
- private final String fTopic;
- private final String fGroup;
- private final String fId;
- private final String fLogTag;
-
- private KafkaConsumer<String, String> kConsumer;
- private long fCreateTimeMs;
- private long fLastTouch;
- private long offset;
- private Kafka011Consumer.State state;
- private KafkaLiveLockAvoider2 fKafkaLiveLockAvoider;
- private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011Consumer.class);
- private final LinkedBlockingQueue<ConsumerRecord<String, String>> fPendingMsgs;
-
- @Override
- public void commitOffsets() {
- if (getState() == Kafka011Consumer.State.CLOSED) {
- log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
- return;
- }
- kConsumer.commitSync();
-
-
- }
-
- @Override
- public void setOffset(long offsetval) {
- offset = offsetval;
- }
-
-
- public void setConsumerCache(KafkaConsumerCache cache) {
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java b/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java
deleted file mode 100644
index e066df5..0000000
--- a/src/main/java/com/att/dmf/mr/backends/kafka/Kafka011ConsumerUtil.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.kafka;
-
-import java.util.ArrayList;
-
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-
-
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * A consumer Util class for force polling when a rebalance issue is anticipated
- *
- * @author Ram
- *
- */
-public class Kafka011ConsumerUtil {
- private static final EELFLogger log = EELFManager.getInstance().getLogger(Kafka011ConsumerUtil.class);
-
- /**
- * @param fconsumercache
- * @param fTopic
- * @param fGroup
- * @param fId
- * @return
- */
- public static boolean forcePollOnConsumer(final String fTopic, final String fGroup, final String fId) {
-
- Thread forcepollThread = new Thread(new Runnable() {
- public void run() {
- try {
-
- ArrayList<Kafka011Consumer> kcsList = null;
-
- kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(fTopic + "::" + fGroup + "::", fId);
- if (null != kcsList) {
- for (int counter = 0; counter < kcsList.size(); counter++) {
-
- Kafka011Consumer kc1 = kcsList.get(counter);
-
- try {
- ConsumerRecords<String, String> recs = kc1.getConsumer().poll(0);
- log.info("soft poll on " + kc1);
- } catch (java.util.ConcurrentModificationException e) {
- log.error("Error occurs for " + e);
- }
-
- }
-
- }
-
- } catch (Exception e) {
- log.error("Failed and go to Exception block for " + fGroup + " " + e.getMessage());
- }
- }
- });
-
- forcepollThread.start();
-
- return false;
-
- }
-
- /**
- * @param fconsumercache
- * @param group
- * @return
- */
- public static boolean forcePollOnConsumer(final String group) {
-
- Thread forcepollThread = new Thread(new Runnable() {
- public void run() {
- try {
- ArrayList<Kafka011Consumer> kcsList = new ArrayList<Kafka011Consumer>();
- kcsList = KafkaConsumerCache.getInstance().getConsumerListForCG(group);
-
- if (null != kcsList) {
-
- for (int counter = 0; counter < kcsList.size(); counter++) {
-
- Kafka011Consumer kc1 = kcsList.get(counter);
- log.info("soft poll on remote nodes " + kc1);
- ConsumerRecords<String, String> recs = kc1.getConsumer().poll(0);
- }
-
- }
-
- } catch (java.util.ConcurrentModificationException e) {
- log.error("Error occurs for " + e);
- } catch (Exception e) {
- log.error("Failed and go to Exception block for " + group + " " + e.getMessage());
- }
- }
- });
-
- forcepollThread.start();
- return false;
-
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt
deleted file mode 100644
index dd6259f..0000000
--- a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumer.txt
+++ /dev/null
@@ -1,386 +0,0 @@
-package com.att.dmf.mr.backends.kafka;
-
-import java.util.Arrays;
-import java.util.Properties;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.FutureTask;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.RunnableFuture;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.common.KafkaException;
-
-import com.att.dmf.mr.backends.Consumer;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * A consumer instance that's created per-request. These are stateless so that
- * clients can connect to this service as a proxy.
- *
- * @author peter
- *
- */
-public class KafkaConsumer implements Consumer {
- private enum State {
- OPENED, CLOSED
- }
-
- /**
- * KafkaConsumer() is constructor. It has following 4 parameters:-
- *
- * @param topic
- * @param group
- * @param id
- * @param cc
- *
- */
-
- public KafkaConsumer(String topic, String group, String id, Properties prop) throws Exception {
- fTopic = topic;
- fGroup = group;
- fId = id;
- // fConnector = cc;
-
- fCreateTimeMs = System.currentTimeMillis();
- fLastTouch = fCreateTimeMs;
- fPendingMsgs = new LinkedBlockingQueue<ConsumerRecord<String,String>> ();
- fLogTag = fGroup + "(" + fId + ")/" + fTopic;
- offset = 0;
-
- state = KafkaConsumer.State.OPENED;
-
- // final Map<String, Integer> topicCountMap = new HashMap<String,
- // Integer>();
- // topicCountMap.put(fTopic, 1);
- // log.info(fLogTag +" kafka Consumer started at "
- // +System.currentTimeMillis());
- // final Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
- // fConnector.createMessageStreams(topicCountMap);
- // final List<KafkaStream<byte[], byte[]>> streams =
- // consumerMap.get(fTopic);
-
- kConsumer = new org.apache.kafka.clients.consumer.KafkaConsumer<>(prop);
- // System.out.println("I am in Consumer APP " + topic + "-- " +
- // fConsumer);
- kConsumer.subscribe(Arrays.asList(topic));
- log.info(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
- System.out.println("-----id " +id);
-
-
- try { ConsumerRecords<String, String> records =
- kConsumer.poll(500); System.out.println("---" +
- records.count());
-
- for (ConsumerRecord<String, String> record : records) {
- System.out.printf("offset = %d, key = %s, value = %s",
- record.offset(), record.key(), record.value()); String t =
- record.value();
-
- }
- }catch(Exception e){
- System.out.println( e);
- }
- System.out.println(fLogTag + " kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
- kConsumer.commitSync();
- // fConsumer.close();
-
-
- /*
- * ConsumerRecords<String, String> records = fConsumer.poll(500);
- * System.out.println("---" + records.count());
- *
- * for (ConsumerRecord<String, String> record : records) {
- * System.out.printf("offset = %d, key = %s, value = %s",
- * record.offset(), record.key(), record.value()); String t =
- * record.value();
- *
- * }
- *
- *
- * fConsumer.commitSync(); fConsumer.close();
- */
-
- // fStream = streams.iterator().next();
- }
-
-
-
- private Consumer.Message makeMessage ( final ConsumerRecord<String,String> msg )
- {
- return new Consumer.Message()
- {
- @Override
- public long getOffset ()
- {
- return msg.offset ();
- }
-
- @Override
- public String getMessage ()
- {
- return new String ( msg.value () );
- }
- };
- }
-
- @Override
- public synchronized Consumer.Message nextMessage ()
- {
-
- try
- {
- if ( fPendingMsgs.size () > 0 )
- {
- return makeMessage ( fPendingMsgs.take () );
- }
- }
- catch ( InterruptedException x )
- {
- log.warn ( "After size>0, pending msg take() threw InterruptedException. Ignoring. (" + x.getMessage () + ")", x );
- }
-
-
- try
- {
- boolean foundMsgs = false;
- System.out.println("entering into pollingWWWWWWWWWWWWWWWWW");
- final ConsumerRecords<String,String> records = kConsumer.poll ( 100 );
- System.out.println("polling doneXXXXXXXXXXXXXXXXXXXXXXXXXXX....");
- for ( ConsumerRecord<String,String> record : records )
- {
- foundMsgs = true;
- fPendingMsgs.offer ( record );
- }
-
- }
- catch ( KafkaException x )
- {
- log.debug ( fLogTag + ": KafkaException " + x.getMessage () );
-
- }
- catch ( java.lang.IllegalStateException | java.lang.IllegalArgumentException x )
- {
- log.error ( fLogTag + ": Illegal state/arg exception in Kafka consumer; dropping stream. " + x.getMessage () );
-
- }
-
- return null;
- }
-
-
-
- /**
- * getName() method returns string type value. returns 3 parameters in
- * string:- fTopic,fGroup,fId
- *
- * @Override
- */
- public String getName() {
- return fTopic + " : " + fGroup + " : " + fId;
- }
-
- /**
- * getCreateTimeMs() method returns long type value. returns fCreateTimeMs
- * variable value
- *
- * @Override
- *
- */
- public long getCreateTimeMs() {
- return fCreateTimeMs;
- }
-
- public org.apache.kafka.clients.consumer.KafkaConsumer getConsumer() {
- return kConsumer;
- }
-
- /**
- * getLastAccessMs() method returns long type value. returns fLastTouch
- * variable value
- *
- * @Override
- *
- */
- public long getLastAccessMs() {
- return fLastTouch;
- }
-
-
-
- /**
- * getOffset() method returns long type value. returns offset variable value
- *
- * @Override
- *
- */
- public long getOffset() {
- return offset;
- }
-
- /**
- * commit offsets commitOffsets() method will be called on closed of
- * KafkaConsumer.
- *
- * @Override
- *
- *
- * public void commitOffsets() { if (getState() ==
- * KafkaConsumer.State.CLOSED) { log.warn("commitOffsets() called
- * on closed KafkaConsumer " + getName()); return; }
- * fConnector.commitOffsets(); }
- */
-
- /**
- * updating fLastTouch with current time in ms
- */
- public void touch() {
- fLastTouch = System.currentTimeMillis();
- }
-
- /**
- * getLastTouch() method returns long type value. returns fLastTouch
- * variable value
- *
- */
- public long getLastTouch() {
- return fLastTouch;
- }
-
- /**
- * setting the kafkaConsumer state to closed
- */
- public synchronized boolean close() {
-
- if (getState() == KafkaConsumer.State.CLOSED) {
-
- log.warn("close() called on closed KafkaConsumer " + getName());
- return true;
- }
-
- setState(KafkaConsumer.State.CLOSED);
- // fConnector.shutdown();
- boolean retVal = kafkaConnectorshuttask();
- return retVal;
-
- }
-
- /* time out if the kafka shutdown fails for some reason */
-
- private boolean kafkaConnectorshuttask() {
- Callable<Boolean> run = new Callable<Boolean>() {
- @Override
- public Boolean call() throws Exception {
- // your code to be timed
- try {
- System.out.println("consumer closing....." + kConsumer);
- kConsumer.close();
- } catch (Exception e) {
- log.info("@@@@@@Kafka Stream shutdown erorr occurred " + getName() + " " + e);
- }
- log.info("Kafka connection closure with in 15 seconds by a Executors task");
- return true;
- }
- };
-
- RunnableFuture future = new FutureTask(run);
- ExecutorService service = Executors.newSingleThreadExecutor();
- service.execute(future);
- Boolean result = null;
- try {
- result = (Boolean) future.get(15, TimeUnit.SECONDS); // wait 1
- // second
- } catch (TimeoutException ex) {
- // timed out. Try to stop the code if possible.
- log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task");
- future.cancel(true);
- } catch (Exception ex) {
- // timed out. Try to stop the code if possible.
- log.info("Timeout Occured - Kafka connection closure with in 15 seconds by a Executors task" + ex);
- future.cancel(true);
- return false;
- }
- service.shutdown();
- return true;
- }
-
- /**
- * getConsumerGroup() returns Consumer group
- *
- * @return
- */
- public String getConsumerGroup() {
- return fGroup;
- }
-
- /**
- * getConsumerId returns Consumer Id
- *
- * @return
- */
- public String getConsumerId() {
- return fId;
- }
-
- /**
- * getState returns kafkaconsumer state
- *
- * @return
- */
- private KafkaConsumer.State getState() {
- return this.state;
- }
-
- /**
- * setState() sets the kafkaConsumer state
- *
- * @param state
- */
- private void setState(KafkaConsumer.State state) {
- this.state = state;
- }
-
- // private ConsumerConnector fConnector;
- private final String fTopic;
- private final String fGroup;
- private final String fId;
- private final String fLogTag;
- // private final KafkaStream<byte[], byte[]> fStream;
- private final org.apache.kafka.clients.consumer.KafkaConsumer<String, String> kConsumer;
- private long fCreateTimeMs;
- private long fLastTouch;
- private long offset;
- private KafkaConsumer.State state;
- private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumer.class);
- private final LinkedBlockingQueue<ConsumerRecord<String,String>> fPendingMsgs;
- // private static final Logger log =
- // LoggerFactory.getLogger(KafkaConsumer.class);
-
- @Override
- public void commitOffsets() {
- if (getState() == KafkaConsumer.State.CLOSED) {
- log.warn("commitOffsets() called on closed KafkaConsumer " + getName());
- return;
- }
- kConsumer.commitSync();
- // fConsumer.close();
-
- }
-
-
-
- @Override
- public void setOffset(long offsetval) {
- // TODO Auto-generated method stub
- offset = offsetval;
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java
deleted file mode 100644
index 83c08ec..0000000
--- a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaConsumerCache.java
+++ /dev/null
@@ -1,742 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.kafka;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Enumeration;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import javax.annotation.Resource;
-
-import org.I0Itec.zkclient.exception.ZkException;
-import org.I0Itec.zkclient.exception.ZkInterruptedException;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.imps.CuratorFrameworkState;
-import org.apache.curator.framework.recipes.cache.ChildData;
-import org.apache.curator.framework.recipes.cache.PathChildrenCache;
-import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
-import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
-import org.apache.curator.framework.state.ConnectionState;
-import org.apache.curator.framework.state.ConnectionStateListener;
-import org.apache.curator.utils.EnsurePath;
-import org.apache.curator.utils.ZKPaths;
-import org.apache.http.annotation.NotThreadSafe;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.KeeperException.NoNodeException;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.context.annotation.ComponentScan;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.exception.DMaaPErrorMessages;
-import com.att.dmf.mr.utils.ConfigurationReader;
-
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.metrics.CdmTimer;
-
-/**
- * @NotThreadSafe but expected to be used within KafkaConsumerFactory, which
- * must be
- * @author peter
- *
- */
-@NotThreadSafe
-public class KafkaConsumerCache {
-
- private static KafkaConsumerCache kafkaconscache = null;
-
- public static KafkaConsumerCache getInstance() {
- if (kafkaconscache == null)
- kafkaconscache = new KafkaConsumerCache();
-
- return kafkaconscache;
- }
-
- private static final String kSetting_ConsumerHandoverWaitMs = "cambria.consumer.cache.handoverWaitMs";
- private static final int kDefault_ConsumerHandoverWaitMs = 500;
-
- private static final String kSetting_SweepEverySeconds = "cambria.consumer.cache.sweepFreqSeconds";
- private static final String kSetting_TouchEveryMs = "cambria.consumer.cache.touchFreqMs";
-
- private static final String kSetting_ZkBasePath = "cambria.consumer.cache.zkBasePath";
- private static final String kDefault_ZkBasePath = CambriaConstants.kDefault_ZkRoot + "/consumerCache";
-
- // kafka defaults to timing out a client after 6 seconds of inactivity, but
- // it heartbeats even when the client isn't fetching. Here, we don't
- // want to prematurely rebalance the consumer group. Assuming clients are
- // hitting
- // the server at least every 30 seconds, timing out after 2 minutes should
- // be okay.
- // FIXME: consider allowing the client to specify its expected call rate?
- private static final long kDefault_MustTouchEveryMs = 1000L*60*2;
-
- // check for expirations pretty regularly
- private static final long kDefault_SweepEverySeconds = 15;
-
- private enum Status {
- NOT_STARTED, CONNECTED, DISCONNECTED, SUSPENDED
- }
-
-
-
-
- @Autowired
- private DMaaPErrorMessages errorMessages;
-
-
- /**
- * User defined exception class for kafka consumer cache
- *
- * @author nilanjana.maity
- *
- */
- public class KafkaConsumerCacheException extends Exception {
- /**
- * To throw the exception
- *
- * @param t
- */
- KafkaConsumerCacheException(Throwable t) {
- super(t);
- }
-
- /**
- *
- * @param s
- */
- public KafkaConsumerCacheException(String s) {
- super(s);
- }
-
- private static final long serialVersionUID = 1L;
- }
-
- /**
- * Creates a KafkaConsumerCache object. Before it is used, you must call
- * startCache()
- *
- * @param apiId
- * @param s
- * @param metrics
- */
- public KafkaConsumerCache() {
-
- String strkSetting_ZkBasePath = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- kSetting_ZkBasePath);
- if (null == strkSetting_ZkBasePath)
- strkSetting_ZkBasePath = kDefault_ZkBasePath;
- fBaseZkPath = strkSetting_ZkBasePath;
-
- fConsumers = new ConcurrentHashMap<>();
- fSweepScheduler = Executors.newScheduledThreadPool(1);
-
- curatorConsumerCache = null;
-
- status = Status.NOT_STARTED;
- // Watcher for consumer rebalancing across nodes. Kafka011 rebalancing
- // work around
-
- listener = new ConnectionStateListener() {
- public void stateChanged(CuratorFramework client, ConnectionState newState) {
- if (newState == ConnectionState.LOST) {
-
- log.info("ZooKeeper connection expired");
- handleConnectionLoss();
- } else if (newState == ConnectionState.READ_ONLY) {
- log.warn("ZooKeeper connection set to read only mode.");
- } else if (newState == ConnectionState.RECONNECTED) {
- log.info("ZooKeeper connection re-established");
- handleReconnection();
- } else if (newState == ConnectionState.SUSPENDED) {
- log.warn("ZooKeeper connection has been suspended.");
- handleConnectionSuspended();
- }
- }
- };
- }
-
- /**
- * Start the cache service. This must be called before any get/put
- * operations.
- *
- * @param mode
- * DMAAP or cambria
- * @param curator
- * @throws IOException
- * @throws KafkaConsumerCacheException
- */
- public void startCache(String mode, CuratorFramework curator) throws KafkaConsumerCacheException {
-
- if (fApiId == null) {
- throw new IllegalArgumentException("API Node ID must be specified.");
- }
-
- try {
-
- if (mode != null && mode.equals(CambriaConstants.DMAAP)) {
- curator = getCuratorFramework(curator);
- }
- curator.getConnectionStateListenable().addListener(listener);
- setStatus(Status.CONNECTED);
- curatorConsumerCache = new PathChildrenCache(curator, fBaseZkPath, true);
- curatorConsumerCache.start();
- curatorConsumerCache.getListenable().addListener(new PathChildrenCacheListener() {
- public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
- switch (event.getType()) {
- case CHILD_ADDED: {
- try {
- final String apiId = new String(event.getData().getData());
- final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
-
- log.info(apiId + " started consumer " + consumer);
- } catch (Exception ex) {
- log.info("#Error Occured during Adding child" + ex);
- }
- break;
- }
- case CHILD_UPDATED: {
- final String apiId = new String(event.getData().getData());
- final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
-
- if (fConsumers.containsKey(consumer)) {
- log.info(apiId + " claimed consumer " + consumer + " from " + fApiId
- + " but wont hand over");
- // Commented so that it dont give the connection
- // until the active node is running for this client
- // id.
- dropClaimedConsumer(consumer);
- }
-
- break;
- }
- case CHILD_REMOVED: {
- final String consumer = ZKPaths.getNodeFromPath(event.getData().getPath());
-
- if (fConsumers.containsKey(consumer)) {
- log.info("Someone wanted consumer " + consumer
- + " gone; but not removing it from the cache");
- dropConsumer(consumer, false);
- }
-
- break;
- }
-
- default:
- break;
- }
- }
- });
-
- // initialize the ZK path
- EnsurePath ensurePath = new EnsurePath(fBaseZkPath);
- ensurePath.ensure(curator.getZookeeperClient());
-
-
-
- long freq = kDefault_SweepEverySeconds;
- String strkSetting_SweepEverySeconds = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- kSetting_SweepEverySeconds);
- if (null != strkSetting_SweepEverySeconds) {
- freq = Long.parseLong(strkSetting_SweepEverySeconds);
- }
-
- fSweepScheduler.scheduleAtFixedRate(new sweeper(), freq, freq, TimeUnit.SECONDS);
- log.info("KafkaConsumerCache started");
- log.info("sweeping cached clients every " + freq + " seconds");
- } catch (ZkException e) {
- log.error("@@@@@@ ZK Exception occured for " + e);
- throw new KafkaConsumerCacheException(e);
- } catch (Exception e) {
- log.error("@@@@@@ Exception occured for " + e);
- throw new KafkaConsumerCacheException(e);
- }
- }
-
- /**
- * Getting the curator oject to start the zookeeper connection estabished
- *
- * @param curator
- * @return curator object
- */
- public static CuratorFramework getCuratorFramework(CuratorFramework curator) {
- if (curator.getState() == CuratorFrameworkState.LATENT) {
- curator.start();
-
- try {
- curator.blockUntilConnected();
- } catch (InterruptedException e) {
- log.error("error while setting curator framework :",e);
- Thread.currentThread().interrupt();
- }
- }
-
- return curator;
- }
-
- /**
- * Stop the cache service.
- */
- public void stopCache() {
- setStatus(Status.DISCONNECTED);
-
- final CuratorFramework curator = ConfigurationReader.getCurator();
-
- if (curator != null) {
- try {
- curator.getConnectionStateListenable().removeListener(listener);
- curatorConsumerCache.close();
- log.info("Curator client closed");
- } catch (ZkInterruptedException e) {
- log.warn("Curator client close interrupted: " + e.getMessage());
- } catch (IOException e) {
- log.warn("Error while closing curator PathChildrenCache for KafkaConsumerCache" + e.getMessage());
- }
-
- curatorConsumerCache = null;
- }
-
- if (fSweepScheduler != null) {
- fSweepScheduler.shutdownNow();
- log.info("cache sweeper stopped");
- }
-
- if (fConsumers != null) {
- fConsumers.clear();
- fConsumers = null;
- }
-
- setStatus(Status.NOT_STARTED);
-
- log.info("Consumer cache service stopped");
- }
-
- /**
- * Get a cached consumer by topic, group, and id, if it exists (and remains
- * valid) In addition, this method waits for all other consumer caches in
- * the cluster to release their ownership and delete their version of this
- * consumer.
- *
- * @param topic
- * @param consumerGroupId
- * @param clientId
- * @return a consumer, or null
- */
- public Kafka011Consumer getConsumerFor(String topic, String consumerGroupId, String clientId)
- throws KafkaConsumerCacheException {
- if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
- throw new KafkaConsumerCacheException("The cache service is unavailable.");
-
- final String consumerKey = makeConsumerKey(topic, consumerGroupId, clientId);
- final Kafka011Consumer kc = fConsumers.get(consumerKey);
-
- if (kc != null) {
- log.debug("Consumer cache hit for [" + consumerKey + "], last was at " + kc.getLastTouch());
- kc.touch();
- fMetrics.onKafkaConsumerCacheHit();
- } else {
- log.debug("Consumer cache miss for [" + consumerKey + "]");
- fMetrics.onKafkaConsumerCacheMiss();
- }
-
- return kc;
- }
-
- /**
- * Get a cached consumer by topic, group, and id, if it exists (and remains
- * valid) In addition, this method waits for all other consumer caches in
- * the cluster to release their ownership and delete their version of this
- * consumer.
- *
- * @param topic
- * @param consumerGroupId
- * @param clientId
- * @return a consumer, or null
- */
- public ArrayList<Kafka011Consumer> getConsumerListForCG(String topicgroup, String clientId)
- throws KafkaConsumerCacheException {
- if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
- throw new KafkaConsumerCacheException("The cache service is unavailable.");
- ArrayList<Kafka011Consumer> kcl = new ArrayList<>();
-
-
- Enumeration<String> strEnum = fConsumers.keys();
- String consumerLocalKey = null;
- while (strEnum.hasMoreElements()) {
- consumerLocalKey = strEnum.nextElement();
-
- if (consumerLocalKey.startsWith(topicgroup) && (!consumerLocalKey.endsWith("::" + clientId))) {
-
-
-
-
- kcl.add(fConsumers.get(consumerLocalKey));
-
- }
- }
-
- return kcl;
- }
-
- public ArrayList<Kafka011Consumer> getConsumerListForCG(String group) throws KafkaConsumerCacheException {
- if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
- throw new KafkaConsumerCacheException("The cache service is unavailable.");
- ArrayList<Kafka011Consumer> kcl = new ArrayList<>();
-
- Enumeration<String> strEnum = fConsumers.keys();
- String consumerLocalKey = null;
- while (strEnum.hasMoreElements()) {
- consumerLocalKey = strEnum.nextElement();
-
- if (consumerLocalKey.startsWith(group)) {
-
-
- kcl.add(fConsumers.get(consumerLocalKey));
-
- }
- }
-
- return kcl;
- }
-
- /**
- * Put a consumer into the cache by topic, group and ID
- *
- * @param topic
- * @param consumerGroupId
- * @param consumerId
- * @param consumer
- * @throws KafkaConsumerCacheException
- */
- public void putConsumerFor(String topic, String consumerGroupId, String consumerId, Kafka011Consumer consumer)
- throws KafkaConsumerCacheException {
- if (getStatus() != KafkaConsumerCache.Status.CONNECTED)
- throw new KafkaConsumerCacheException("The cache service is unavailable.");
-
- final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
- fConsumers.put(consumerKey, consumer);
-
-
-
- log.info("^@ Consumer Added to Cache Consumer Key" + consumerKey + " ApiId" + fApiId);
- }
-
- public Collection<? extends Consumer> getConsumers() {
- return new LinkedList<>(fConsumers.values());
- }
-
- /**
- * This method is to drop all the consumer
- */
- public void dropAllConsumers() {
- for (Entry<String, Kafka011Consumer> entry : fConsumers.entrySet()) {
- dropConsumer(entry.getKey(), true);
- }
-
- // consumers should be empty here
- if (fConsumers.size() > 0) {
- log.warn("During dropAllConsumers, the consumer map is not empty.");
- fConsumers.clear();
- }
- }
-
- /**
- * Drop a consumer from our cache due to a timeout
- *
- * @param key
- */
- private void dropTimedOutConsumer(String key) {
- fMetrics.onKafkaConsumerTimeout();
-
- if (!fConsumers.containsKey(key)) {
- log.warn("Attempted to drop a timed out consumer which was not in our cache: " + key);
- return;
- }
-
- // First, drop this consumer from our cache
- boolean isdrop = dropConsumer(key, true);
- if (!isdrop) {
- return;
- }
- final CuratorFramework curator = ConfigurationReader.getCurator();
-
- try {
- curator.delete().guaranteed().forPath(fBaseZkPath + "/" + key);
- log.info(" ^ deleted " + fBaseZkPath + "/" + key);
- } catch (NoNodeException e) {
- log.warn("A consumer was deleted from " + fApiId
- + "'s cache, but no Cambria API node had ownership of it in ZooKeeper");
- } catch (Exception e) {
- log.debug("Unexpected exception while deleting consumer: " + e.getMessage());
- log.info(" %%%%%%@# Unexpected exception while deleting consumer: " + e.getMessage());
- }
-
- try {
- int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
- String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- kSetting_ConsumerHandoverWaitMs);
- if (strkSetting_ConsumerHandoverWaitMs != null)
- consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
- Thread.sleep(consumerHandoverWaitMs);
- } catch (InterruptedException e) {
- log.error("InterruptedException in dropTimedOutConsumer",e);
- Thread.currentThread().interrupt();
- }
- log.info("Dropped " + key + " consumer due to timeout");
- }
-
- /**
- * Drop a consumer from our cache due to another API node claiming it as
- * their own.
- *
- * @param key
- */
- private void dropClaimedConsumer(String key) {
- // if the consumer is still in our cache, it implies a claim.
- if (fConsumers.containsKey(key)) {
- fMetrics.onKafkaConsumerClaimed();
- log.info("Consumer [" + key + "] claimed by another node.");
- }
- log.info("^dropping claimed Kafka consumer " + key);
- dropConsumer(key, false);
- }
-
- /**
- * Removes the consumer from the cache and closes its connection to the
- * kafka broker(s).
- *
- * @param key
- * @param dueToTimeout
- */
- private boolean dropConsumer(String key, boolean dueToTimeout) {
- final Kafka011Consumer kc = fConsumers.get(key);
- log.info("closing Kafka consumer " + key + " object " + kc);
- if (kc != null) {
-
- if (kc.close()) {
- fConsumers.remove(key);
-
- } else {
- return false;
- }
- }
- return true;
- }
-
- // private final rrNvReadable fSettings;
- private MetricsSet fMetrics;
- private final String fBaseZkPath;
- private final ScheduledExecutorService fSweepScheduler;
- private String fApiId;
-
- public void setfMetrics(final MetricsSet metrics) {
- this.fMetrics = metrics;
- }
-
- public void setfApiId(final String id) {
- this.fApiId = id;
- }
-
- private final ConnectionStateListener listener;
-
- private ConcurrentHashMap<String, Kafka011Consumer> fConsumers;
- private PathChildrenCache curatorConsumerCache;
-
- private volatile Status status;
-
- private void handleReconnection() {
-
- log.info("Reading current cache data from ZK and synchronizing local cache");
- final List<ChildData> cacheData = curatorConsumerCache.getCurrentData();
- // Remove all the consumers in this API nodes cache that now belong to
- // other API nodes.
- for (ChildData cachedConsumer : cacheData) {
- final String consumerId = ZKPaths.getNodeFromPath(cachedConsumer.getPath());
- final String owningApiId = (cachedConsumer.getData() != null) ? new String(cachedConsumer.getData())
- : "undefined";
- if (!fApiId.equals(owningApiId)) {
- fConsumers.remove(consumerId); // Commented to avoid removing
- // the value cache hashmap but the lock still exists.
- // This is not considered in kafka consumer Factory
- log.info("@@@ Validating current cache data from ZK and synchronizing local cache" + owningApiId
- + " removing " + consumerId);
- }
- }
-
- setStatus(Status.CONNECTED);
- }
-
- private void handleConnectionSuspended() {
- log.info("Suspending cache until ZK connection is re-established");
-
- setStatus(Status.SUSPENDED);
- }
-
- private void handleConnectionLoss() {
- log.info("Clearing consumer cache (shutting down all Kafka consumers on this node)");
-
- setStatus(Status.DISCONNECTED);
-
- closeAllCachedConsumers();
- fConsumers.clear();
- }
-
- private void closeAllCachedConsumers() {
- for (Entry<String, Kafka011Consumer> entry : fConsumers.entrySet()) {
- try {
- entry.getValue().close();
- } catch (Exception e) {
- log.info("@@@@@@ Error occurd while closing Clearing All cache " + e);
- }
- }
- }
-
- private static String makeConsumerKey(String topic, String consumerGroupId, String clientId) {
- return topic + "::" + consumerGroupId + "::" + clientId;
- }
-
- /**
- * This method is to get a lock
- *
- * @param topic
- * @param consumerGroupId
- * @param consumerId
- * @throws KafkaConsumerCacheException
- */
- public void signalOwnership(final String topic, final String consumerGroupId, final String consumerId)
- throws KafkaConsumerCacheException {
- // get a lock at <base>/<topic>::<consumerGroupId>::<consumerId>
- final String consumerKey = makeConsumerKey(topic, consumerGroupId, consumerId);
-
- try(final CdmTimer timer = new CdmTimer(fMetrics, "CacheSignalOwnership")) {
- final String consumerPath = fBaseZkPath + "/" + consumerKey;
- log.debug(fApiId + " attempting to claim ownership of consumer " + consumerKey);
- final CuratorFramework curator = ConfigurationReader.getCurator();
-
- try {
- curator.setData().forPath(consumerPath, fApiId.getBytes());
- } catch (KeeperException.NoNodeException e) {
- curator.create().creatingParentsIfNeeded().forPath(consumerPath, fApiId.getBytes());
- }
- log.info(fApiId + " successfully claimed ownership of consumer " + consumerKey);
- timer.end();
- } catch (Exception e) {
- log.error(fApiId + " failed to claim ownership of consumer " + consumerKey);
- throw new KafkaConsumerCacheException(e);
- }
-
- log.info("Backing off to give the Kafka broker time to clean up the ZK data for this consumer");
-
- try {
- int consumerHandoverWaitMs = kDefault_ConsumerHandoverWaitMs;
- String strkSetting_ConsumerHandoverWaitMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- kSetting_ConsumerHandoverWaitMs);
- if (strkSetting_ConsumerHandoverWaitMs != null)
- consumerHandoverWaitMs = Integer.parseInt(strkSetting_ConsumerHandoverWaitMs);
- Thread.sleep(consumerHandoverWaitMs);
- } catch (InterruptedException e) {
- log.error("InterruptedException in signalOwnership",e);
- Thread.currentThread().interrupt();
- }
- }
-
- public KafkaLiveLockAvoider2 getkafkaLiveLockAvoiderObj() {
- return null;
- }
-
- public void sweep() {
- final LinkedList<String> removals = new LinkedList<String>();
- long mustTouchEveryMs = kDefault_MustTouchEveryMs;
- String strkSetting_TouchEveryMs = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- kSetting_TouchEveryMs);
- if (null != strkSetting_TouchEveryMs) {
- mustTouchEveryMs = Long.parseLong(strkSetting_TouchEveryMs);
- }
-
-
- final long oldestAllowedTouchMs = System.currentTimeMillis() - mustTouchEveryMs;
-
- for (Entry<String, Kafka011Consumer> e : fConsumers.entrySet()) {
- final long lastTouchMs = e.getValue().getLastTouch();
- log.debug("consumer #####1" + e.getKey() + " " + lastTouchMs + " < " + oldestAllowedTouchMs);
-
- if (lastTouchMs < oldestAllowedTouchMs) {
- log.info("consumer " + e.getKey() + " has expired");
- removals.add(e.getKey());
- }
- }
-
- for (String key : removals) {
- dropTimedOutConsumer(key);
- }
- }
-
- /**
- * Creating a thread to run the sweep method
- *
- * @author nilanjana.maity
- *
- */
- private class sweeper implements Runnable {
- /**
- * run method
- */
- public void run() {
- sweep();
- }
- }
-
- /**
- * This method is to drop consumer
- *
- * @param topic
- * @param consumerGroup
- * @param clientId
- */
- public void dropConsumer(String topic, String consumerGroup, String clientId) {
- dropConsumer(makeConsumerKey(topic, consumerGroup, clientId), false);
- }
-
- private Status getStatus() {
- return this.status;
- }
-
- private void setStatus(Status status) {
- this.status = status;
- }
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaConsumerCache.class);
-
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java
deleted file mode 100644
index f521b41..0000000
--- a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaLiveLockAvoider2.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.kafka;
-
-
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-import javax.annotation.PostConstruct;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.recipes.locks.InterProcessMutex;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.Watcher;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Component;
-
-//@ComponentScan(basePackages="com.att.dmf.mr.backends.kafka")
-@Component
-public class KafkaLiveLockAvoider2 {
-
- public static final String ZNODE_ROOT = "/live-lock-avoid";
- public static final String ZNODE_LOCKS = "/locks";
- public static final String ZNODE_UNSTICK_TASKS ="/unstick-tasks";
-
- private static String locksPath = ZNODE_ROOT+ZNODE_LOCKS;
- private static String tasksPath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS;
- private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaLiveLockAvoider2.class.getName());
-
- @Autowired
- @Qualifier("curator")
- private CuratorFramework curatorFramework;
-
- @PostConstruct
- public void init() {
- log.info("Welcome......................................................................................");
- try {
- if (curatorFramework.checkExists().forPath(locksPath) == null) {
- curatorFramework.create().creatingParentsIfNeeded().forPath(locksPath);
- }
- if (curatorFramework.checkExists().forPath(tasksPath) == null) {
- curatorFramework.create().creatingParentsIfNeeded().forPath(tasksPath);
- }
-
- } catch (Exception e) {
-
- log.error("Error during creation of permanent Znodes under /live-lock-avoid ",e);
-
- }
-
-
- }
- public void unlockConsumerGroup(String appId, String groupName) throws Exception {
-
- log.info("Signalling unlock to all conumsers of in group [{}] now, " , groupName);
-
- String fullLockPath = String.format("%s/%s", locksPath, groupName );
- String fullTasksPath = null;
-
- try {
-
- //Use the Curator recipe for a Mutex lock, only one process can be broadcasting unlock instructions for a group
- InterProcessMutex lock = new InterProcessMutex(curatorFramework, fullLockPath);
- if ( lock.acquire(100L, TimeUnit.MILLISECONDS) )
- {
- try
- {
- List<String> taskNodes = curatorFramework.getChildren().forPath(tasksPath);
- for (String taskNodeName : taskNodes) {
- if(!taskNodeName.equals(appId)) {
-
- fullTasksPath = String.format("%s/%s/%s", tasksPath, taskNodeName, groupName);
- log.info("Writing groupName {} to path {}",groupName, fullTasksPath);
-
-
- if(curatorFramework.checkExists().forPath(fullTasksPath) != null) {
- curatorFramework.delete().forPath(fullTasksPath);
- }
- curatorFramework.create().withMode(CreateMode.EPHEMERAL).forPath(fullTasksPath);
- }
- }
-
-
- }
- finally
- {
- //Curator lock recipe requires a acquire() to be followed by a release()
- lock.release();
- }
- }else {
- log.info("Could not obtain the avoider lock, another process has the avoider lock? {}", !lock.isAcquiredInThisProcess() );
- }
-
-
- } catch (Exception e) {
- log.error("Error setting up either lock ZNode {} or task ZNode {}",fullLockPath, fullTasksPath,e);
- throw e;
- }
-
-
- }
-
- /*
- * Shoud be called once per MR server instance.
- *
- */
- public void startNewWatcherForServer(String appId, LiveLockAvoidance avoidanceCallback) {
- LockInstructionWatcher instructionWatcher = new LockInstructionWatcher(curatorFramework,avoidanceCallback,this);
- assignNewProcessNode(appId, instructionWatcher);
-
- }
-
-
- protected void assignNewProcessNode(String appId, Watcher processNodeWatcher ) {
-
- String taskHolderZnodePath = ZNODE_ROOT+ZNODE_UNSTICK_TASKS+"/"+appId;
-
-
- try {
-
- if(curatorFramework.checkExists().forPath(taskHolderZnodePath) != null) {
- curatorFramework.delete().deletingChildrenIfNeeded().forPath(taskHolderZnodePath);
-
- }
- curatorFramework.create().forPath(taskHolderZnodePath);
- //setup the watcher
- curatorFramework.getChildren().usingWatcher(processNodeWatcher).inBackground().forPath(taskHolderZnodePath);
- log.info("Done creating task holder and watcher for APP name: {}",appId);
-
- } catch (Exception e) {
- log.error("Could not add new processing node for name {}", appId, e);
- }
-
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java b/src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java
deleted file mode 100644
index 1e78b01..0000000
--- a/src/main/java/com/att/dmf/mr/backends/kafka/KafkaPublisher.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.kafka;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.kafka.clients.producer.KafkaProducer;
-import org.apache.kafka.clients.producer.Producer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.json.JSONException;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.dmf.mr.backends.Publisher;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.utils.Utils;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-
-
-/**
- * Sends raw JSON objects into Kafka.
- *
- * Could improve space: BSON rather than JSON?
- *
- * @author peter
- *
- */
-
-public class KafkaPublisher implements Publisher {
- /**
- * constructor initializing
- *
- * @param settings
- * @throws rrNvReadable.missingReqdSetting
- */
- public KafkaPublisher(@Qualifier("propertyReader") rrNvReadable settings) throws rrNvReadable.missingReqdSetting {
- //fSettings = settings;
-
- final Properties props = new Properties();
- /*transferSetting(fSettings, props, "metadata.broker.list", "localhost:9092");
- transferSetting(fSettings, props, "request.required.acks", "1");
- transferSetting(fSettings, props, "message.send.max.retries", "5");
- transferSetting(fSettings, props, "retry.backoff.ms", "150"); */
- String kafkaConnUrl= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka.metadata.broker.list");
- if(null==kafkaConnUrl){
-
- kafkaConnUrl="localhost:9092";
- }
-
-
- if(Utils.isCadiEnabled()){
- transferSetting( props, "sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
- transferSetting( props, "security.protocol", "SASL_PLAINTEXT");
- transferSetting( props, "sasl.mechanism", "PLAIN");
- }
- transferSetting( props, "bootstrap.servers",kafkaConnUrl);
-
- transferSetting( props, "request.required.acks", "1");
- transferSetting( props, "message.send.max.retries", "5");
- transferSetting(props, "retry.backoff.ms", "150");
-
-
-
- props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
- props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
-
-
-
- fProducer = new KafkaProducer<>(props);
- }
-
- /**
- * Send a message with a given topic and key.
- *
- * @param msg
- * @throws FailedToSendMessageException
- * @throws JSONException
- */
- @Override
- public void sendMessage(String topic, message msg) throws IOException{
- final List<message> msgs = new LinkedList<message>();
- msgs.add(msg);
- sendMessages(topic, msgs);
- }
-
- /**
- * method publishing batch messages
- * This method is commented from 0.8 to 0.11 upgrade
- * @param topic
- * @param kms
- * throws IOException
- *
- public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws IOException {
- try {
- fProducer.send(kms);
-
- } catch (FailedToSendMessageException excp) {
- log.error("Failed to send message(s) to topic [" + topic + "].", excp);
- throw new FailedToSendMessageException(excp.getMessage(), excp);
- }
-
- } */
-
-
- /*
- * Kafka 11.0 Interface
- * @see com.att.nsa.cambria.backends.Publisher#sendBatchMessageNew(java.lang.String, java.util.ArrayList)
- */
- public void sendBatchMessageNew(String topic, ArrayList <ProducerRecord<String,String>> kms) throws IOException {
- try {
- for (ProducerRecord<String,String> km : kms) {
- fProducer.send(km);
- }
-
- } catch (Exception excp) {
- log.error("Failed to send message(s) to topic [" + topic + "].", excp);
- throw new IOException(excp.getMessage(), excp);
- }
-
- }
-
- /**
- * Send a set of messages. Each must have a "key" string value.
- *
- * @param topic
- * @param msg
- * @throws FailedToSendMessageException
- * @throws JSONException
- *
- @Override
- public void sendMessages(String topic, List<? extends message> msgs)
- throws IOException, FailedToSendMessageException {
- log.info("sending " + msgs.size() + " events to [" + topic + "]");
-
- final List<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(msgs.size());
- for (message o : msgs) {
- final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, o.getKey(), o.toString());
- kms.add(data);
- }
- try {
- fProducer.send(kms);
-
- } catch (FailedToSendMessageException excp) {
- log.error("Failed to send message(s) to topic [" + topic + "].", excp);
- throw new FailedToSendMessageException(excp.getMessage(), excp);
- }
- } */
- @Override
- public void sendMessagesNew(String topic, List<? extends message> msgs)
- throws IOException {
- log.info("sending " + msgs.size() + " events to [" + topic + "]");
-try{
- final List<ProducerRecord<String, String>> kms = new ArrayList<>(msgs.size());
- for (message o : msgs) {
-
- final ProducerRecord<String, String> data = new ProducerRecord<>(topic, o.getKey(), o.toString());
-
-
- try {
-
- fProducer.send(data);
-
- } catch (Exception excp) {
- log.error("Failed to send message(s) to topic [" + topic + "].", excp);
- throw new Exception(excp.getMessage(), excp);
- }
- }
-
- }catch(Exception e){}
-}
- //private final rrNvReadable fSettings;
-
-
- private Producer<String, String> fProducer;
-
- /**
- * It sets the key value pair
- * @param topic
- * @param msg
- * @param key
- * @param defVal
- */
- private void transferSetting(Properties props, String key, String defVal) {
- String kafka_prop= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"kafka." + key);
- if (null==kafka_prop) kafka_prop=defVal;
- //props.put(key, settings.getString("kafka." + key, defVal));
- props.put(key, kafka_prop);
- }
-
- //private static final Logger log = LoggerFactory.getLogger(KafkaPublisher.class);
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(KafkaPublisher.class);
-
- @Override
- public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
- // TODO Auto-generated method stub
-
- }
-
-
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java b/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java
deleted file mode 100644
index a13ecea..0000000
--- a/src/main/java/com/att/dmf/mr/backends/kafka/LiveLockAvoidance.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.kafka;
-
-
-
-/**
- * Live Lock Avoidance interface. To be implemented by the main message router client
- *
- */
-public interface LiveLockAvoidance {
-
- /**
- * Gets the unique id
- * @return the unique id for the Message Router server instance
- */
- String getAppId();
-
-
- /**
- * Main callback to inform the local MR server instance that all consumers in a group need to soft poll
- * @param groupName name of the Kafka consumer group needed a soft poll
- */
- void handleRebalanceUnlock( String groupName);
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java b/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java
deleted file mode 100644
index cc3338b..0000000
--- a/src/main/java/com/att/dmf/mr/backends/kafka/LockInstructionWatcher.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.kafka;
-
-import java.util.List;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.zookeeper.WatchedEvent;
-import org.apache.zookeeper.Watcher;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- *
- * LockInstructionWatcher
- * A package-private class used internally by the KafkaLiveLockAvoider.
- *
- * This class implements the zookeeper Watcher callback and listens for changes on child nodes changing.
- * Each child node is actually a Kafka group name that needs to be soft polled. Deletion of the child nodes
- * after soft poll unlocking is finished.
- *
- *
- */
-public class LockInstructionWatcher implements Watcher {
-
- private CuratorFramework curatorFramework;
- private LiveLockAvoidance avoidanceCallback;
- private KafkaLiveLockAvoider2 avoider;
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(LockInstructionWatcher.class.getName());
-
-
- public LockInstructionWatcher(CuratorFramework curatorFramework, LiveLockAvoidance avoidanceCallback,
- KafkaLiveLockAvoider2 avoider) {
- super();
- this.curatorFramework = curatorFramework;
- this.avoidanceCallback = avoidanceCallback;
- this.avoider = avoider;
- }
-
-
- @Override
- public void process(WatchedEvent event) {
-
- switch (event.getType()) {
- case NodeChildrenChanged:
-
-
- try {
-
- log.info("node children changed at path: {}", event.getPath());
-
- List<String> children = curatorFramework.getChildren().forPath(event.getPath());
-
- log.info("found children nodes prodcessing now");
- for (String child : children) {
- String childPath = String.format("%s/%s", event.getPath(), child);
- log.info("Processing child task at node {}",childPath);
- avoidanceCallback.handleRebalanceUnlock( child);
- log.info("Deleting child task at node {}",childPath);
- curatorFramework.delete().forPath(childPath);
- }
- //reset the watch with the avoider
- avoider.assignNewProcessNode(avoidanceCallback.getAppId(), this);
-
-
- } catch (Exception e) {
- log.error("Error manipulating ZNode data in watcher",e);
- }
-
- break;
-
- default:
- log.info("Listner fired on path: {}, with event: {}", event.getPath(), event.getType());
- break;
- }
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java
deleted file mode 100644
index 237cac8..0000000
--- a/src/main/java/com/att/dmf/mr/backends/memory/MemoryConsumerFactory.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.memory;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.backends.ConsumerFactory;
-/**
- *
- * @author anowarul.islam
- *
- */
-public class MemoryConsumerFactory implements ConsumerFactory
-{
-
- private final MemoryQueue fQueue;
-
- /**
- *
- * Initializing constructor
- * @param q
- */
- public MemoryConsumerFactory ( MemoryQueue q )
- {
- fQueue = q;
- }
-
- /**
- *
- * @param topic
- * @param consumerGroupId
- * @param clientId
- * @param timeoutMs
- * @return Consumer
- */
- @Override
- public Consumer getConsumerFor ( String topic, String consumerGroupId, String clientId, int timeoutMs, String remotehost )
- {
- return new MemoryConsumer ( topic, consumerGroupId );
- }
-
- /**
- *
- * Define nested inner class
- *
- */
- private class MemoryConsumer implements Consumer
- {
-
- private final String fTopic;
- private final String fConsumer;
- private final long fCreateMs;
- private long fLastAccessMs;
-
- /**
- *
- * Initializing MemoryConsumer constructor
- * @param topic
- * @param consumer
- *
- */
- public MemoryConsumer ( String topic, String consumer )
- {
- fTopic = topic;
- fConsumer = consumer;
- fCreateMs = System.currentTimeMillis ();
- fLastAccessMs = fCreateMs;
- }
-
- @Override
- /**
- *
- * return consumer details
- */
- public Message nextMessage ()
- {
- return fQueue.get ( fTopic, fConsumer );
- }
-
- @Override
- public boolean close() {
- //Nothing to close/clean up.
- return true;
- }
- /**
- *
- */
- public void commitOffsets()
- {
- // ignoring this aspect
- }
- /**
- * get offset
- */
- public long getOffset()
- {
- return 0;
- }
-
- @Override
- /**
- * get consumer topic name
- */
- public String getName ()
- {
- return fTopic + "/" + fConsumer;
- }
-
- @Override
- public long getCreateTimeMs ()
- {
- return fCreateMs;
- }
-
- @Override
- public long getLastAccessMs ()
- {
- return fLastAccessMs;
- }
-
-
-
- @Override
- public void setOffset(long offset) {
- // TODO Auto-generated method stub
-
- }
-
-
- }
-
- @Override
- public void destroyConsumer(String topic, String consumerGroupId,
- String clientId) {
- //No cache for memory consumers, so NOOP
- }
-
- @Override
- public void dropCache ()
- {
- // nothing to do - there's no cache here
- }
-
- @Override
- /**
- * @return ArrayList<MemoryConsumer>
- */
- public Collection<? extends Consumer> getConsumers ()
- {
- return new ArrayList<> ();
- }
-
- @Override
- public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
- String remotehost) throws UnavailableException, CambriaApiException {
- // TODO Auto-generated method stub
- return null;
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java
deleted file mode 100644
index e0c80bd..0000000
--- a/src/main/java/com/att/dmf/mr/backends/memory/MemoryMetaBroker.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.memory;
-
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-import java.util.TreeSet;
-
-import com.att.dmf.mr.metabroker.Broker;
-import com.att.dmf.mr.metabroker.Topic;
-import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaApiKey;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public class MemoryMetaBroker implements Broker {
-
- private final MemoryQueue fQueue;
- private final HashMap<String, MemTopic> fTopics;
-
- /**
- *
- * @param mq
- * @param configDb
- * @param settings
- */
- public MemoryMetaBroker(MemoryQueue mq, ConfigDb configDb) {
-
- fQueue = mq;
- fTopics = new HashMap<>();
- }
-
- @Override
- public List<Topic> getAllTopics() {
- return new LinkedList<Topic>(fTopics.values());
- }
-
- @Override
- public Topic getTopic(String topic) {
- return fTopics.get(topic);
- }
-
- @Override
- public Topic createTopic(String topic, String desc, String ownerApiId, int partitions, int replicas,
- boolean transactionEnabled) throws TopicExistsException {
- if (getTopic(topic) != null) {
- throw new TopicExistsException(topic);
- }
- fQueue.createTopic(topic);
- fTopics.put(topic, new MemTopic(topic, desc, ownerApiId, transactionEnabled));
- return getTopic(topic);
- }
-
- @Override
- public void deleteTopic(String topic) {
- fTopics.remove(topic);
- fQueue.removeTopic(topic);
- }
-
- private static class MemTopic implements Topic {
-
- private final String fName;
- private final String fDesc;
- private final String fOwner;
- private NsaAcl fReaders;
- private NsaAcl fWriters;
- private boolean ftransactionEnabled;
- private String accessDenied = "User does not own this topic ";
-
- /**
- * constructor initialization
- *
- * @param name
- * @param desc
- * @param owner
- * @param transactionEnabled
- */
- public MemTopic(String name, String desc, String owner, boolean transactionEnabled) {
- fName = name;
- fDesc = desc;
- fOwner = owner;
- ftransactionEnabled = transactionEnabled;
- fReaders = null;
- fWriters = null;
- }
-
- @Override
- public String getOwner() {
- return fOwner;
- }
-
- @Override
- public NsaAcl getReaderAcl() {
- return fReaders;
- }
-
- @Override
- public NsaAcl getWriterAcl() {
- return fWriters;
- }
-
- @Override
- public void checkUserRead(NsaApiKey user) throws AccessDeniedException {
- if (fReaders != null && (user == null || !fReaders.canUser(user.getKey()))) {
- throw new AccessDeniedException(user == null ? "" : user.getKey());
- }
- }
-
- @Override
- public void checkUserWrite(NsaApiKey user) throws AccessDeniedException {
- if (fWriters != null && (user == null || !fWriters.canUser(user.getKey()))) {
- throw new AccessDeniedException(user == null ? "" : user.getKey());
- }
- }
-
- @Override
- public String getName() {
- return fName;
- }
-
- @Override
- public String getDescription() {
- return fDesc;
- }
-
- @Override
- public void permitWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
- if (!fOwner.equals(asUser.getKey())) {
- throw new AccessDeniedException(accessDenied + fName);
- }
- if (fWriters == null) {
- fWriters = new NsaAcl();
- }
- fWriters.add(publisherId);
- }
-
- @Override
- public void denyWritesFromUser(String publisherId, NsaApiKey asUser) throws AccessDeniedException {
- if (!fOwner.equals(asUser.getKey())) {
- throw new AccessDeniedException(accessDenied + fName);
- }
- fWriters.remove(publisherId);
- }
-
- @Override
- public void permitReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
- if (!fOwner.equals(asUser.getKey())) {
- throw new AccessDeniedException(accessDenied + fName);
- }
- if (fReaders == null) {
- fReaders = new NsaAcl();
- }
- fReaders.add(consumerId);
- }
-
- @Override
- public void denyReadsByUser(String consumerId, NsaApiKey asUser) throws AccessDeniedException {
- if (!fOwner.equals(asUser.getKey())) {
- throw new AccessDeniedException(accessDenied + fName);
- }
- fReaders.remove(consumerId);
- }
-
- @Override
- public boolean isTransactionEnabled() {
- return ftransactionEnabled;
- }
-
- @Override
- public Set<String> getOwners() {
- final TreeSet<String> set = new TreeSet<> ();
- set.add ( fOwner );
- return set;
- }
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java
deleted file mode 100644
index 25cb2df..0000000
--- a/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueue.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.memory;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.backends.Publisher.message;
-
-/**
- * When broker type is memory, then this class is doing all the topic related
- * operations
- *
- * @author anowarul.islam
- *
- */
-public class MemoryQueue {
- // map from topic to list of msgs
- private HashMap<String, LogBuffer> fQueue;
- private HashMap<String, HashMap<String, Integer>> fOffsets;
-
- /**
- * constructor storing hashMap objects in Queue and Offsets object
- */
- public MemoryQueue() {
- fQueue = new HashMap<>();
- fOffsets = new HashMap<>();
- }
-
- /**
- * method used to create topic
- *
- * @param topic
- */
- public synchronized void createTopic(String topic) {
- LogBuffer q = fQueue.get(topic);
- if (q == null) {
- q = new LogBuffer(1024 * 1024);
- fQueue.put(topic, q);
- }
- }
-
- /**
- * method used to remove topic
- *
- * @param topic
- */
- public synchronized void removeTopic(String topic) {
- LogBuffer q = fQueue.get(topic);
- if (q != null) {
- fQueue.remove(topic);
- }
- }
-
- /**
- * method to write message on topic
- *
- * @param topic
- * @param m
- */
- public synchronized void put(String topic, message m) {
- LogBuffer q = fQueue.get(topic);
- if (q == null) {
- createTopic(topic);
- q = fQueue.get(topic);
- }
- q.push(m.getMessage());
- }
-
- /**
- * method to read consumer messages
- *
- * @param topic
- * @param consumerName
- * @return
- */
- public synchronized Consumer.Message get(String topic, String consumerName) {
- final LogBuffer q = fQueue.get(topic);
- if (q == null) {
- return null;
- }
-
- HashMap<String, Integer> offsetMap = fOffsets.get(consumerName);
- if (offsetMap == null) {
- offsetMap = new HashMap<>();
- fOffsets.put(consumerName, offsetMap);
- }
- Integer offset = offsetMap.get(topic);
- if (offset == null) {
- offset = 0;
- }
-
- final msgInfo result = q.read(offset);
- if (result != null && result.msg != null) {
- offsetMap.put(topic, result.offset + 1);
- }
- return result;
- }
-
- /**
- * static inner class used to details about consumed messages
- *
- * @author anowarul.islam
- *
- */
- private static class msgInfo implements Consumer.Message {
- /**
- * published message which is consumed
- */
- public String msg;
- /**
- * offset associated with message
- */
- public int offset;
-
- /**
- * get offset of messages
- */
- @Override
- public long getOffset() {
- return offset;
- }
-
- /**
- * get consumed message
- */
- @Override
- public String getMessage() {
- return msg;
- }
- }
-
- /**
- *
- * @author sneha.d.desai
- *
- * private LogBuffer class has synchronized push and read method
- */
- private class LogBuffer {
- private int fBaseOffset;
- private final int fMaxSize;
- private final ArrayList<String> fList;
-
- /**
- * constructor initializing the offset, maxsize and list
- *
- * @param maxSize
- */
- public LogBuffer(int maxSize) {
- fBaseOffset = 0;
- fMaxSize = maxSize;
- fList = new ArrayList<>();
- }
-
- /**
- * pushing message
- *
- * @param msg
- */
- public synchronized void push(String msg) {
- fList.add(msg);
- while (fList.size() > fMaxSize) {
- fList.remove(0);
- fBaseOffset++;
- }
- }
-
- /**
- * reading messages
- *
- * @param offset
- * @return
- */
- public synchronized msgInfo read(int offset) {
- final int actual = Math.max(0, offset - fBaseOffset);
-
- final msgInfo mi = new msgInfo();
- mi.msg = (actual >= fList.size()) ? null : fList.get(actual);
- if (mi.msg == null)
- return null;
-
- mi.offset = actual + fBaseOffset;
- return mi;
- }
-
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java b/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java
deleted file mode 100644
index 2b43ed3..0000000
--- a/src/main/java/com/att/dmf/mr/backends/memory/MemoryQueuePublisher.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.memory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.kafka.clients.producer.ProducerRecord;
-
-import com.att.dmf.mr.backends.Publisher;
-import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
-
-
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public class MemoryQueuePublisher implements Publisher {
- /**
- *
- * @param q
- * @param b
- */
- public MemoryQueuePublisher(MemoryQueue q, MemoryMetaBroker b) {
- fBroker = b;
- fQueue = q;
- }
-
-
- /**
- *
- * @param topic
- * @param msg
- * @throws IOException
- */
- @Override
- public void sendMessage(String topic, message msg) throws IOException {
- if (null == fBroker.getTopic(topic)) {
- try {
- fBroker.createTopic(topic, topic, null, 8, 3, false);
- } catch (TopicExistsException e) {
- throw new RuntimeException(e);
- }
- }
- fQueue.put(topic, msg);
- }
-
- @Override
- /**
- * @param topic
- * @param msgs
- * @throws IOException
- */
-
- public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
-
- }
-
- public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
- }
-
- public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
- for (message m : msgs) {
- sendMessage(topic, m);
- }
- }
-
- private final MemoryMetaBroker fBroker;
- private final MemoryQueue fQueue;
-}
diff --git a/src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java b/src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java
deleted file mode 100644
index eb77dc2..0000000
--- a/src/main/java/com/att/dmf/mr/backends/memory/MessageLogger.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.backends.memory;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.kafka.clients.producer.ProducerRecord;
-
-import com.att.dmf.mr.backends.Publisher;
-
-
-
-/**
- * class used for logging perspective
- *
- * @author anowarul.islam
- *
- */
-public class MessageLogger implements Publisher {
- public MessageLogger() {
- }
-
- public void setFile(File f) throws FileNotFoundException {
- fStream = new FileOutputStream(f, true);
- }
-
- /**
- *
- * @param topic
- * @param msg
- * @throws IOException
- */
- @Override
- public void sendMessage(String topic, message msg) throws IOException {
- logMsg(msg);
- }
-
- /**
- * @param topic
- * @param msgs
- * @throws IOException
- */
- @Override
- public void sendMessages(String topic, List<? extends message> msgs) throws IOException {
- for (message m : msgs) {
- logMsg(m);
- }
- }
-
- /**
- * @param topic
- * @param kms
- * @throws IOException
-
- @Override
- public void sendBatchMessage(String topic, ArrayList<KeyedMessage<String, String>> kms) throws
-
- IOException {
- }
- */
- private FileOutputStream fStream;
-
- /**
- *
- * @param msg
- * @throws IOException
- */
- private void logMsg(message msg) throws IOException {
- String key = msg.getKey();
- if (key == null)
- key = "<none>";
-
- fStream.write('[');
- fStream.write(key.getBytes());
- fStream.write("] ".getBytes());
- fStream.write(msg.getMessage().getBytes());
- fStream.write('\n');
- }
- public void sendBatchMessageNew(String topic, ArrayList<ProducerRecord<String, String>> kms) throws IOException {
-
- }
-
- public void sendMessagesNew(String topic, List<? extends message> msgs) throws IOException {
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java b/src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java
deleted file mode 100644
index 4f0108f..0000000
--- a/src/main/java/com/att/dmf/mr/beans/ApiKeyBean.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.io.Serializable;
-
-import javax.xml.bind.annotation.XmlRootElement;
-
-import com.att.nsa.drumlin.till.data.uniqueStringGenerator;
-/**
- *
- * @author anowarul.islam
- *
- */
-@XmlRootElement
-public class ApiKeyBean implements Serializable {
-
- private static final long serialVersionUID = -8219849086890567740L;
-
- private static final String KEY_CHARS = "ABCDEFGHJIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
-
- private String email;
- private String description;
- /**
- * constructor
- */
- public ApiKeyBean() {
- super();
- }
-/**
- *
- * @param email
- * @param description
- */
- public ApiKeyBean(String email, String description) {
- super();
- this.email = email;
- this.description = description;
- }
-
- public String getEmail() {
- return email;
- }
-
- public void setEmail(String email) {
- this.email = email;
- }
-
- public String getDescription() {
- return description;
- }
-
- public void setDescription(String description) {
- this.description = description;
- }
-
- public String getKey() {
- return generateKey(16);
- }
-
- public String getSharedSecret() {
- return generateKey(24);
- }
-
- private static String generateKey ( int length ) {
- return uniqueStringGenerator.createKeyUsingAlphabet ( KEY_CHARS, length );
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java b/src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java
deleted file mode 100644
index 8cbf64f..0000000
--- a/src/main/java/com/att/dmf/mr/beans/DMaaPCambriaLimiter.java
+++ /dev/null
@@ -1,288 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.util.Date;
-import java.util.HashMap;
-import java.util.concurrent.TimeUnit;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Component;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.exception.DMaaPResponseCode;
-import com.att.dmf.mr.exception.ErrorResponse;
-import com.att.dmf.mr.utils.Utils;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.metrics.impl.CdmRateTicker;
-
-/**
- * class provide rate information
- *
- * @author anowarul.islam
- *
- */
-@Component
-public class DMaaPCambriaLimiter {
- private final HashMap<String, RateInfo> fRateInfo;
- private final HashMap<String, RateInfoCheck> fRateInfoCheck;
- private final double fMaxEmptyPollsPerMinute;
- private final double fMaxPollsPerMinute;
- private final int fWindowLengthMins;
- private final long fSleepMs;
- private final long fSleepMs1;
- private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPCambriaLimiter.class);
-
- /**
- * constructor initializes
- *
- * @param settings
- * @throws missingReqdSetting
- * @throws invalidSettingValue
- */
- @Autowired
- public DMaaPCambriaLimiter(@Qualifier("propertyReader") rrNvReadable settings) {
- fRateInfo = new HashMap<>();
- fRateInfoCheck = new HashMap<>();
- fMaxEmptyPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxEmptyPollsPerMinute,
- CambriaConstants.kDefault_MaxEmptyPollsPerMinute);
- fMaxPollsPerMinute = settings.getDouble(CambriaConstants.kSetting_MaxPollsPerMinute,
- 30);
- fWindowLengthMins = settings.getInt(CambriaConstants.kSetting_RateLimitWindowLength,
- CambriaConstants.kDefault_RateLimitWindowLength);
- fSleepMs = settings.getLong(CambriaConstants.kSetting_SleepMsOnRateLimit,
- CambriaConstants.kDefault_SleepMsOnRateLimit);
- fSleepMs1 = settings.getLong(CambriaConstants.kSetting_SleepMsRealOnRateLimit,
- 5000);
-
- }
-
- /**
- * Construct a rate limiter.
- *
- * @param maxEmptyPollsPerMinute
- * Pass <= 0 to deactivate rate limiting.
- * @param windowLengthMins
- */
- public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute, double maxPollsPerMinute,int windowLengthMins) {
- this(maxEmptyPollsPerMinute,maxPollsPerMinute, windowLengthMins, getSleepMsForRate(maxEmptyPollsPerMinute),getSleepMsForRate(1));
- }
-
- /**
- * Construct a rate limiter
- *
- * @param maxEmptyPollsPerMinute
- * Pass <= 0 to deactivate rate limiting.
- * @param sleepMs
- * @param windowLengthMins
- */
- public DMaaPCambriaLimiter(double maxEmptyPollsPerMinute,double maxPollsPerMinute, int windowLengthMins, long sleepMs ,long sleepMS1) {
- fRateInfo = new HashMap<>();
- fRateInfoCheck = new HashMap<>();
- fMaxEmptyPollsPerMinute = Math.max(0, maxEmptyPollsPerMinute);
- fMaxPollsPerMinute = Math.max(0, maxPollsPerMinute);
- fWindowLengthMins = windowLengthMins;
- fSleepMs = Math.max(0, sleepMs);
- fSleepMs1 = Math.max(0, sleepMS1);
- }
-
- /**
- * static method provide the sleep time
- *
- * @param ratePerMinute
- * @return
- */
- public static long getSleepMsForRate(double ratePerMinute) {
- if (ratePerMinute <= 0.0)
- return 0;
- return Math.max(1000, Math.round(60 * 1000 / ratePerMinute));
- }
-
- /**
- * Tell the rate limiter about a call to a topic/group/id. If the rate is
- * too high, this call delays its return and throws an exception.
- *
- * @param topic
- * @param consumerGroup
- * @param clientId
- * @throws CambriaApiException
- */
- public void onCall(String topic, String consumerGroup, String clientId,String remoteHost) throws CambriaApiException {
- // do nothing if rate is configured 0 or less
- if (fMaxEmptyPollsPerMinute <= 0) {
- return;
- }
- // setup rate info for this tuple
- final RateInfo ri = getRateInfo(topic, consumerGroup, clientId);
- final double rate = ri.onCall();
- log.info(ri.getLabel() + ": " + rate + " empty replies/minute.");
- if (rate > fMaxEmptyPollsPerMinute) {
- try {
- log.warn(ri.getLabel() + ": " + rate + " empty replies/minute, limit is " + fMaxPollsPerMinute
- + ".");
- if (fSleepMs > 0) {
- log.warn(ri.getLabel() + ": " + "Slowing response with " + fSleepMs
- + " ms sleep, then responding in error.");
- Thread.sleep(fSleepMs);
-
- } else {
- log.info(ri.getLabel() + ": " + "No sleep configured, just throwing error.");
- }
- } catch (InterruptedException e) {
- log.error("Exception "+ e);
- // ignore
- }
-
-
- ErrorResponse errRes = new ErrorResponse(HttpStatusCodes.k429_tooManyRequests,
- DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
- "This client is making too many requests. Please use a long poll "
- + "setting to decrease the number of requests that result in empty responses. ","",Utils.getFormattedDate(new Date()),topic,"","",consumerGroup+"/"+clientId,remoteHost);
-
- log.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
-
-
- }
-
- /**
- *
- * @param topic
- * @param consumerGroup
- * @param clientId
- * @param sentCount
- */
- public void onSend(String topic, String consumerGroup, String clientId, long sentCount) {
- // check for good replies
- if (sentCount > 0) {
- // that was a good send, reset the metric
- getRateInfo(topic, consumerGroup, clientId).reset();
- }
- }
-
- private static class RateInfo {
- private final String fLabel;
- private final CdmRateTicker fCallRateSinceLastMsgSend;
- /**
- * constructor initialzes
- *
- * @param label
- * @param windowLengthMinutes
- */
- public RateInfo(String label, int windowLengthMinutes) {
- fLabel = label;
- fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
- windowLengthMinutes, TimeUnit.MINUTES);
- }
-
- public String getLabel() {
- return fLabel;
- }
-
- /**
- * CdmRateTicker is reset
- */
- public void reset() {
- fCallRateSinceLastMsgSend.reset();
- }
-
- /**
- *
- * @return
- */
- public double onCall() {
- fCallRateSinceLastMsgSend.tick();
- return fCallRateSinceLastMsgSend.getRate();
- }
- }
-
-
-
- private static class RateInfoCheck {
-
- private final String fLabel;
- private final CdmRateTicker fCallRateSinceLastMsgSend;
- /**
- * constructor initialzes
- *
- * @param label
- * @param windowLengthMinutes
- */
- public RateInfoCheck(String label, int windowLengthMinutes) {
- fLabel = label;
- fCallRateSinceLastMsgSend = new CdmRateTicker("Call rate since last msg send", 1, TimeUnit.MINUTES,
- windowLengthMinutes, TimeUnit.MINUTES);
- }
-
- public String getLabel() {
- return fLabel;
- }
-
- /**
- * CdmRateTicker is reset
- */
- public void reset() {
- fCallRateSinceLastMsgSend.reset();
- }
-
- /**
- *
- * @return
- */
- public double onCall() {
- fCallRateSinceLastMsgSend.tick();
- return fCallRateSinceLastMsgSend.getRate();
- }
- }
-
-
-
-
- private RateInfo getRateInfo(String topic, String consumerGroup, String clientId) {
- final String key = makeKey(topic, consumerGroup, clientId);
- RateInfo ri = fRateInfo.get(key);
- if (ri == null) {
- ri = new RateInfo(key, fWindowLengthMins);
- fRateInfo.put(key, ri);
- }
- return ri;
- }
-
-
-
-
-
-
-
- private String makeKey(String topic, String group, String id) {
- return topic + "::" + group + "::" + id;
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPContext.java b/src/main/java/com/att/dmf/mr/beans/DMaaPContext.java
deleted file mode 100644
index a880877..0000000
--- a/src/main/java/com/att/dmf/mr/beans/DMaaPContext.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-import com.att.dmf.mr.utils.ConfigurationReader;
-
-/**
- * DMaaPContext provide and maintain all the configurations , Http request/response
- * Session and consumer Request Time
- * @author nilanjana.maity
- *
- */
-public class DMaaPContext {
-
- private ConfigurationReader configReader;
- private HttpServletRequest request;
- private HttpServletResponse response;
- private HttpSession session;
- private String consumerRequestTime;
- static int i=0;
-
- public synchronized static long getBatchID() {
- try{
- final long metricsSendTime = System.currentTimeMillis();
- final Date d = new Date(metricsSendTime);
- final String text = new SimpleDateFormat("ddMMyyyyHHmmss").format(d);
- long dt= Long.valueOf(text)+i;
- i++;
- return dt;
- }
- catch(NumberFormatException ex){
- return 0;
- }
- }
-
- public HttpServletRequest getRequest() {
- return request;
- }
-
- public void setRequest(HttpServletRequest request) {
- this.request = request;
- }
-
- public HttpServletResponse getResponse() {
- return response;
- }
-
- public void setResponse(HttpServletResponse response) {
- this.response = response;
- }
-
- public HttpSession getSession() {
- this.session = request.getSession();
- return session;
- }
-
- public void setSession(HttpSession session) {
- this.session = session;
- }
-
- public ConfigurationReader getConfigReader() {
- return configReader;
- }
-
- public void setConfigReader(ConfigurationReader configReader) {
- this.configReader = configReader;
- }
-
- public String getConsumerRequestTime() {
- return consumerRequestTime;
- }
-
- public void setConsumerRequestTime(String consumerRequestTime) {
- this.consumerRequestTime = consumerRequestTime;
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java
deleted file mode 100644
index fb0ace0..0000000
--- a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaConsumerFactory.java
+++ /dev/null
@@ -1,361 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Properties;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.recipes.locks.InterProcessMutex;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.backends.ConsumerFactory;
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.dmf.mr.backends.kafka.Kafka011Consumer;
-import com.att.dmf.mr.backends.kafka.Kafka011ConsumerUtil;
-import com.att.dmf.mr.backends.kafka.KafkaConsumerCache;
-import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
-import com.att.dmf.mr.backends.kafka.LiveLockAvoidance;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.utils.ConfigurationReader;
-import com.att.dmf.mr.utils.Utils;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-
-/**
- * @author nilanjana.maity
- *
- */
-public class DMaaPKafkaConsumerFactory implements ConsumerFactory {
-
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPKafkaConsumerFactory.class);
-
-
- /**
- * constructor initialization
- *
- * @param settings
- * @param metrics
- * @param curator
- * @throws missingReqdSetting
- * @throws KafkaConsumerCacheException
- * @throws UnknownHostException
- */
-
- public DMaaPKafkaConsumerFactory(@Qualifier("dMaaPMetricsSet") MetricsSet metrics,
- @Qualifier("curator") CuratorFramework curator,
- @Qualifier("kafkalockavoid") KafkaLiveLockAvoider2 kafkaLiveLockAvoider)
- throws missingReqdSetting, KafkaConsumerCacheException, UnknownHostException {
-
- String apiNodeId = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- CambriaConstants.kSetting_ApiNodeIdentifier);
- if (apiNodeId == null) {
-
- apiNodeId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + CambriaConstants.kDefault_Port;
- }
-
- log.info("This Cambria API Node identifies itself as [" + apiNodeId + "].");
- final String mode = CambriaConstants.DMAAP;
-
- fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- "kafka.metadata.broker.list");
- if (null == fkafkaBrokers) {
-
- fkafkaBrokers = "localhost:9092";
- }
-
- boolean kSetting_EnableCache = kDefault_IsCacheEnabled;
- String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- "cambria.consumer.cache.enabled");
- if (null != strkSetting_EnableCache)
- kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
-
- final boolean isCacheEnabled = kSetting_EnableCache;
-
-
- fCache = null;
- if (isCacheEnabled) {
- fCache = KafkaConsumerCache.getInstance();
-
- }
- if (fCache != null) {
- fCache.setfMetrics(metrics);
- fCache.setfApiId(apiNodeId);
- fCache.startCache(mode, curator);
- if(kafkaLiveLockAvoider!=null){
- kafkaLiveLockAvoider.startNewWatcherForServer(apiNodeId, makeAvoidanceCallback(apiNodeId));
- fkafkaLiveLockAvoider = kafkaLiveLockAvoider;
- }
- }
- }
-
- /*
- * getConsumerFor
- *
- * @see
- * com.att.dmf.mr.backends.ConsumerFactory#getConsumerFor(java.lang.String,
- * java.lang.String, java.lang.String, int, java.lang.String) This method is
- * used by EventServiceImpl.getEvents() method to get a Kakfa consumer
- * either from kafkaconsumer cache or create a new connection This also get
- * the list of other consumer objects for the same consumer group and set to
- * KafkaConsumer object. This list may be used during poll-rebalancing
- * issue.
- */
- @Override
- public Consumer getConsumerFor(String topic, String consumerGroupName, String consumerId, int timeoutMs,
- String remotehost) throws UnavailableException, CambriaApiException {
- Kafka011Consumer kc;
-
- // To synchronize based on the consumer group.
-
- Object syncObject = synchash.get(topic + consumerGroupName);
- if (null == syncObject) {
- syncObject = new Object();
- synchash.put(topic + consumerGroupName, syncObject);
- }
-
- synchronized (syncObject) {
- try {
- kc = (fCache != null) ? fCache.getConsumerFor(topic, consumerGroupName, consumerId) : null; // consumerId
-
- } catch (KafkaConsumerCacheException e) {
- log.info("######@@@@### Error occured in Kafka Caching" + e + " " + topic + "::" + consumerGroupName
- + "::" + consumerId);
- log.error("####@@@@## Error occured in Kafka Caching" + e + " " + topic + "::" + consumerGroupName
- + "::" + consumerId);
- throw new UnavailableException(e);
- }
-
- // Ideally if cache exists below flow should be skipped. If cache
- // didnt
- // exist, then create this first time on this node.
- if (kc == null) {
-
- log.info("^Kafka consumer cache value " + topic + "::" + consumerGroupName + "::" + consumerId + " =>"
- + kc);
-
- final InterProcessMutex ipLock = new InterProcessMutex(ConfigurationReader.getCurator(),
- "/consumerFactory/" + topic + "/" + consumerGroupName + "/" + consumerId);
- boolean locked = false;
-
- try {
-
- locked = ipLock.acquire(30, TimeUnit.SECONDS);
- if (!locked) {
-
- log.info("Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
- + ", " + consumerGroupName + ", " + consumerId + ") from " + remotehost);
- throw new UnavailableException(
- "Could not acquire lock in order to create (topic, group, consumer) = " + "(" + topic
- + ", " + consumerGroupName + ", " + consumerId + ") " + remotehost);
- }
-
- // ConfigurationReader.getCurator().checkExists().forPath("S").
-
- log.info("Creating Kafka consumer for group [" + consumerGroupName + "], consumer [" + consumerId
- + "], on topic [" + topic + "].");
-
- if (fCache != null) {
- fCache.signalOwnership(topic, consumerGroupName, consumerId);
- }
-
- final Properties props = createConsumerConfig(topic,consumerGroupName, consumerId);
- long fCreateTimeMs = System.currentTimeMillis();
- KafkaConsumer<String, String> cc = new KafkaConsumer<>(props);
- kc = new Kafka011Consumer(topic, consumerGroupName, consumerId, cc, fkafkaLiveLockAvoider);
- log.info(" kafka stream created in " + (System.currentTimeMillis() - fCreateTimeMs));
-
- if (fCache != null) {
- fCache.putConsumerFor(topic, consumerGroupName, consumerId, kc); //
- }
-
- } catch (org.I0Itec.zkclient.exception.ZkTimeoutException x) {
- log.info(
- "Kafka consumer couldn't connect to ZK. " + x + " " + consumerGroupName + "/" + consumerId);
- throw new UnavailableException("Couldn't connect to ZK.");
- } catch (KafkaConsumerCacheException e) {
- log.info("Failed to cache consumer (this may have performance implications): " + e.getMessage()
- + " " + consumerGroupName + "/" + consumerId);
- } catch (UnavailableException u) {
- log.info("Failed and in UnavailableException block " + u.getMessage() + " " + consumerGroupName
- + "/" + consumerId);
- throw new UnavailableException("Error while acquiring consumer factory lock " + u.getMessage(), u);
- } catch (Exception e) {
- log.info("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/"
- + consumerId);
- log.error("Failed and go to Exception block " + e.getMessage() + " " + consumerGroupName + "/"
- + consumerId);
-
- } finally {
- if (locked) {
- try {
- ipLock.release();
- } catch (Exception e) {
- throw new UnavailableException("Error while releasing consumer factory lock" + e, e);
- }
- }
- }
- }
- }
- return kc;
- }
-
- @Override
- public synchronized void destroyConsumer(String topic, String consumerGroup, String clientId) {
- if (fCache != null) {
- fCache.dropConsumer(topic, consumerGroup, clientId);
- }
- }
-
- @Override
- public synchronized Collection<? extends Consumer> getConsumers() {
- return fCache.getConsumers();
- }
-
- @Override
- public synchronized void dropCache() {
- fCache.dropAllConsumers();
- }
-
-
- private KafkaConsumerCache fCache;
- private KafkaLiveLockAvoider2 fkafkaLiveLockAvoider;
- private String fkafkaBrokers;
-
-
-
- private static String makeLongKey(String key, String prefix) {
- return prefix + "." + key;
- }
-
- private void transferSettingIfProvided(Properties target, String key, String prefix) {
- String keyVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, makeLongKey(key, prefix));
-
-
- if (null != keyVal) {
-
- log.info("Setting [" + key + "] to " + keyVal + ".");
- target.put(key, keyVal);
- }
- }
-
- /**
- * Name CreateConsumerconfig
- * @param topic
- * @param groupId
- * @param consumerId
- * @return Properties
- *
- * This method is to create Properties required to create kafka connection
- * Group name is replaced with different format groupid--topic to address same
- * groupids for multiple topics. Same groupid with multiple topics
- * may start frequent consumer rebalancing on all the topics . Replacing them makes it unique
- */
- private Properties createConsumerConfig(String topic ,String groupId, String consumerId) {
- final Properties props = new Properties();
- //fakeGroupName is added to avoid multiple consumer group for multiple topics.Donot Change this logic
- //Fix for CPFMF-644 :
- final String fakeGroupName = groupId + "--" + topic;
- props.put("group.id", fakeGroupName);
- props.put("enable.auto.commit", "false"); // 0.11
- props.put("bootstrap.servers", fkafkaBrokers);
- if(Utils.isCadiEnabled()){
- props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
- props.put("security.protocol", "SASL_PLAINTEXT");
- props.put("sasl.mechanism", "PLAIN");
- }
- props.put("client.id", consumerId);
-
- // additional settings: start with our defaults, then pull in configured
- // overrides
- populateKafkaInternalDefaultsMap();
- for (String key : KafkaConsumerKeys) {
- transferSettingIfProvided(props, key, "kafka");
- }
-
- props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
- props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
-
- return props;
- }
-
-
- private static final String KafkaConsumerKeys[] = { "bootstrap.servers", "heartbeat.interval.ms",
- "auto.offset.reset", "exclude.internal.topics", "session.timeout.ms", "fetch.max.bytes",
- "auto.commit.interval.ms", "connections.max.idle.ms", "fetch.min.bytes", "isolation.level",
- "fetch.max.bytes", "request.timeout.ms", "fetch.max.wait.bytes", "reconnect.backoff.max.ms",
- "max.partition.fetch.bytes", "reconnect.backoff.max.ms", "reconnect.backoff.ms", "retry.backoff.ms",
- "max.poll.interval.ms", "max.poll.records", "receive.buffer.bytes", "metadata.max.age.ms" };
-
- /**
- * putting values in hashmap like consumer timeout, zookeeper time out, etc
- *
- * @param setting
- */
- private static void populateKafkaInternalDefaultsMap() { }
-
- /*
- * The starterIncremnt value is just to emulate calling certain consumers,
- * in this test app all the consumers are local
- *
- */
- private LiveLockAvoidance makeAvoidanceCallback(final String appId) {
-
- return new LiveLockAvoidance() {
-
- @Override
- public String getAppId() {
- return appId;
- }
-
- @Override
- public void handleRebalanceUnlock(String groupName) {
- log.info("FORCE A POLL NOW FOR appId: [{}] group: [{}]", getAppId(), groupName);
- Kafka011ConsumerUtil.forcePollOnConsumer(groupName + "::");
- }
-
- };
-
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public HashMap getConsumerForKafka011(String topic, String consumerGroupName, String consumerId, int timeoutMs,
- String remotehost) throws UnavailableException, CambriaApiException {
- // TODO Auto-generated method stub
- return null;
- }
-
- private HashMap<String, Object> synchash = new HashMap<String, Object>();
-
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java b/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java
deleted file mode 100644
index acf4824..0000000
--- a/src/main/java/com/att/dmf/mr/beans/DMaaPKafkaMetaBroker.java
+++ /dev/null
@@ -1,495 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.ExecutionException;
-
-import org.I0Itec.zkclient.ZkClient;
-import org.I0Itec.zkclient.exception.ZkNoNodeException;
-import org.apache.kafka.clients.admin.AdminClient;
-import org.apache.kafka.clients.admin.AdminClientConfig;
-import org.apache.kafka.clients.admin.CreateTopicsResult;
-import org.apache.kafka.clients.admin.NewTopic;
-import org.apache.kafka.common.KafkaFuture;
-import org.json.JSONObject;
-import org.json.JSONArray;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Component;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.metabroker.Broker;
-import com.att.dmf.mr.metabroker.Broker1;
-import com.att.dmf.mr.metabroker.Topic;
-import com.att.dmf.mr.utils.ConfigurationReader;
-import com.att.dmf.mr.utils.Utils;
-//import org.apache.log4-j.Logger;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.configs.ConfigPath;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaAclUtils;
-import com.att.nsa.security.NsaApiKey;
-
-
-/**
- * class performing all topic operations
- *
- * @author anowarul.islam
- *
- */
-//@Component
-public class DMaaPKafkaMetaBroker implements Broker1 {
-
- public DMaaPKafkaMetaBroker() {
- fZk = null;
- fCambriaConfig = null;
- fBaseTopicData = null;
- final Properties props = new Properties ();
- String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- "kafka.metadata.broker.list");
- if (null == fkafkaBrokers) {
-
- fkafkaBrokers = "localhost:9092";
- }
-
- props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
- if(Utils.isCadiEnabled()){
- props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
- props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
- props.put("sasl.mechanism", "PLAIN");
- }
-
- fKafkaAdminClient=AdminClient.create ( props );
-
- }
-
- //private static final Logger log = Logger.getLogger(DMaaPKafkaMetaBroker.class);
- private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
- private final AdminClient fKafkaAdminClient;
-
-
-
- /**
- * DMaaPKafkaMetaBroker constructor initializing
- *
- * @param settings
- * @param zk
- * @param configDb
- */
- public DMaaPKafkaMetaBroker(@Qualifier("propertyReader") rrNvReadable settings,
- @Qualifier("dMaaPZkClient") ZkClient zk, @Qualifier("dMaaPZkConfigDb") ConfigDb configDb) {
- //fSettings = settings;
- fZk = zk;
- fCambriaConfig = configDb;
- fBaseTopicData = configDb.parse("/topics");
- final Properties props = new Properties ();
- String fkafkaBrokers = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- "kafka.metadata.broker.list");
- if (null == fkafkaBrokers) {
-
- fkafkaBrokers = "localhost:9092";
- }
-
- if(Utils.isCadiEnabled()){
- props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='"+Utils.getKafkaproperty()+"';");
- props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
- props.put("sasl.mechanism", "PLAIN");
- }
- props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, fkafkaBrokers );
-
- fKafkaAdminClient=AdminClient.create ( props );
-
-
-
- }
-
- public DMaaPKafkaMetaBroker( rrNvReadable settings,
- ZkClient zk, ConfigDb configDb,AdminClient client) {
-
- fZk = zk;
- fCambriaConfig = configDb;
- fBaseTopicData = configDb.parse("/topics");
- fKafkaAdminClient= client;
-
-
-
- }
-
- @Override
- public List<Topic> getAllTopics() throws ConfigDbException {
- log.info("Retrieving list of all the topics.");
- final LinkedList<Topic> result = new LinkedList<Topic>();
- try {
- log.info("Retrieving all topics from root: " + zkTopicsRoot);
- final List<String> topics = fZk.getChildren(zkTopicsRoot);
- for (String topic : topics) {
- result.add(new KafkaTopic(topic, fCambriaConfig, fBaseTopicData));
- }
- JSONObject dataObj = new JSONObject();
- dataObj.put("topics", new JSONObject());
-
- for (String topic : topics) {
- dataObj.getJSONObject("topics").put(topic, new JSONObject());
- }
- } catch (ZkNoNodeException excp) {
- // very fresh kafka doesn't have any topics or a topics node
- log.error("ZK doesn't have a Kakfa topics node at " + zkTopicsRoot, excp);
- }
- return result;
- }
-
- @Override
- public Topic getTopic(String topic) throws ConfigDbException {
- if (fZk.exists(zkTopicsRoot + "/" + topic)) {
- return getKafkaTopicConfig(fCambriaConfig, fBaseTopicData, topic);
- }
- // else: no such topic in kafka
- return null;
- }
-
- /**
- * static method get KafkaTopic object
- *
- * @param db
- * @param base
- * @param topic
- * @return
- * @throws ConfigDbException
- */
- public static KafkaTopic getKafkaTopicConfig(ConfigDb db, ConfigPath base, String topic) throws ConfigDbException {
- return new KafkaTopic(topic, db, base);
- }
-
- /**
- * creating topic
- */
- @Override
- public Topic createTopic(String topic, String desc, String ownerApiKey, int partitions, int replicas,
- boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException {
- log.info("Creating topic: " + topic);
- try {
- log.info("Check if topic [" + topic + "] exist.");
- // first check for existence "our way"
- final Topic t = getTopic(topic);
- if (t != null) {
- log.info("Could not create topic [" + topic + "]. Topic Already exists.");
- throw new TopicExistsException("Could not create topic [" + topic + "]. Topic Alreay exists.");
- }
- } catch (ConfigDbException e1) {
- log.error("Topic [" + topic + "] could not be created. Couldn't check topic data in config db.", e1);
- throw new CambriaApiException(HttpStatusCodes.k503_serviceUnavailable,
- "Couldn't check topic data in config db.");
- }
-
- // we only allow 3 replicas. (If we don't test this, we get weird
- // results from the cluster,
- // so explicit test and fail.)
- if (replicas < 1 || replicas > 3) {
- log.info("Topic [" + topic + "] could not be created. The replica count must be between 1 and 3.");
- throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
- "The replica count must be between 1 and 3.");
- }
- if (partitions < 1) {
- log.info("Topic [" + topic + "] could not be created. The partition count must be at least 1.");
- throw new CambriaApiException(HttpStatusCodes.k400_badRequest, "The partition count must be at least 1.");
- }
-
- // create via kafka
-
- try
- {
- final NewTopic topicRequest = new NewTopic ( topic, partitions, new Integer(replicas).shortValue () );
- final CreateTopicsResult ctr = fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) );
- final KafkaFuture<Void> ctrResult = ctr.all ();
- ctrResult.get ();
- // underlying Kafka topic created. now setup our API info
- return createTopicEntry ( topic, desc, ownerApiKey, transactionEnabled );
- }
- catch ( InterruptedException e )
- {
-
- log.warn ( "Execution of describeTopics timed out." );
- throw new ConfigDbException ( e );
- }
- catch ( ExecutionException e )
- {
-
- log.warn ( "Execution of describeTopics failed: " + e.getCause ().getMessage (), e.getCause () );
- throw new ConfigDbException ( e.getCause () );
- }
-
- }
-
- @Override
- public void deleteTopic(String topic) throws CambriaApiException, TopicExistsException,ConfigDbException {
- log.info("Deleting topic: " + topic);
- ZkClient zkClient = null;
- try {
- log.info("Loading zookeeper client for topic deletion.");
- // topic creation. (Otherwise, the topic is only partially created
- // in ZK.)
-
-
- fKafkaAdminClient.deleteTopics(Arrays.asList(topic));
- log.info("Zookeeper client loaded successfully. Deleting topic.");
-
- } catch (Exception e) {
- log.error("Failed to delete topic [" + topic + "]. " + e.getMessage(), e);
- throw new ConfigDbException(e);
- } finally {
- log.info("Closing zookeeper connection.");
- if (zkClient != null)
- zkClient.close();
- }
-
- // throw new UnsupportedOperationException ( "We can't programmatically
- // delete Kafka topics yet." );
- }
-
- //private final rrNvReadable fSettings;
- private final ZkClient fZk;
- private final ConfigDb fCambriaConfig;
- private final ConfigPath fBaseTopicData;
-
- private static final String zkTopicsRoot = "/brokers/topics";
- private static final JSONObject kEmptyAcl = new JSONObject();
-
- /**
- * method Providing KafkaTopic Object associated with owner and
- * transactionenabled or not
- *
- * @param name
- * @param desc
- * @param owner
- * @param transactionEnabled
- * @return
- * @throws ConfigDbException
- */
- public KafkaTopic createTopicEntry(String name, String desc, String owner, boolean transactionEnabled)
- throws ConfigDbException {
- return createTopicEntry(fCambriaConfig, fBaseTopicData, name, desc, owner, transactionEnabled);
- }
-
- /**
- * static method giving kafka topic object
- *
- * @param db
- * @param basePath
- * @param name
- * @param desc
- * @param owner
- * @param transactionEnabled
- * @return
- * @throws ConfigDbException
- */
- public static KafkaTopic createTopicEntry(ConfigDb db, ConfigPath basePath, String name, String desc, String owner,
- boolean transactionEnabled) throws ConfigDbException {
- final JSONObject o = new JSONObject();
- o.put("owner", owner);
- o.put("description", desc);
- o.put("txenabled", transactionEnabled);
- db.store(basePath.getChild(name), o.toString());
- return new KafkaTopic(name, db, basePath);
- }
-
- /**
- * class performing all user opearation like user is eligible to read,
- * write. permitting a user to write and read,
- *
- * @author anowarul.islam
- *
- */
- public static class KafkaTopic implements Topic {
- /**
- * constructor initializes
- *
- * @param name
- * @param configdb
- * @param baseTopic
- * @throws ConfigDbException
- */
- public KafkaTopic(String name, ConfigDb configdb, ConfigPath baseTopic) throws ConfigDbException {
- fName = name;
- fConfigDb = configdb;
- fBaseTopicData = baseTopic;
-
- String data = fConfigDb.load(fBaseTopicData.getChild(fName));
- if (data == null) {
- data = "{}";
- }
-
- final JSONObject o = new JSONObject(data);
- fOwner = o.optString("owner", "");
- fDesc = o.optString("description", "");
- fTransactionEnabled = o.optBoolean("txenabled", false);// default
- // value is
- // false
- // if this topic has an owner, it needs both read/write ACLs. If there's no
- // owner (or it's empty), null is okay -- this is for existing or implicitly
- // created topics.
- JSONObject readers = o.optJSONObject ( "readers" );
- if ( readers == null && fOwner.length () > 0 ) readers = kEmptyAcl;
- fReaders = fromJson ( readers );
-
- JSONObject writers = o.optJSONObject ( "writers" );
- if ( writers == null && fOwner.length () > 0 ) writers = kEmptyAcl;
- fWriters = fromJson ( writers );
- }
-
- private NsaAcl fromJson(JSONObject o) {
- NsaAcl acl = new NsaAcl();
- if (o != null) {
- JSONArray a = o.optJSONArray("allowed");
- if (a != null) {
- for (int i = 0; i < a.length(); ++i) {
- String user = a.getString(i);
- acl.add(user);
- }
- }
- }
- return acl;
- }
-
- @Override
- public String getName() {
- return fName;
- }
-
- @Override
- public String getOwner() {
- return fOwner;
- }
-
- @Override
- public String getDescription() {
- return fDesc;
- }
-
- @Override
- public NsaAcl getReaderAcl() {
- return fReaders;
- }
-
- @Override
- public NsaAcl getWriterAcl() {
- return fWriters;
- }
-
- @Override
- public void checkUserRead(NsaApiKey user) throws AccessDeniedException {
- NsaAclUtils.checkUserAccess ( fOwner, getReaderAcl(), user );
- }
-
- @Override
- public void checkUserWrite(NsaApiKey user) throws AccessDeniedException {
- NsaAclUtils.checkUserAccess ( fOwner, getWriterAcl(), user );
- }
-
- @Override
- public void permitWritesFromUser(String pubId, NsaApiKey asUser)
- throws ConfigDbException, AccessDeniedException {
- updateAcl(asUser, false, true, pubId);
- }
-
- @Override
- public void denyWritesFromUser(String pubId, NsaApiKey asUser) throws ConfigDbException, AccessDeniedException {
- updateAcl(asUser, false, false, pubId);
- }
-
- @Override
- public void permitReadsByUser(String consumerId, NsaApiKey asUser)
- throws ConfigDbException, AccessDeniedException {
- updateAcl(asUser, true, true, consumerId);
- }
-
- @Override
- public void denyReadsByUser(String consumerId, NsaApiKey asUser)
- throws ConfigDbException, AccessDeniedException {
- updateAcl(asUser, true, false, consumerId);
- }
-
- private void updateAcl(NsaApiKey asUser, boolean reader, boolean add, String key)
- throws ConfigDbException, AccessDeniedException{
- try
- {
- final NsaAcl acl = NsaAclUtils.updateAcl ( this, asUser, key, reader, add );
-
- // we have to assume we have current data, or load it again. for the expected use
- // case, assuming we can overwrite the data is fine.
- final JSONObject o = new JSONObject ();
- o.put ( "owner", fOwner );
- o.put ( "readers", safeSerialize ( reader ? acl : fReaders ) );
- o.put ( "writers", safeSerialize ( reader ? fWriters : acl ) );
- fConfigDb.store ( fBaseTopicData.getChild ( fName ), o.toString () );
-
- log.info ( "ACL_UPDATE: " + asUser.getKey () + " " + ( add ? "added" : "removed" ) + ( reader?"subscriber":"publisher" ) + " " + key + " on " + fName );
-
- }
- catch ( ConfigDbException x )
- {
- throw x;
- }
- catch ( AccessDeniedException x )
- {
- throw x;
- }
-
- }
-
- private JSONObject safeSerialize(NsaAcl acl) {
- return acl == null ? null : acl.serialize();
- }
-
- private final String fName;
- private final ConfigDb fConfigDb;
- private final ConfigPath fBaseTopicData;
- private final String fOwner;
- private final String fDesc;
- private final NsaAcl fReaders;
- private final NsaAcl fWriters;
- private boolean fTransactionEnabled;
-
- public boolean isTransactionEnabled() {
- return fTransactionEnabled;
- }
-
- @Override
- public Set<String> getOwners() {
- final TreeSet<String> owners = new TreeSet<String> ();
- owners.add ( fOwner );
- return owners;
- }
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java b/src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java
deleted file mode 100644
index 4c9532b..0000000
--- a/src/main/java/com/att/dmf/mr/beans/DMaaPMetricsSet.java
+++ /dev/null
@@ -1,231 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import com.att.dmf.mr.CambriaApiVersionInfo;
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.mr.apiServer.metrics.cambria.DMaaPMetricsSender;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.metrics.impl.CdmConstant;
-import com.att.nsa.metrics.impl.CdmCounter;
-import com.att.nsa.metrics.impl.CdmMetricsRegistryImpl;
-import com.att.nsa.metrics.impl.CdmMovingAverage;
-import com.att.nsa.metrics.impl.CdmRateTicker;
-import com.att.nsa.metrics.impl.CdmSimpleMetric;
-import com.att.nsa.metrics.impl.CdmStringConstant;
-import com.att.nsa.metrics.impl.CdmTimeSince;
-
-/*@Component("dMaaPMetricsSet")*/
-/**
- * Metrics related information
- *
- * @author anowarul.islam
- *
- */
-public class DMaaPMetricsSet extends CdmMetricsRegistryImpl implements MetricsSet {
-
- private final CdmStringConstant fVersion;
- private final CdmConstant fStartTime;
- private final CdmTimeSince fUpTime;
-
- private final CdmCounter fRecvTotal;
- private final CdmRateTicker fRecvEpsInstant;
- private final CdmRateTicker fRecvEpsShort;
- private final CdmRateTicker fRecvEpsLong;
-
- private final CdmCounter fSendTotal;
- private final CdmRateTicker fSendEpsInstant;
- private final CdmRateTicker fSendEpsShort;
- private final CdmRateTicker fSendEpsLong;
-
- private final CdmCounter fKafkaConsumerCacheMiss;
- private final CdmCounter fKafkaConsumerCacheHit;
-
- private final CdmCounter fKafkaConsumerClaimed;
- private final CdmCounter fKafkaConsumerTimeout;
-
- private final CdmSimpleMetric fFanOutRatio;
-
- private final HashMap<String, CdmRateTicker> fPathUseRates;
- private final HashMap<String, CdmMovingAverage> fPathAvgs;
-
- private rrNvReadable fSettings;
-
- private final ScheduledExecutorService fScheduler;
-
- /**
- * Constructor initialization
- *
- * @param cs
- */
-
- public DMaaPMetricsSet(rrNvReadable cs) {
-
- fVersion = new CdmStringConstant("Version " + CambriaApiVersionInfo.getVersion());
- super.putItem("version", fVersion);
-
- final long startTime = System.currentTimeMillis();
- final Date d = new Date(startTime);
- final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
- fStartTime = new CdmConstant(startTime / 1000, "Start Time (epoch); " + text);
- super.putItem("startTime", fStartTime);
-
- fUpTime = new CdmTimeSince("seconds since start");
- super.putItem("upTime", fUpTime);
-
- fRecvTotal = new CdmCounter("Total events received since start");
- super.putItem("recvTotalEvents", fRecvTotal);
-
- fRecvEpsInstant = new CdmRateTicker("recv eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
- super.putItem("recvEpsInstant", fRecvEpsInstant);
-
- fRecvEpsShort = new CdmRateTicker("recv eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
- super.putItem("recvEpsShort", fRecvEpsShort);
-
- fRecvEpsLong = new CdmRateTicker("recv eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
- super.putItem("recvEpsLong", fRecvEpsLong);
-
- fSendTotal = new CdmCounter("Total events sent since start");
- super.putItem("sendTotalEvents", fSendTotal);
-
- fSendEpsInstant = new CdmRateTicker("send eps (1 min)", 1, TimeUnit.SECONDS, 1, TimeUnit.MINUTES);
- super.putItem("sendEpsInstant", fSendEpsInstant);
-
- fSendEpsShort = new CdmRateTicker("send eps (10 mins)", 1, TimeUnit.SECONDS, 10, TimeUnit.MINUTES);
- super.putItem("sendEpsShort", fSendEpsShort);
-
- fSendEpsLong = new CdmRateTicker("send eps (1 hr)", 1, TimeUnit.SECONDS, 1, TimeUnit.HOURS);
- super.putItem("sendEpsLong", fSendEpsLong);
-
- fKafkaConsumerCacheMiss = new CdmCounter("Kafka Consumer Cache Misses");
- super.putItem("kafkaConsumerCacheMiss", fKafkaConsumerCacheMiss);
-
- fKafkaConsumerCacheHit = new CdmCounter("Kafka Consumer Cache Hits");
- super.putItem("kafkaConsumerCacheHit", fKafkaConsumerCacheHit);
-
- fKafkaConsumerClaimed = new CdmCounter("Kafka Consumers Claimed");
- super.putItem("kafkaConsumerClaims", fKafkaConsumerClaimed);
-
- fKafkaConsumerTimeout = new CdmCounter("Kafka Consumers Timedout");
- super.putItem("kafkaConsumerTimeouts", fKafkaConsumerTimeout);
-
- // FIXME: CdmLevel is not exactly a great choice
- fFanOutRatio = new CdmSimpleMetric() {
- @Override
- public String getRawValueString() {
- return getRawValue().toString();
- }
-
- @Override
- public Number getRawValue() {
- final double s = fSendTotal.getValue();
- final double r = fRecvTotal.getValue();
- return r == 0.0 ? 0.0 : s / r;
- }
-
- @Override
- public String summarize() {
- return getRawValueString() + " sends per recv";
- }
-
- };
- super.putItem("fanOut", fFanOutRatio);
-
- // these are added to the metrics catalog as they're discovered
- fPathUseRates = new HashMap<String, CdmRateTicker>();
- fPathAvgs = new HashMap<String, CdmMovingAverage>();
-
- fScheduler = Executors.newScheduledThreadPool(1);
- }
-
- @Override
- public void setupCambriaSender() {
- DMaaPMetricsSender.sendPeriodically(fScheduler, this, "cambria.apinode.metrics.dmaap");
- }
-
- @Override
- public void onRouteComplete(String name, long durationMs) {
- CdmRateTicker ticker = fPathUseRates.get(name);
- if (ticker == null) {
- ticker = new CdmRateTicker("calls/min on path " + name + "", 1, TimeUnit.MINUTES, 1, TimeUnit.HOURS);
- fPathUseRates.put(name, ticker);
- super.putItem("pathUse_" + name, ticker);
- }
- ticker.tick();
-
- CdmMovingAverage durs = fPathAvgs.get(name);
- if (durs == null) {
- durs = new CdmMovingAverage("ms avg duration on path " + name + ", last 10 minutes", 10, TimeUnit.MINUTES);
- fPathAvgs.put(name, durs);
- super.putItem("pathDurationMs_" + name, durs);
- }
- durs.tick(durationMs);
- }
-
- @Override
- public void publishTick(int amount) {
- if (amount > 0) {
- fRecvTotal.bumpBy(amount);
- fRecvEpsInstant.tick(amount);
- fRecvEpsShort.tick(amount);
- fRecvEpsLong.tick(amount);
- }
- }
-
- @Override
- public void consumeTick(int amount) {
- if (amount > 0) {
- fSendTotal.bumpBy(amount);
- fSendEpsInstant.tick(amount);
- fSendEpsShort.tick(amount);
- fSendEpsLong.tick(amount);
- }
- }
-
- @Override
- public void onKafkaConsumerCacheMiss() {
- fKafkaConsumerCacheMiss.bump();
- }
-
- @Override
- public void onKafkaConsumerCacheHit() {
- fKafkaConsumerCacheHit.bump();
- }
-
- @Override
- public void onKafkaConsumerClaimed() {
- fKafkaConsumerClaimed.bump();
- }
-
- @Override
- public void onKafkaConsumerTimeout() {
- fKafkaConsumerTimeout.bump();
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java b/src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java
deleted file mode 100644
index 963ff2d..0000000
--- a/src/main/java/com/att/dmf/mr/beans/DMaaPNsaApiDb.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.security.Key;
-
-
-import org.springframework.beans.factory.annotation.Autowired;
-
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.configs.confimpl.EncryptingLayer;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.db.BaseNsaApiDbImpl;
-import com.att.nsa.security.db.EncryptingApiDbImpl;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
-import com.att.nsa.util.rrConvertor;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public class DMaaPNsaApiDb {
-
-
- private DMaaPZkConfigDb cdb;
-
- //private static final Logger log = Logger
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPNsaApiDb.class);
-
-/**
- *
- * Constructor initialized
- * @param settings
- * @param cdb
- */
- @Autowired
- public DMaaPNsaApiDb(rrNvReadable settings, DMaaPZkConfigDb cdb) {
-
- this.setCdb(cdb);
- }
- /**
- *
- * @param settings
- * @param cdb
- * @return
- * @throws ConfigDbException
- * @throws missingReqdSetting
- */
- public static NsaApiDb<NsaSimpleApiKey> buildApiKeyDb(
- rrNvReadable settings, ConfigDb cdb) throws ConfigDbException,
- missingReqdSetting {
- // Cambria uses an encrypted api key db
-
-
- final String keyBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.key");
-
-
-
- final String initVectorBase64 =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"cambria.secureConfig.iv");
- // if neither value was provided, don't encrypt api key db
- if (keyBase64 == null && initVectorBase64 == null) {
- log.info("This server is configured to use an unencrypted API key database. See the settings documentation.");
- return new BaseNsaApiDbImpl<>(cdb,
- new NsaSimpleApiKeyFactory());
- } else if (keyBase64 == null) {
- // neither or both, otherwise something's goofed
- throw new missingReqdSetting("cambria.secureConfig.key");
- } else if (initVectorBase64 == null) {
- // neither or both, otherwise something's goofed
- throw new missingReqdSetting("cambria.secureConfig.iv");
- } else {
- log.info("This server is configured to use an encrypted API key database.");
- final Key key = EncryptingLayer.readSecretKey(keyBase64);
- final byte[] iv = rrConvertor.base64Decode(initVectorBase64);
- return new EncryptingApiDbImpl<>(cdb,
- new NsaSimpleApiKeyFactory(), key, iv);
- }
- }
-
- /**
- * @return
- * returns settings
- */
-
-
-
-
- /**
- * @param settings
- * set settings
- */
-
-
-
-
- /**
- * @return
- * returns cbd
- */
- public DMaaPZkConfigDb getCdb() {
- return cdb;
- }
- /**
- * @param cdb
- * set cdb
- */
- public void setCdb(DMaaPZkConfigDb cdb) {
- this.cdb = cdb;
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java b/src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java
deleted file mode 100644
index 78a7426..0000000
--- a/src/main/java/com/att/dmf/mr/beans/DMaaPZkClient.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import org.I0Itec.zkclient.ZkClient;
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.dmf.mr.utils.ConfigurationReader;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-/**
- * Created for Zookeeper client which will read configuration and settings parameter
- * @author nilanjana.maity
- *
- */
-public class DMaaPZkClient extends ZkClient {
-
- /**
- * This constructor will get the settings value from rrNvReadable
- * and ConfigurationReader's zookeeper connection
- * @param settings
- */
- public DMaaPZkClient(@Qualifier("propertyReader") rrNvReadable settings) {
- super(ConfigurationReader.getMainZookeeperConnectionString());
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java b/src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java
deleted file mode 100644
index 5aa25fa..0000000
--- a/src/main/java/com/att/dmf/mr/beans/DMaaPZkConfigDb.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import org.springframework.beans.factory.annotation.Qualifier;
-
-import com.att.dmf.mr.utils.ConfigurationReader;
-import com.att.nsa.configs.confimpl.ZkConfigDb;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-/**
- * Provide the zookeeper config db connection
- * @author nilanjana.maity
- *
- */
-public class DMaaPZkConfigDb extends ZkConfigDb {
- /**
- * This Constructor will provide the configuration details from the property reader
- * and DMaaPZkClient
- * @param zk
- * @param settings
- */
- public DMaaPZkConfigDb(@Qualifier("dMaaPZkClient") DMaaPZkClient zk,
- @Qualifier("propertyReader") rrNvReadable settings) {
-
-
- super(ConfigurationReader.getMainZookeeperConnectionString(),ConfigurationReader.getMainZookeeperConnectionSRoot());
-
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/LogDetails.java b/src/main/java/com/att/dmf/mr/beans/LogDetails.java
deleted file mode 100644
index b7fb325..0000000
--- a/src/main/java/com/att/dmf/mr/beans/LogDetails.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/**
- *
- */
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.util.Date;
-
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.utils.Utils;
-
-/**
- * @author muzainulhaque.qazi
- *
- */
-
-public class LogDetails {
-
- private String publisherId;
- private String topicId;
- private String subscriberGroupId;
- private String subscriberId;
- private String publisherIp;
- private String messageBatchId;
- private String messageSequence;
- private String messageTimestamp;
- private String consumeTimestamp;
- private String transactionIdTs;
- private String serverIp;
-
- private long messageLengthInBytes;
- private long totalMessageCount;
-
- private boolean transactionEnabled;
- /**
- * This is for transaction enabled logging details
- *
- */
- public LogDetails() {
- super();
- }
-
- public String getTransactionId() {
- StringBuilder transactionId = new StringBuilder();
- transactionId.append(transactionIdTs);
- transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
- transactionId.append(publisherIp);
- transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
- transactionId.append(messageBatchId);
- transactionId.append(CambriaConstants.TRANSACTION_ID_SEPARATOR);
- transactionId.append(messageSequence);
-
- return transactionId.toString();
- }
-
- public String getPublisherId() {
- return publisherId;
- }
-
- public void setPublisherId(String publisherId) {
- this.publisherId = publisherId;
- }
-
- public String getTopicId() {
- return topicId;
- }
-
- public void setTopicId(String topicId) {
- this.topicId = topicId;
- }
-
- public String getSubscriberGroupId() {
- return subscriberGroupId;
- }
-
- public void setSubscriberGroupId(String subscriberGroupId) {
- this.subscriberGroupId = subscriberGroupId;
- }
-
- public String getSubscriberId() {
- return subscriberId;
- }
-
- public void setSubscriberId(String subscriberId) {
- this.subscriberId = subscriberId;
- }
-
- public String getPublisherIp() {
- return publisherIp;
- }
-
- public void setPublisherIp(String publisherIp) {
- this.publisherIp = publisherIp;
- }
-
- public String getMessageBatchId() {
- return messageBatchId;
- }
-
- public void setMessageBatchId(Long messageBatchId) {
- this.messageBatchId = Utils.getFromattedBatchSequenceId(messageBatchId);
- }
-
- public String getMessageSequence() {
- return messageSequence;
- }
-
- public void setMessageSequence(String messageSequence) {
- this.messageSequence = messageSequence;
- }
-
- public String getMessageTimestamp() {
- return messageTimestamp;
- }
-
- public void setMessageTimestamp(String messageTimestamp) {
- this.messageTimestamp = messageTimestamp;
- }
-
- public String getPublishTimestamp() {
- return Utils.getFormattedDate(new Date());
- }
-
- public String getConsumeTimestamp() {
- return consumeTimestamp;
- }
-
- public void setConsumeTimestamp(String consumeTimestamp) {
- this.consumeTimestamp = consumeTimestamp;
- }
-
- public long getMessageLengthInBytes() {
- return messageLengthInBytes;
- }
-
- public void setMessageLengthInBytes(long messageLengthInBytes) {
- this.messageLengthInBytes = messageLengthInBytes;
- }
-
- public long getTotalMessageCount() {
- return totalMessageCount;
- }
-
- public void setTotalMessageCount(long totalMessageCount) {
- this.totalMessageCount = totalMessageCount;
- }
-
- public boolean isTransactionEnabled() {
- return transactionEnabled;
- }
-
- public void setTransactionEnabled(boolean transactionEnabled) {
- this.transactionEnabled = transactionEnabled;
- }
-
- public String getTransactionIdTs() {
- return transactionIdTs;
- }
-
- public void setTransactionIdTs(String transactionIdTs) {
- this.transactionIdTs = transactionIdTs;
- }
-
- public String getPublisherLogDetails() {
-
- StringBuilder buffer = new StringBuilder();
- buffer.append("[publisherId=" + publisherId);
- buffer.append(", topicId=" + topicId);
- buffer.append(", messageTimestamp=" + messageTimestamp);
- buffer.append(", publisherIp=" + publisherIp);
- buffer.append(", messageBatchId=" + messageBatchId);
- buffer.append(", messageSequence=" + messageSequence );
- buffer.append(", messageLengthInBytes=" + messageLengthInBytes);
- buffer.append(", transactionEnabled=" + transactionEnabled);
- buffer.append(", transactionId=" + getTransactionId());
- buffer.append(", publishTimestamp=" + getPublishTimestamp());
- buffer.append(", serverIp=" + getServerIp()+"]");
- return buffer.toString();
-
- }
-
- public String getServerIp() {
- return serverIp;
- }
-
- public void setServerIp(String serverIp) {
- this.serverIp = serverIp;
- }
-
- public void setMessageBatchId(String messageBatchId) {
- this.messageBatchId = messageBatchId;
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/beans/TopicBean.java b/src/main/java/com/att/dmf/mr/beans/TopicBean.java
deleted file mode 100644
index a397921..0000000
--- a/src/main/java/com/att/dmf/mr/beans/TopicBean.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- *
- */
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.beans;
-
-import java.io.Serializable;
-
-import javax.xml.bind.annotation.XmlRootElement;
-
-/**
- * @author muzainulhaque.qazi
- *
- */
-@XmlRootElement
-public class TopicBean implements Serializable {
-
- private static final long serialVersionUID = -8620390377775457949L;
- private String topicName;
- private String topicDescription;
-
- private int partitionCount;
- private int replicationCount;
-
- private boolean transactionEnabled;
-
- /**
- * constructor
- */
- public TopicBean() {
- super();
- }
-
- /**
- * constructor initialization with topic details name, description,
- * partition, replication, transaction
- *
- * @param topicName
- * @param description
- * @param partitionCount
- * @param replicationCount
- * @param transactionEnabled
- */
- public TopicBean(String topicName, String topicDescription, int partitionCount, int replicationCount,
- boolean transactionEnabled) {
- super();
- this.topicName = topicName;
- this.topicDescription = topicDescription;
- this.partitionCount = partitionCount;
- this.replicationCount = replicationCount;
- this.transactionEnabled = transactionEnabled;
- }
-
- /**
- * @return
- * returns topic name which is of String type
- */
- public String getTopicName() {
- return topicName;
- }
-
- /**
- * @param topicName
- * set topic name
- */
- public void setTopicName(String topicName) {
- this.topicName = topicName;
- }
-
-
- /**
- * @return
- * returns partition count which is of int type
- */
- public int getPartitionCount() {
- return partitionCount;
- }
-
- /**
- * @param partitionCount
- * set partition Count
- */
- public void setPartitionCount(int partitionCount) {
- this.partitionCount = partitionCount;
- }
-
- /**
- * @return
- * returns replication count which is of int type
- */
- public int getReplicationCount() {
- return replicationCount;
- }
-
- /**
- * @param
- * set replication count which is of int type
- */
- public void setReplicationCount(int replicationCount) {
- this.replicationCount = replicationCount;
- }
-
- /**
- * @return
- * returns boolean value which indicates whether transaction is Enabled
- */
- public boolean isTransactionEnabled() {
- return transactionEnabled;
- }
-
- /**
- * @param
- * sets boolean value which indicates whether transaction is Enabled
- */
- public void setTransactionEnabled(boolean transactionEnabled) {
- this.transactionEnabled = transactionEnabled;
- }
-
- /**
- *
- * @return returns description which is of String type
- */
- public String getTopicDescription() {
- return topicDescription;
- }
- /**
- *
- * @param topicDescription
- * set description which is of String type
- */
- public void setTopicDescription(String topicDescription) {
- this.topicDescription = topicDescription;
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/constants/CambriaConstants.java b/src/main/java/com/att/dmf/mr/constants/CambriaConstants.java
deleted file mode 100644
index cb6653c..0000000
--- a/src/main/java/com/att/dmf/mr/constants/CambriaConstants.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.constants;
-
-import com.att.dmf.mr.utils.Utils;
-
-/**
- * This is the constant files for all the property or parameters.
- * @author nilanjana.maity
- *
- */
-public interface CambriaConstants {
-
- String CAMBRIA = "Cambria";
- String DMAAP = "DMaaP";
-
- String kDefault_ZkRoot = "/fe3c/cambria";
-
- String kSetting_ZkConfigDbRoot = "config.zk.root";
- String kDefault_ZkConfigDbRoot = kDefault_ZkRoot + "/config";
-String msgRtr_prop="MsgRtrApi.properties";
- String kBrokerType = "broker.type";
-
- /**
- * value to use to signal kafka broker type.
- */
- String kBrokerType_Kafka = "kafka";
- String kBrokerType_Memory = "memory";
- String kSetting_AdminSecret = "authentication.adminSecret";
-
- String kSetting_ApiNodeIdentifier = "cambria.api.node.identifier";
-
- /**
- * value to use to signal max empty poll per minute
- */
- String kSetting_MaxEmptyPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
- String kSetting_MaxPollsPerMinute = "cambria.rateLimit.maxEmptyPollsPerMinute";
- double kDefault_MaxEmptyPollsPerMinute = 10.0;
-
- String kSetting_SleepMsOnRateLimit = "cambria.rateLimit.delay.ms";
- String kSetting_SleepMsRealOnRateLimit = "cambria.rateLimitActual.delay.ms";
- long kDefault_SleepMsOnRateLimit = Utils.getSleepMsForRate ( kDefault_MaxEmptyPollsPerMinute );
-
- String kSetting_RateLimitWindowLength = "cambria.rateLimit.window.minutes";
- int kDefault_RateLimitWindowLength = 5;
-
- String kConfig = "c";
-
- String kSetting_Port = "cambria.service.port";
- /**
- * value to use to signal default port
- */
- int kDefault_Port = 3904;
-
- String kSetting_MaxThreads = "tomcat.maxthreads";
- int kDefault_MaxThreads = -1;
-
-
-
- //String kDefault_TomcatProtocolClass = Http11NioProtocol.class.getName ();
-
- String kSetting_ZkConfigDbServers = "config.zk.servers";
-
- /**
- * value to indicate localhost port number
- */
- String kDefault_ZkConfigDbServers = "localhost:2181";
-
- /**
- * value to use to signal Session time out
- */
- String kSetting_ZkSessionTimeoutMs = "cambria.consumer.cache.zkSessionTimeout";
- int kDefault_ZkSessionTimeoutMs = 20 * 1000;
-
- /**
- * value to use to signal connection time out
- */
- String kSetting_ZkConnectionTimeoutMs = "cambria.consumer.cache.zkConnectionTimeout";
- int kDefault_ZkConnectionTimeoutMs = 5 * 1000;
-
- String TRANSACTION_ID_SEPARATOR = "::";
-
- /**
- * value to use to signal there's no timeout on the consumer request.
- */
- public static final int kNoTimeout = 10000;
-
- /**
- * value to use to signal no limit in the number of messages returned.
- */
- public static final int kNoLimit = 0;
-
- /**
- * value to use to signal that the caller wants the next set of events
- */
- public static final int kNextOffset = -1;
-
- /**
- * value to use to signal there's no filter on the response stream.
- */
- public static final String kNoFilter = "";
-
- //Added for Metric publish
- public static final int kStdCambriaServicePort = 3904;
- public static final String kBasePath = "/events/";
-
-}
diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java b/src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java
deleted file mode 100644
index de66617..0000000
--- a/src/main/java/com/att/dmf/mr/exception/DMaaPAccessDeniedException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.exception;
-
-import com.att.dmf.mr.CambriaApiException;
-
-public class DMaaPAccessDeniedException extends CambriaApiException{
-
-
-
- public DMaaPAccessDeniedException(ErrorResponse errRes) {
- super(errRes);
-
- }
-
- /**
- *
- */
- private static final long serialVersionUID = 1L;
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java b/src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java
deleted file mode 100644
index 304c15b..0000000
--- a/src/main/java/com/att/dmf/mr/exception/DMaaPCambriaExceptionMapper.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.exception;
-
-import javax.inject.Singleton;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-
-import org.apache.http.HttpStatus;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * Exception Mapper class to handle
- * CambriaApiException
- * @author rajashree.khare
- *
- */
-@Provider
-@Singleton
-public class DMaaPCambriaExceptionMapper implements ExceptionMapper<CambriaApiException>{
-
-private ErrorResponse errRes;
-
-
-private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPCambriaExceptionMapper.class);
-
- @Autowired
- private DMaaPErrorMessages msgs;
-
- public DMaaPCambriaExceptionMapper() {
- super();
- LOGGER.info("Cambria Exception Mapper Created..");
- }
-
- @Override
- public Response toResponse(CambriaApiException ex) {
-
- LOGGER.info("Reached Cambria Exception Mapper..");
-
- /**
- * Cambria Generic Exception
- */
- if(ex instanceof CambriaApiException)
- {
-
- errRes = ex.getErrRes();
- if(errRes!=null) {
-
- return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
- .build();
- }
- else
- {
- return Response.status(ex.getStatus()).entity(ex.getMessage()).type(MediaType.APPLICATION_JSON)
- .build();
- }
-
-
- }
- else
- {
- errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED, DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), msgs.getServerUnav());
- return Response.status(HttpStatus.SC_EXPECTATION_FAILED).entity(errRes).type(MediaType.APPLICATION_JSON).build();
- }
-
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java b/src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java
deleted file mode 100644
index 409aa60..0000000
--- a/src/main/java/com/att/dmf/mr/exception/DMaaPErrorMessages.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.exception;
-
-import javax.annotation.PostConstruct;
-
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.stereotype.Component;
-import org.springframework.web.context.support.SpringBeanAutowiringSupport;
-
-/**
- * This Class reads the error message properties
- * from the properties file
- * @author rajashree.khare
- *
- */
-@Component
-public class DMaaPErrorMessages {
-
-
-
-
- //@Value("${resource.not.found}")
- private String notFound="The requested resource was not found.Please verify the URL and try again";
-
-// @Value("${server.unavailable}")
- private String serverUnav="Server is temporarily unavailable or busy.Try again later, or try another server in the cluster.";
-
-// @Value("${http.method.not.allowed}")
- private String methodNotAllowed="The specified HTTP method is not allowed for the requested resource.Enter a valid HTTP method and try again.";
-
- //@Value("${incorrect.request.json}")
- private String badRequest="Incorrect JSON object. Please correct the JSON format and try again.";
-
-// @Value("${network.time.out}")
- private String nwTimeout="Connection to the DMaaP MR was timed out.Please try again.";
-
- //@Value("${get.topic.failure}")
- private String topicsfailure="Failed to retrieve list of all topics.";
-
- //@Value("${not.permitted.access.1}")
- private String notPermitted1="Access Denied.User does not have permission to perform";
-
- //@Value("${not.permitted.access.2}")
- private String notPermitted2="operation on Topic:";
-
- //@Value("${get.topic.details.failure}")
- private String topicDetailsFail="Failed to retrieve details of topic:";
-
- //@Value("${create.topic.failure}")
- private String createTopicFail="Failed to create topic:";
-
- //@Value("${delete.topic.failure}")
- private String deleteTopicFail="Failed to delete topic:";
-
- //@Value("${incorrect.json}")
- private String incorrectJson="Incorrect JSON object.Could not parse JSON. Please correct the JSON format and try again.";
-
- //@Value("${consume.msg.error}")
- private String consumeMsgError="Error while reading data from topic.";
-
- //@Value("${publish.msg.error}")
- private String publishMsgError="Error while publishing data to topic.";
-
-
- //@Value("${publish.msg.count}")
- private String publishMsgCount="Successfully published number of messages :";
-
-
- //@Value("${authentication.failure}")
- private String authFailure="Access Denied: Invalid Credentials. Enter a valid MechId and Password and try again.";
- //@Value("${msg_size_exceeds}")
- private String msgSizeExceeds="Message size exceeds the default size.";
-
-
- //@Value("${topic.not.exist}")
- private String topicNotExist="No such topic exists.";
-
- public String getMsgSizeExceeds() {
- return msgSizeExceeds;
- }
-
- public void setMsgSizeExceeds(String msgSizeExceeds) {
- this.msgSizeExceeds = msgSizeExceeds;
- }
-
- public String getNotFound() {
- return notFound;
- }
-
- public void setNotFound(String notFound) {
- this.notFound = notFound;
- }
-
- public String getServerUnav() {
- return serverUnav;
- }
-
- public void setServerUnav(String serverUnav) {
- this.serverUnav = serverUnav;
- }
-
- public String getMethodNotAllowed() {
- return methodNotAllowed;
- }
-
- public void setMethodNotAllowed(String methodNotAllowed) {
- this.methodNotAllowed = methodNotAllowed;
- }
-
- public String getBadRequest() {
- return badRequest;
- }
-
- public void setBadRequest(String badRequest) {
- this.badRequest = badRequest;
- }
-
- public String getNwTimeout() {
- return nwTimeout;
- }
-
- public void setNwTimeout(String nwTimeout) {
- this.nwTimeout = nwTimeout;
- }
-
- public String getNotPermitted1() {
- return notPermitted1;
- }
-
- public void setNotPermitted1(String notPermitted1) {
- this.notPermitted1 = notPermitted1;
- }
-
- public String getNotPermitted2() {
- return notPermitted2;
- }
-
- public void setNotPermitted2(String notPermitted2) {
- this.notPermitted2 = notPermitted2;
- }
-
- public String getTopicsfailure() {
- return topicsfailure;
- }
-
- public void setTopicsfailure(String topicsfailure) {
- this.topicsfailure = topicsfailure;
- }
-
- public String getTopicDetailsFail() {
- return topicDetailsFail;
- }
-
- public void setTopicDetailsFail(String topicDetailsFail) {
- this.topicDetailsFail = topicDetailsFail;
- }
-
- public String getCreateTopicFail() {
- return createTopicFail;
- }
-
- public void setCreateTopicFail(String createTopicFail) {
- this.createTopicFail = createTopicFail;
- }
-
- public String getIncorrectJson() {
- return incorrectJson;
- }
-
- public void setIncorrectJson(String incorrectJson) {
- this.incorrectJson = incorrectJson;
- }
-
- public String getDeleteTopicFail() {
- return deleteTopicFail;
- }
-
- public void setDeleteTopicFail(String deleteTopicFail) {
- this.deleteTopicFail = deleteTopicFail;
- }
-
- public String getConsumeMsgError() {
- return consumeMsgError;
- }
-
- public void setConsumeMsgError(String consumeMsgError) {
- this.consumeMsgError = consumeMsgError;
- }
-
- public String getPublishMsgError() {
- return publishMsgError;
- }
-
- public void setPublishMsgError(String publishMsgError) {
- this.publishMsgError = publishMsgError;
- }
-
- public String getPublishMsgCount() {
- return publishMsgCount;
- }
-
- public String getAuthFailure() {
- return authFailure;
- }
-
- public void setAuthFailure(String authFailure) {
- this.authFailure = authFailure;
- }
-
- public void setPublishMsgCount(String publishMsgCount) {
- this.publishMsgCount = publishMsgCount;
- }
-
- public String getTopicNotExist() {
- return topicNotExist;
- }
-
- public void setTopicNotExist(String topicNotExist) {
- this.topicNotExist = topicNotExist;
- }
-
-
- @PostConstruct
- public void init() {
- SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(this);
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java b/src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java
deleted file mode 100644
index 593863a..0000000
--- a/src/main/java/com/att/dmf/mr/exception/DMaaPResponseCode.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.exception;
-
-/**
- * Define the Error Response Codes for MR
- * using this enumeration
- * @author rajashree.khare
- *
- */
-public enum DMaaPResponseCode {
-
-
- /**
- * GENERIC
- */
- RESOURCE_NOT_FOUND(3001),
- SERVER_UNAVAILABLE(3002),
- METHOD_NOT_ALLOWED(3003),
- GENERIC_INTERNAL_ERROR(1004),
- /**
- * AAF
- */
- INVALID_CREDENTIALS(4001),
- ACCESS_NOT_PERMITTED(4002),
- UNABLE_TO_AUTHORIZE(4003),
- /**
- * PUBLISH AND SUBSCRIBE
- */
- MSG_SIZE_EXCEEDS_BATCH_LIMIT(5001),
- UNABLE_TO_PUBLISH(5002),
- INCORRECT_BATCHING_FORMAT(5003),
- MSG_SIZE_EXCEEDS_MSG_LIMIT(5004),
- INCORRECT_JSON(5005),
- CONN_TIMEOUT(5006),
- PARTIAL_PUBLISH_MSGS(5007),
- CONSUME_MSG_ERROR(5008),
- PUBLISH_MSG_ERROR(5009),
- RETRIEVE_TRANSACTIONS(5010),
- RETRIEVE_TRANSACTIONS_DETAILS(5011),
- TOO_MANY_REQUESTS(5012),
-
- RATE_LIMIT_EXCEED(301),
-
- /**
- * TOPICS
- */
- GET_TOPICS_FAIL(6001),
- GET_TOPICS_DETAILS_FAIL(6002),
- CREATE_TOPIC_FAIL(6003),
- DELETE_TOPIC_FAIL(6004),
- GET_PUBLISHERS_BY_TOPIC(6005),
- GET_CONSUMERS_BY_TOPIC(6006),
- PERMIT_PUBLISHER_FOR_TOPIC(6007),
- REVOKE_PUBLISHER_FOR_TOPIC(6008),
- PERMIT_CONSUMER_FOR_TOPIC(6009),
- REVOKE_CONSUMER_FOR_TOPIC(6010),
- GET_CONSUMER_CACHE(6011),
- DROP_CONSUMER_CACHE(6012),
- GET_METRICS_ERROR(6013),
- GET_BLACKLIST(6014),
- ADD_BLACKLIST(6015),
- REMOVE_BLACKLIST(6016),
- TOPIC_NOT_IN_AAF(6017);
- private int responseCode;
-
- public int getResponseCode() {
- return responseCode;
- }
- private DMaaPResponseCode (final int code) {
- responseCode = code;
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java b/src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java
deleted file mode 100644
index db691bd..0000000
--- a/src/main/java/com/att/dmf/mr/exception/DMaaPWebExceptionMapper.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.exception;
-
-import javax.inject.Singleton;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.NotAllowedException;
-import javax.ws.rs.NotAuthorizedException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-
-import org.apache.http.HttpStatus;
-
-import org.springframework.beans.factory.annotation.Autowired;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * Exception Mapper class to handle
- * Jersey Exceptions
- * @author rajashree.khare
- *
- */
-@Provider
-@Singleton
-public class DMaaPWebExceptionMapper implements ExceptionMapper<WebApplicationException>{
-
-
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DMaaPWebExceptionMapper.class);
- private ErrorResponse errRes;
-
- @Autowired
- private DMaaPErrorMessages msgs;
-
- public DMaaPWebExceptionMapper() {
- super();
- LOGGER.info("WebException Mapper Created..");
- }
-
- @Override
- public Response toResponse(WebApplicationException ex) {
-
- LOGGER.info("Reached WebException Mapper");
-
- /**
- * Resource Not Found
- */
- if(ex instanceof NotFoundException)
- {
- errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),msgs.getNotFound());
-
- LOGGER.info(errRes.toString());
-
- return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
- .build();
-
- }
-
- if(ex instanceof InternalServerErrorException)
- {
- errRes = new ErrorResponse(HttpStatus.SC_INTERNAL_SERVER_ERROR,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
-
- LOGGER.info(errRes.toString());
- return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
- .build();
-
- }
-
- if(ex instanceof NotAuthorizedException)
- {
- errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),msgs.getAuthFailure());
-
- LOGGER.info(errRes.toString());
- return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
- .build();
- }
-
- if(ex instanceof BadRequestException)
- {
- errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,DMaaPResponseCode.INCORRECT_JSON.getResponseCode(),msgs.getBadRequest());
-
- LOGGER.info(errRes.toString());
- return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
- .build();
- }
- if(ex instanceof NotAllowedException)
- {
- errRes = new ErrorResponse(HttpStatus.SC_METHOD_NOT_ALLOWED,DMaaPResponseCode.METHOD_NOT_ALLOWED.getResponseCode(),msgs.getMethodNotAllowed());
-
- LOGGER.info(errRes.toString());
- return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
- .build();
- }
-
- if(ex instanceof ServiceUnavailableException)
- {
- errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),msgs.getServerUnav());
-
- LOGGER.info(errRes.toString());
- return Response.status(errRes.getHttpStatusCode()).entity(errRes).type(MediaType.APPLICATION_JSON)
- .build();
- }
-
-
- return Response.serverError().build();
- }
-
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/exception/ErrorResponse.java b/src/main/java/com/att/dmf/mr/exception/ErrorResponse.java
deleted file mode 100644
index c92cadd..0000000
--- a/src/main/java/com/att/dmf/mr/exception/ErrorResponse.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.exception;
-import org.json.JSONObject;
-/**
- * Represents the Error Response Object
- * that is rendered as a JSON object when
- * an exception or error occurs on MR Rest Service.
- * @author rajashree.khare
- *
- */
-//@XmlRootElement
-public class ErrorResponse {
-
- private int httpStatusCode;
- private int mrErrorCode;
- private String errorMessage;
- private String helpURL;
- private String statusTs;
- private String topic;
- private String publisherId;
- private String publisherIp;
- private String subscriberId;
- private String subscriberIp;
-
-
- public ErrorResponse(int httpStatusCode, int mrErrorCode,
- String errorMessage, String helpURL, String statusTs, String topic,
- String publisherId, String publisherIp, String subscriberId,
- String subscriberIp) {
- super();
- this.httpStatusCode = httpStatusCode;
- this.mrErrorCode = mrErrorCode;
- this.errorMessage = errorMessage;
- this.helpURL = "http://onap.readthedocs.io";
- this.statusTs = statusTs;
- this.topic = topic;
- this.publisherId = publisherId;
- this.publisherIp = publisherIp;
- this.subscriberId = subscriberId;
- this.subscriberIp = subscriberIp;
- }
-
- public ErrorResponse(int httpStatusCode, int mrErrorCode,
- String errorMessage) {
- super();
- this.httpStatusCode = httpStatusCode;
- this.mrErrorCode = mrErrorCode;
- this.errorMessage = errorMessage;
- this.helpURL = "http://onap.readthedocs.io";
-
- }
-
- public int getHttpStatusCode() {
- return httpStatusCode;
- }
-
- public void setHttpStatusCode(int httpStatusCode) {
- this.httpStatusCode = httpStatusCode;
- }
-
- public int getMrErrorCode() {
- return mrErrorCode;
- }
-
-
- public void setMrErrorCode(int mrErrorCode) {
- this.mrErrorCode = mrErrorCode;
- }
-
-
- public String getErrorMessage() {
- return errorMessage;
- }
-
- public void setErrorMessage(String errorMessage) {
- this.errorMessage = errorMessage;
- }
-
- public String getHelpURL() {
- return helpURL;
- }
-
- public void setHelpURL(String helpURL) {
- this.helpURL = helpURL;
- }
-
- @Override
- public String toString() {
- return "ErrorResponse {\"httpStatusCode\":\"" + httpStatusCode
- + "\", \"mrErrorCode\":\"" + mrErrorCode + "\", \"errorMessage\":\""
- + errorMessage + "\", \"helpURL\":\"" + helpURL + "\", \"statusTs\":\""+statusTs+"\""
- + ", \"topicId\":\""+topic+"\", \"publisherId\":\""+publisherId+"\""
- + ", \"publisherIp\":\""+publisherIp+"\", \"subscriberId\":\""+subscriberId+"\""
- + ", \"subscriberIp\":\""+subscriberIp+"\"}";
- }
-
- public String getErrMapperStr1() {
- return "ErrorResponse [httpStatusCode=" + httpStatusCode + ", mrErrorCode=" + mrErrorCode + ", errorMessage="
- + errorMessage + ", helpURL=" + helpURL + "]";
- }
-
-
-
- public JSONObject getErrMapperStr() {
- JSONObject o = new JSONObject();
- o.put("status", getHttpStatusCode());
- o.put("mrstatus", getMrErrorCode());
- o.put("message", getErrorMessage());
- o.put("helpURL", getHelpURL());
- return o;
- }
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java b/src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java
deleted file mode 100644
index 64b20e8..0000000
--- a/src/main/java/com/att/dmf/mr/listener/CambriaServletContextListener.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.listener;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * This is the Cambria Servlet Context Listner which helpes while loading the app which provide the endpoints
- * @author nilanjana.maity
- *
- */
-public class CambriaServletContextListener implements ServletContextListener {
-
- DME2EndPointLoader loader = DME2EndPointLoader.getInstance();
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaServletContextListener.class);
-
-
- @Override
-
- /**
- * contextDestroyed() loads unpublished end points
- * @param arg0
- */
- public void contextDestroyed(ServletContextEvent arg0) {
- log.info("CambriaServletContextListener contextDestroyed");
-
- loader.unPublishEndPoints();
- }
-
- @Override
- /**
- * contextInitialized() loads published end points
- * @param arg0
- */
- public void contextInitialized(ServletContextEvent arg0) {
- log.info("CambriaServletContextListener contextInitialized");
- loader.publishEndPoints();
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java b/src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java
deleted file mode 100644
index f61b6ea..0000000
--- a/src/main/java/com/att/dmf/mr/listener/DME2EndPointLoader.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.listener;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-import com.att.aft.dme2.manager.registry.DME2EndpointRegistry;
-import com.att.aft.dme2.api.DME2Exception;
-import com.att.aft.dme2.api.DME2Manager;
-import com.att.dmf.mr.service.impl.EventsServiceImpl;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public class DME2EndPointLoader {
-
- private String latitude;
- private String longitude;
- private String version;
- private String serviceName;
- private String env;
- private String routeOffer;
- private String hostName;
- private String port;
- private String contextPath;
- private String protocol;
- private String serviceURL;
- private static DME2EndPointLoader loader = new DME2EndPointLoader();
-
- private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
- private DME2EndPointLoader() {
- }
-
- public static DME2EndPointLoader getInstance() {
- return loader;
- }
-
- /**
- * publishing endpoints
- */
- public void publishEndPoints() {
-
- try {
- InputStream input = this.getClass().getResourceAsStream("/endpoint.properties");
- Properties props = new Properties();
- props.load(input);
-
- latitude = props.getProperty("Latitude");
- longitude = props.getProperty("Longitude");
- version = props.getProperty("Version");
- serviceName = props.getProperty("ServiceName");
- env = props.getProperty("Environment");
- routeOffer = props.getProperty("RouteOffer");
- hostName = props.getProperty("HostName");
- port = props.getProperty("Port");
- contextPath = props.getProperty("ContextPath");
- protocol = props.getProperty("Protocol");
-
- System.setProperty("AFT_LATITUDE", latitude);
- System.setProperty("AFT_LONGITUDE", longitude);
- System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
-
- serviceURL = "service=" + serviceName + "/" + "version=" + version + "/" + "envContext=" + env + "/"
- + "routeOffer=" + routeOffer;
-
- DME2Manager manager = new DME2Manager("testEndpointPublish", props);
- manager.setClientCredentials("sh301n", "");
- DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
- // Publish API takes service name, context path, hostname, port and
- // protocol as args
- svcRegistry.publish(serviceURL, contextPath, hostName, Integer.parseInt(port), protocol);
-
- } catch (IOException | DME2Exception e) {
- LOG.error("Failed due to :" + e);
- }
-
- }
-/**
- * unpublishing endpoints
- */
- public void unPublishEndPoints() {
-
- DME2Manager manager;
- try {
- System.setProperty("AFT_LATITUDE", latitude);
- System.setProperty("AFT_LONGITUDE", longitude);
- System.setProperty("AFT_ENVIRONMENT", "AFTUAT");
-
- manager = DME2Manager.getDefaultInstance();
- DME2EndpointRegistry svcRegistry = manager.getEndpointRegistry();
- svcRegistry.unpublish(serviceURL, hostName, Integer.parseInt(port));
- } catch (DME2Exception e) {
- LOG.error("Failed due to DME2Exception" + e);
- }
-
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/metabroker/Broker.java b/src/main/java/com/att/dmf/mr/metabroker/Broker.java
deleted file mode 100644
index e5fe8da..0000000
--- a/src/main/java/com/att/dmf/mr/metabroker/Broker.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metabroker;
-
-import java.util.List;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * A broker interface to manage metadata around topics, etc.
- *
- * @author peter
- *
- */
-public interface Broker {
- /**
- *
- * @author anowarul.islam
- *
- */
- public class TopicExistsException extends Exception {
- /**
- *
- * @param topicName
- */
- public TopicExistsException(String topicName) {
- super("Topic " + topicName + " exists.");
- }
-
- private static final long serialVersionUID = 1L;
- }
-
- /**
- * Get all topics in the underlying broker.
- *
- * @return
- * @throws ConfigDbException
- */
- List<Topic> getAllTopics() throws ConfigDbException;
-
- /**
- * Get a specific topic from the underlying broker.
- *
- * @param topic
- * @return a topic, or null
- */
- Topic getTopic(String topic) throws ConfigDbException;
-
- /**
- * create a topic
- *
- * @param topic
- * @param description
- * @param ownerApiKey
- * @param partitions
- * @param replicas
- * @param transactionEnabled
- * @return
- * @throws TopicExistsException
- * @throws CambriaApiException
- */
- Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
- boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException;
-
- /**
- * Delete a topic by name
- *
- * @param topic
- */
- void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException;
-}
diff --git a/src/main/java/com/att/dmf/mr/metabroker/Broker1.java b/src/main/java/com/att/dmf/mr/metabroker/Broker1.java
deleted file mode 100644
index e7d7f6c..0000000
--- a/src/main/java/com/att/dmf/mr/metabroker/Broker1.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metabroker;
-
-import java.util.List;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * A broker interface to manage metadata around topics, etc.
- * alternate for Broker1 to avoid this error in spring boot
- *org.springframework.beans.factory.NoUniqueBeanDefinitionException:
- * No qualifying bean of type [com.att.dmf.mr.metabroker.Broker] is defined:
- * expected single matching bean but found 2: mmb,dMaaPKafkaMetaBroker
-
- *
- */
-public interface Broker1 {
- /**
- *
- * @author Ramkumar
- *
- */
- public class TopicExistsException extends Exception {
- /**
- *
- * @param topicName
- */
- public TopicExistsException(String topicName) {
- super("Topic " + topicName + " exists.");
- }
-
- private static final long serialVersionUID = 1L;
- }
-
- /**
- * Get all topics in the underlying broker.
- *
- * @return
- * @throws ConfigDbException
- */
- List<Topic> getAllTopics() throws ConfigDbException;
-
- /**
- * Get a specific topic from the underlying broker.
- *
- * @param topic
- * @return a topic, or null
- */
- Topic getTopic(String topic) throws ConfigDbException;
-
- /**
- * create a topic
- *
- * @param topic
- * @param description
- * @param ownerApiKey
- * @param partitions
- * @param replicas
- * @param transactionEnabled
- * @return
- * @throws TopicExistsException
- * @throws CambriaApiException
- */
- Topic createTopic(String topic, String description, String ownerApiKey, int partitions, int replicas,
- boolean transactionEnabled) throws TopicExistsException, CambriaApiException,ConfigDbException;
-
- /**
- * Delete a topic by name
- *
- * @param topic
- */
- void deleteTopic(String topic) throws AccessDeniedException, CambriaApiException, TopicExistsException,ConfigDbException;
-}
diff --git a/src/main/java/com/att/dmf/mr/metabroker/Topic.java b/src/main/java/com/att/dmf/mr/metabroker/Topic.java
deleted file mode 100644
index d191070..0000000
--- a/src/main/java/com/att/dmf/mr/metabroker/Topic.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metabroker;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource;
-/**
- * This is the interface for topic and all the topic related operations
- * get topic name, owner, description, transactionEnabled etc.
- * @author nilanjana.maity
- *
- */
-public interface Topic extends ReadWriteSecuredResource
-{
- /**
- * User defined exception for access denied while access the topic for Publisher and consumer
- * @author nilanjana.maity
- *
- *//*
- public class AccessDeniedException extends Exception
-
- *//**
- * AccessDenied Description
- *//*
-
- *//**
- * AccessDenied Exception for the user while authenticating the user request
- * @param user
- *//*
-
- private static final long serialVersionUID = 1L;
- }*/
-
- /**
- * Get this topic's name
- * @return
- */
- String getName ();
-
- /**
- * Get the API key of the owner of this topic.
- * @return
- */
- String getOwner ();
-
- /**
- * Get a description of the topic, as set by the owner at creation time.
- * @return
- */
- String getDescription ();
-
- /**
- * If the topic is transaction enabled
- * @return boolean true/false
- */
- boolean isTransactionEnabled();
-
- /**
- * Get the ACL for reading on this topic. Can be null.
- * @return
- */
- NsaAcl getReaderAcl ();
-
- /**
- * Get the ACL for writing on this topic. Can be null.
- * @return
- */
- NsaAcl getWriterAcl ();
-
- /**
- * Check if this user can read the topic. Throw otherwise. Note that
- * user may be null.
- * @param user
- */
- void checkUserRead ( NsaApiKey user ) throws AccessDeniedException;
-
- /**
- * Check if this user can write to the topic. Throw otherwise. Note
- * that user may be null.
- * @param user
- */
- void checkUserWrite ( NsaApiKey user ) throws AccessDeniedException;
-
- /**
- * allow the given user to publish
- * @param publisherId
- * @param asUser
- */
- void permitWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
-
- /**
- * deny the given user from publishing
- * @param publisherId
- * @param asUser
- */
- void denyWritesFromUser ( String publisherId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
-
- /**
- * allow the given user to read the topic
- * @param consumerId
- * @param asUser
- */
- void permitReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
-
- /**
- * deny the given user from reading the topic
- * @param consumerId
- * @param asUser
- * @throws ConfigDbException
- */
- void denyReadsByUser ( String consumerId, NsaApiKey asUser ) throws AccessDeniedException, ConfigDbException;
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java
deleted file mode 100644
index 45644b7..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaBatchingPublisher.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-/**
- * A Cambria batching publisher is a publisher with additional functionality
- * for managing delayed sends.
- *
- * @author peter
- *
- */
-public interface CambriaBatchingPublisher extends CambriaPublisher
-{
- /**
- * Get the number of messages that have not yet been sent.
- * @return the number of pending messages
- */
- int getPendingMessageCount ();
-
- /**
- * Close this publisher, sending any remaining messages.
- * @param timeout an amount of time to wait for unsent messages to be sent
- * @param timeoutUnits the time unit for the timeout arg
- * @return a list of any unsent messages after the timeout
- * @throws IOException
- * @throws InterruptedException
- */
- List<message> close ( long timeout, TimeUnit timeoutUnits ) throws IOException, InterruptedException;
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java
deleted file mode 100644
index 4b219b1..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaClient.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher;
-
-
-
-//
-import com.att.eelf.configuration.EELFLogger;
-
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public interface CambriaClient {
- /**
- * An exception at the Cambria layer. This is used when the HTTP transport
- * layer returns a success code but the transaction is not completed as
- * expected.
- */
- public class CambriaApiException extends Exception {
- /**
- *
- * @param msg
- */
- public CambriaApiException(String msg) {
- super(msg);
- }
-
- /**
- *
- * @param msg
- * @param t
- */
- public CambriaApiException(String msg, Throwable t) {
- super(msg, t);
- }
-
- private static final long serialVersionUID = 1L;
- }
-
- /**
- * Optionally set the Logger to use
- *
- * @param log
- */
- void logTo(EELFLogger log);
-
- /**
- * Set the API credentials for this client connection. Subsequent calls will
- * include authentication headers.who i
- *
- * @param apiKey
- * @param apiSecret
- */
- void setApiCredentials(String apiKey, String apiSecret);
-
- /**
- * Remove API credentials, if any, on this connection. Subsequent calls will
- * not include authentication headers.
- */
- void clearApiCredentials();
-
- /**
- * Close this connection. Some client interfaces have additional close
- * capability.
- */
- void close();
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java
deleted file mode 100644
index 4a6ca81..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaConsumer.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher;
-
-import java.io.IOException;
-
-/**
- * This interface will provide fetch mechanism for consumer
- * @author nilanjana.maity
- *
- */
-public interface CambriaConsumer extends CambriaClient
-{
- /**
- * Fetch a set of messages. The consumer's timeout and message limit are used if set in the constructor call.
-
- * @return a set of messages
- * @throws IOException
- */
- Iterable<String> fetch () throws IOException;
-
- /**
- * Fetch a set of messages with an explicit timeout and limit for this call. These values
- * override any set in the constructor call.
- *
- * @param timeoutMs The amount of time in milliseconds that the server should keep the connection
- * open while waiting for message traffic. Use -1 for default timeout (controlled on the server-side).
- * @param limit A limit on the number of messages returned in a single call. Use -1 for no limit.
- * @return a set messages
- * @throws IOException if there's a problem connecting to the server
- */
- Iterable<String> fetch ( int timeoutMs, int limit ) throws IOException;
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java
deleted file mode 100644
index 4020a6d..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisher.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher;
-
-import java.io.IOException;
-import java.util.Collection;
-
-/**
- * A Cambria publishing interface.
- *
- * @author peter
- *
- */
-public interface CambriaPublisher extends CambriaClient {
- /**
- * A simple message container
- */
- public static class message {
- /**
- *
- * @param partition
- * @param msg
- */
- public message(String partition, String msg) {
- fPartition = partition == null ? "" : partition;
- fMsg = msg;
- if (fMsg == null) {
- throw new IllegalArgumentException("Can't send a null message.");
- }
- }
-
- /**
- *
- * @param msg
- */
- public message(message msg) {
- this(msg.fPartition, msg.fMsg);
- }
-
- /**
- * declaring partition string
- */
- public final String fPartition;
- /**
- * declaring fMsg String
- */
- public final String fMsg;
- }
-
- /**
- * Send the given message using the given partition.
- *
- * @param partition
- * @param msg
- * @return the number of pending messages
- * @throws IOException
- */
- int send(String partition, String msg) throws IOException;
-
- /**
- * Send the given message using its partition.
- *
- * @param msg
- * @return the number of pending messages
- * @throws IOException
- */
- int send(message msg) throws IOException;
-
- /**
- * Send the given messages using their partitions.
- *
- * @param msgs
- * @return the number of pending messages
- * @throws IOException
- */
- int send(Collection<message> msgs) throws IOException;
-
- /**
- * Close this publisher. It's an error to call send() after close()
- */
- void close();
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java b/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java
deleted file mode 100644
index 46dfa99..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/CambriaPublisherUtility.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher;
-
-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.http.HttpHost;
-/**
- *
- * @author anowarul.islam
- *
- */
-public class CambriaPublisherUtility
-{
- public static final String kBasePath = "/events/";
- public static final int kStdCambriaServicePort = 3904;
-/**
- *
- * Translates a string into <code>application/x-www-form-urlencoded</code>
- * format using a specific encoding scheme.
- * @param s
- * @return
- *
- */
- public static String escape ( String s )
- {
- try
- {
- return URLEncoder.encode ( s, "UTF-8");
- }
- catch ( UnsupportedEncodingException e )
- {
- throw new RuntimeException ( e );
- }
- }
-/**
- *
- * building url
- * @param rawTopic
- * @return
- */
- public static String makeUrl ( String rawTopic )
- {
- final String cleanTopic = escape ( rawTopic );
-
- final StringBuffer url = new StringBuffer().
- append ( CambriaPublisherUtility.kBasePath ).
- append ( cleanTopic );
- return url.toString ();
- }
-/**
- *
- * building consumerUrl
- * @param topic
- * @param rawConsumerGroup
- * @param rawConsumerId
- * @return
- */
- public static String makeConsumerUrl ( String topic, String rawConsumerGroup, String rawConsumerId )
- {
- final String cleanConsumerGroup = escape ( rawConsumerGroup );
- final String cleanConsumerId = escape ( rawConsumerId );
- return CambriaPublisherUtility.kBasePath + topic + "/" + cleanConsumerGroup + "/" + cleanConsumerId;
- }
-
- /**
- * Create a list of HttpHosts from an input list of strings. Input strings have
- * host[:port] as format. If the port section is not provided, the default port is used.
- *
- * @param hosts
- * @return a list of hosts
- */
- public static List<HttpHost> createHostsList(Collection<String> hosts)
- {
- final ArrayList<HttpHost> convertedHosts = new ArrayList<>();
- for ( String host : hosts )
- {
- if ( host.length () == 0 ) continue;
- convertedHosts.add ( hostForString ( host ) );
- }
- return convertedHosts;
- }
-
- /**
- * Return an HttpHost from an input string. Input string has
- * host[:port] as format. If the port section is not provided, the default port is used.
- *
- * @param hosts
- * @return a list of hosts
- * if host.length<1 throws IllegalArgumentException
- *
- */
- public static HttpHost hostForString ( String host )
- {
- if ( host.length() < 1 ) throw new IllegalArgumentException ( "An empty host entry is invalid." );
-
- String hostPart = host;
- int port = kStdCambriaServicePort;
-
- final int colon = host.indexOf ( ':' );
- if ( colon == 0 ) throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid." );
- if ( colon > 0 )
- {
- hostPart = host.substring ( 0, colon ).trim();
-
- final String portPart = host.substring ( colon + 1 ).trim();
- if ( portPart.length () > 0 )
- {
- try
- {
- port = Integer.parseInt ( portPart );
- }
- catch ( NumberFormatException x )
- {
- throw new IllegalArgumentException ( "Host entry '" + host + "' is invalid.", x );
- }
- }
- // else: use default port on "foo:"
- }
-
- return new HttpHost ( hostPart, port );
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java b/src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java
deleted file mode 100644
index d7818de..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/DMaaPCambriaClientFactory.java
+++ /dev/null
@@ -1,420 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher;
-
-import java.net.MalformedURLException;
-import java.nio.channels.NotYetConnectedException;
-import java.util.Collection;
-import java.util.TreeSet;
-import java.util.UUID;
-
-import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaConsumerImpl;
-import com.att.dmf.mr.metrics.publisher.impl.DMaaPCambriaSimplerBatchPublisher;
-
-/**
- * A factory for Cambria clients.<br/>
- * <br/>
- * Use caution selecting a consumer creator factory. If the call doesn't accept
- * a consumer group name, then it creates a consumer that is not restartable.
- * That is, if you stop your process and start it again, your client will NOT
- * receive any missed messages on the topic. If you need to ensure receipt of
- * missed messages, then you must use a consumer that's created with a group
- * name and ID. (If you create multiple consumer processes using the same group,
- * load is split across them. Be sure to use a different ID for each instance.)<br/>
- * <br/>
- * Publishers
- *
- * @author peter
- */
-public class DMaaPCambriaClientFactory {
- /**
- * Create a consumer instance with the default timeout and no limit on
- * messages returned. This consumer operates as an independent consumer
- * (i.e., not in a group) and is NOT re-startable across sessions.
- *
- * @param hostList
- * A comma separated list of hosts to use to connect to Cambria.
- * You can include port numbers (3904 is the default). For
- * example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com"
- *
- * @param topic
- * The topic to consume
- *
- * @return a consumer
- */
- public static CambriaConsumer createConsumer(String hostList, String topic) {
- return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList),
- topic);
- }
-
- /**
- * Create a consumer instance with the default timeout and no limit on
- * messages returned. This consumer operates as an independent consumer
- * (i.e., not in a group) and is NOT re-startable across sessions.
- *
- * @param hostSet
- * The host used in the URL to Cambria. Entries can be
- * "host:port".
- * @param topic
- * The topic to consume
- *
- * @return a consumer
- */
- public static CambriaConsumer createConsumer(Collection<String> hostSet,
- String topic) {
- return createConsumer(hostSet, topic, null);
- }
-
- /**
- * Create a consumer instance with server-side filtering, the default
- * timeout, and no limit on messages returned. This consumer operates as an
- * independent consumer (i.e., not in a group) and is NOT re-startable
- * across sessions.
- *
- * @param hostSet
- * The host used in the URL to Cambria. Entries can be
- * "host:port".
- * @param topic
- * The topic to consume
- * @param filter
- * a filter to use on the server side
- *
- * @return a consumer
- */
- public static CambriaConsumer createConsumer(Collection<String> hostSet,
- String topic, String filter) {
- return createConsumer(hostSet, topic, UUID.randomUUID().toString(),
- "0", -1, -1, filter, null, null);
- }
-
- /**
- * Create a consumer instance with the default timeout, and no limit on
- * messages returned. This consumer can operate in a logical group and is
- * re-startable across sessions when you use the same group and ID on
- * restart.
- *
- * @param hostSet
- * The host used in the URL to Cambria. Entries can be
- * "host:port".
- * @param topic
- * The topic to consume
- * @param consumerGroup
- * The name of the consumer group this consumer is part of
- * @param consumerId
- * The unique id of this consume in its group
- *
- * @return a consumer
- */
- public static CambriaConsumer createConsumer(Collection<String> hostSet,
- final String topic, final String consumerGroup,
- final String consumerId) {
- return createConsumer(hostSet, topic, consumerGroup, consumerId, -1, -1);
- }
-
- /**
- * Create a consumer instance with the default timeout, and no limit on
- * messages returned. This consumer can operate in a logical group and is
- * re-startable across sessions when you use the same group and ID on
- * restart.
- *
- * @param hostSet
- * The host used in the URL to Cambria. Entries can be
- * "host:port".
- * @param topic
- * The topic to consume
- * @param consumerGroup
- * The name of the consumer group this consumer is part of
- * @param consumerId
- * The unique id of this consume in its group
- * @param timeoutMs
- * The amount of time in milliseconds that the server should keep
- * the connection open while waiting for message traffic. Use -1
- * for default timeout.
- * @param limit
- * A limit on the number of messages returned in a single call.
- * Use -1 for no limit.
- *
- * @return a consumer
- */
- public static CambriaConsumer createConsumer(Collection<String> hostSet,
- final String topic, final String consumerGroup,
- final String consumerId, int timeoutMs, int limit) {
- return createConsumer(hostSet, topic, consumerGroup, consumerId,
- timeoutMs, limit, null, null, null);
- }
-
- /**
- * Create a consumer instance with the default timeout, and no limit on
- * messages returned. This consumer can operate in a logical group and is
- * re-startable across sessions when you use the same group and ID on
- * restart. This consumer also uses server-side filtering.
- *
- * @param hostList
- * A comma separated list of hosts to use to connect to Cambria.
- * You can include port numbers (3904 is the default). For
- * example, "ueb01hydc.it.att.com:8080,ueb02hydc.it.att.com"
- * @param topic
- * The topic to consume
- * @param consumerGroup
- * The name of the consumer group this consumer is part of
- * @param consumerId
- * The unique id of this consume in its group
- * @param timeoutMs
- * The amount of time in milliseconds that the server should keep
- * the connection open while waiting for message traffic. Use -1
- * for default timeout.
- * @param limit
- * A limit on the number of messages returned in a single call.
- * Use -1 for no limit.
- * @param filter
- * A Highland Park filter expression using only built-in filter
- * components. Use null for "no filter".
- * @param apiKey
- * key associated with a user
- * @param apiSecret
- * of a user
- *
- * @return a consumer
- */
- public static CambriaConsumer createConsumer(String hostList,
- final String topic, final String consumerGroup,
- final String consumerId, int timeoutMs, int limit, String filter,
- String apiKey, String apiSecret) {
- return createConsumer(DMaaPCambriaConsumerImpl.stringToList(hostList),
- topic, consumerGroup, consumerId, timeoutMs, limit, filter,
- apiKey, apiSecret);
- }
-
- /**
- * Create a consumer instance with the default timeout, and no limit on
- * messages returned. This consumer can operate in a logical group and is
- * re-startable across sessions when you use the same group and ID on
- * restart. This consumer also uses server-side filtering.
- *
- * @param hostSet
- * The host used in the URL to Cambria. Entries can be
- * "host:port".
- * @param topic
- * The topic to consume
- * @param consumerGroup
- * The name of the consumer group this consumer is part of
- * @param consumerId
- * The unique id of this consume in its group
- * @param timeoutMs
- * The amount of time in milliseconds that the server should keep
- * the connection open while waiting for message traffic. Use -1
- * for default timeout.
- * @param limit
- * A limit on the number of messages returned in a single call.
- * Use -1 for no limit.
- * @param filter
- * A Highland Park filter expression using only built-in filter
- * components. Use null for "no filter".
- * @param apiKey
- * key associated with a user
- * @param apiSecret
- * of a user
- * @return a consumer
- */
- public static CambriaConsumer createConsumer(Collection<String> hostSet,
- final String topic, final String consumerGroup,
- final String consumerId, int timeoutMs, int limit, String filter,
- String apiKey, String apiSecret) {
- if (sfMock != null)
- return sfMock;
- try {
- return new DMaaPCambriaConsumerImpl(hostSet, topic, consumerGroup,
- consumerId, timeoutMs, limit, filter, apiKey, apiSecret);
- } catch (MalformedURLException e) {
-
- NotYetConnectedException exception=new NotYetConnectedException();
- exception.setStackTrace(e.getStackTrace());
-
- throw exception ;
- }
- }
-
- /*************************************************************************/
- /*************************************************************************/
- /*************************************************************************/
-
- /**
- * Create a publisher that sends each message (or group of messages)
- * immediately. Most applications should favor higher latency for much
- * higher message throughput and the "simple publisher" is not a good
- * choice.
- *
- * @param hostlist
- * The host used in the URL to Cambria. Can be "host:port", can
- * be multiple comma-separated entries.
- * @param topic
- * The topic on which to publish messages.
- * @return a publisher
- */
- public static CambriaBatchingPublisher createSimplePublisher(
- String hostlist, String topic) {
- return createBatchingPublisher(hostlist, topic, 1, 1);
- }
-
- /**
- * Create a publisher that batches messages. Be sure to close the publisher
- * to send the last batch and ensure a clean shutdown. Message payloads are
- * not compressed.
- *
- * @param hostlist
- * The host used in the URL to Cambria. Can be "host:port", can
- * be multiple comma-separated entries.
- * @param topic
- * The topic on which to publish messages.
- * @param maxBatchSize
- * The largest set of messages to batch
- * @param maxAgeMs
- * The maximum age of a message waiting in a batch
- *
- * @return a publisher
- */
- public static CambriaBatchingPublisher createBatchingPublisher(
- String hostlist, String topic, int maxBatchSize, long maxAgeMs) {
- return createBatchingPublisher(hostlist, topic, maxBatchSize, maxAgeMs,
- false);
- }
-
- /**
- * Create a publisher that batches messages. Be sure to close the publisher
- * to send the last batch and ensure a clean shutdown.
- *
- * @param hostlist
- * The host used in the URL to Cambria. Can be "host:port", can
- * be multiple comma-separated entries.
- * @param topic
- * The topic on which to publish messages.
- * @param maxBatchSize
- * The largest set of messages to batch
- * @param maxAgeMs
- * The maximum age of a message waiting in a batch
- * @param compress
- * use gzip compression
- *
- * @return a publisher
- */
- public static CambriaBatchingPublisher createBatchingPublisher(
- String hostlist, String topic, int maxBatchSize, long maxAgeMs,
- boolean compress) {
- return createBatchingPublisher(
- DMaaPCambriaConsumerImpl.stringToList(hostlist), topic,
- maxBatchSize, maxAgeMs, compress);
- }
-
- /**
- * Create a publisher that batches messages. Be sure to close the publisher
- * to send the last batch and ensure a clean shutdown.
- *
- * @param hostSet
- * A set of hosts to be used in the URL to Cambria. Can be
- * "host:port". Use multiple entries to enable failover.
- * @param topic
- * The topic on which to publish messages.
- * @param maxBatchSize
- * The largest set of messages to batch
- * @param maxAgeMs
- * The maximum age of a message waiting in a batch
- * @param compress
- * use gzip compression
- *
- * @return a publisher
- */
- public static CambriaBatchingPublisher createBatchingPublisher(
- String[] hostSet, String topic, int maxBatchSize, long maxAgeMs,
- boolean compress) {
- final TreeSet<String> hosts = new TreeSet<String>();
- for (String hp : hostSet) {
- hosts.add(hp);
- }
- return createBatchingPublisher(hosts, topic, maxBatchSize, maxAgeMs,
- compress);
- }
-
- /**
- * Create a publisher that batches messages. Be sure to close the publisher
- * to send the last batch and ensure a clean shutdown.
- *
- * @param hostSet
- * A set of hosts to be used in the URL to Cambria. Can be
- * "host:port". Use multiple entries to enable failover.
- * @param topic
- * The topic on which to publish messages.
- * @param maxBatchSize
- * The largest set of messages to batch
- * @param maxAgeMs
- * The maximum age of a message waiting in a batch
- * @param compress
- * use gzip compression
- *
- * @return a publisher
- */
- public static CambriaBatchingPublisher createBatchingPublisher(
- Collection<String> hostSet, String topic, int maxBatchSize,
- long maxAgeMs, boolean compress) {
- return new DMaaPCambriaSimplerBatchPublisher.Builder()
- .againstUrls(hostSet).onTopic(topic)
- .batchTo(maxBatchSize, maxAgeMs).compress(compress).build();
- }
-
- /**
- * Create an identity manager client to work with API keys.
- *
- * @param hostSet
- * A set of hosts to be used in the URL to Cambria. Can be
- * "host:port". Use multiple entries to enable failover.
- * @param apiKey
- * Your API key
- * @param apiSecret
- * Your API secret
- * @return an identity manager
- */
-
-
- /**
- * Create a topic manager for working with topics.
- *
- * @param hostSet
- * A set of hosts to be used in the URL to Cambria. Can be
- * "host:port". Use multiple entries to enable failover.
- * @param apiKey
- * Your API key
- * @param apiSecret
- * Your API secret
- * @return a topic manager
- */
-
-
- /**
- * Inject a consumer. Used to support unit tests.
- *
- * @param cc
- */
- public static void $testInject(CambriaConsumer cc) {
- sfMock = cc;
- }
-
- private static CambriaConsumer sfMock = null;
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java
deleted file mode 100644
index 84576fc..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/CambriaBaseClient.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher.impl;
-
-import java.net.MalformedURLException;
-import java.util.Collection;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-
-import com.att.dmf.mr.constants.CambriaConstants;
-
-//import org.slf4j.LoggerFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.apiClient.http.CacheUse;
-import com.att.nsa.apiClient.http.HttpClient;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public class CambriaBaseClient extends HttpClient implements com.att.dmf.mr.metrics.publisher.CambriaClient
-{
- protected CambriaBaseClient ( Collection<String> hosts ) throws MalformedURLException
- {
- this ( hosts, null );
- }
-
- public CambriaBaseClient ( Collection<String> hosts, String clientSignature ) throws MalformedURLException
- {
-
-
-
- super(ConnectionType.HTTP, hosts, CambriaConstants.kStdCambriaServicePort, clientSignature, CacheUse.NONE, 1, 1L, TimeUnit.MILLISECONDS, 32, 32, 600000);
-
-
- fLog = EELFManager.getInstance().getLogger(this.getClass().getName());
-
- }
-
- @Override
- public void close ()
- {
- }
-
- public Set<String> jsonArrayToSet ( JSONArray a ) throws JSONException
- {
- if ( a == null ) return null;
-
- final TreeSet<String> set = new TreeSet<>();
- for ( int i=0; i<a.length (); i++ )
- {
- set.add ( a.getString ( i ));
- }
- return set;
- }
- /**
- * @param log
- */
- public void logTo ( EELFLogger log )
- {
- fLog = log;
-
-
- }
-
- public EELFLogger getLog ()
- {
- return fLog;
- }
-
- private EELFLogger fLog;
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java
deleted file mode 100644
index 7463700..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/Clock.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher.impl;
-
-/**
- *
- * This class maintains the system clocks
- * @author nilanjana.maity
- *
- */
-public class Clock
-{
- public synchronized static Clock getIt ()
- {
- if ( sfClock == null )
- {
- sfClock = new Clock ();
- }
- return sfClock;
- }
-
- /**
- *
- * Get the system's current time in milliseconds.
- * @return the current time
- *
- */
- public static long now ()
- {
- return getIt().nowImpl ();
- }
-
- /**
- * Get current time in milliseconds
- * @return current time in ms
- */
- public long nowImpl ()
- {
- return System.currentTimeMillis ();
- }
-
- /**
- * Initialize constructor
- */
- public Clock ()
- {
- }
-
- private static Clock sfClock = null;
-
- public synchronized static void register ( Clock testClock )
- {
- sfClock = testClock;
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
deleted file mode 100644
index ee56213..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaConsumerImpl.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher.impl;
-
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.net.MalformedURLException;
-import java.net.URLEncoder;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility;
-import com.att.nsa.apiClient.http.HttpException;
-import com.att.nsa.apiClient.http.HttpObjectNotFoundException;
-
-import jline.internal.Log;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public class DMaaPCambriaConsumerImpl extends CambriaBaseClient
- implements com.att.dmf.mr.metrics.publisher.CambriaConsumer {
- private final String fTopic;
- private final String fGroup;
- private final String fId;
- private final int fTimeoutMs;
- private final int fLimit;
- private final String fFilter;
-
- /**
- *
- * @param hostPart
- * @param topic
- * @param consumerGroup
- * @param consumerId
- * @param timeoutMs
- * @param limit
- * @param filter
- * @param apiKey
- * @param apiSecret
- */
- public DMaaPCambriaConsumerImpl(Collection<String> hostPart, final String topic, final String consumerGroup,
- final String consumerId, int timeoutMs, int limit, String filter, String apiKey, String apiSecret) throws MalformedURLException {
- super(hostPart, topic + "::" + consumerGroup + "::" + consumerId);
-
- fTopic = topic;
- fGroup = consumerGroup;
- fId = consumerId;
- fTimeoutMs = timeoutMs;
- fLimit = limit;
- fFilter = filter;
-
- setApiCredentials(apiKey, apiSecret);
- }
-
- /**
- * method converts String to list
- *
- * @param str
- * @return
- */
- public static List<String> stringToList(String str) {
- final LinkedList<String> set = new LinkedList<String>();
- if (str != null) {
- final String[] parts = str.trim().split(",");
- for (String part : parts) {
- final String trimmed = part.trim();
- if (trimmed.length() > 0) {
- set.add(trimmed);
- }
- }
- }
- return set;
- }
-
- @Override
- public Iterable<String> fetch() throws IOException {
- // fetch with the timeout and limit set in constructor
- return fetch(fTimeoutMs, fLimit);
- }
-
- @Override
- public Iterable<String> fetch(int timeoutMs, int limit) throws IOException {
- final LinkedList<String> msgs = new LinkedList<String>();
-
- final String urlPath = createUrlPath(timeoutMs, limit);
-
- getLog().info("UEB GET " + urlPath);
- try {
- final JSONObject o = get(urlPath);
-
- if (o != null) {
- final JSONArray a = o.getJSONArray("result");
- if (a != null) {
- for (int i = 0; i < a.length(); i++) {
- msgs.add(a.getString(i));
- }
- }
- }
- } catch (HttpObjectNotFoundException e) {
- // this can happen if the topic is not yet created. ignore.
- Log.error("Failed due to topic is not yet created" + e);
- } catch (JSONException e) {
- // unexpected response
- reportProblemWithResponse();
- Log.error("Failed due to jsonException", e);
- } catch (HttpException e) {
- throw new IOException(e);
- }
-
- return msgs;
- }
-
- public String createUrlPath(int timeoutMs, int limit) {
- final StringBuilder url = new StringBuilder(CambriaPublisherUtility.makeConsumerUrl(fTopic, fGroup, fId));
- final StringBuilder adds = new StringBuilder();
- if (timeoutMs > -1) {
- adds.append("timeout=").append(timeoutMs);
- }
-
- if (limit > -1) {
- if (adds.length() > 0) {
- adds.append("&");
- }
- adds.append("limit=").append(limit);
- }
- if (fFilter != null && fFilter.length() > 0) {
- try {
- if (adds.length() > 0) {
- adds.append("&");
- }
- adds.append("filter=").append(URLEncoder.encode(fFilter, "UTF-8"));
- } catch (UnsupportedEncodingException e) {
- Log.error("Failed due to UnsupportedEncodingException" + e);
- }
- }
- if (adds.length() > 0) {
- url.append("?").append(adds.toString());
- }
- return url.toString();
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java b/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
deleted file mode 100644
index e9b1cdb..0000000
--- a/src/main/java/com/att/dmf/mr/metrics/publisher/impl/DMaaPCambriaSimplerBatchPublisher.java
+++ /dev/null
@@ -1,422 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.metrics.publisher.impl;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.MalformedURLException;
-import java.nio.channels.NotYetConnectedException;
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.zip.GZIPOutputStream;
-
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
-import javax.ws.rs.client.Entity;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.Response;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.metrics.publisher.CambriaPublisherUtility;
-
-/**
- *
- * class DMaaPCambriaSimplerBatchPublisher used to send the publish the messages
- * in batch
- *
- * @author anowarul.islam
- *
- */
-public class DMaaPCambriaSimplerBatchPublisher extends CambriaBaseClient
- implements com.att.dmf.mr.metrics.publisher.CambriaBatchingPublisher {
- /**
- *
- * static inner class initializes with urls, topic,batchSize
- *
- * @author anowarul.islam
- *
- */
- public static class Builder {
- public Builder() {
- }
-
- /**
- * constructor initialize with url
- *
- * @param baseUrls
- * @return
- *
- */
- public Builder againstUrls(Collection<String> baseUrls) {
- fUrls = baseUrls;
- return this;
- }
-
- /**
- * constructor initializes with topics
- *
- * @param topic
- * @return
- *
- */
- public Builder onTopic(String topic) {
- fTopic = topic;
- return this;
- }
-
- /**
- * constructor initilazes with batch size and batch time
- *
- * @param maxBatchSize
- * @param maxBatchAgeMs
- * @return
- *
- */
- public Builder batchTo(int maxBatchSize, long maxBatchAgeMs) {
- fMaxBatchSize = maxBatchSize;
- fMaxBatchAgeMs = maxBatchAgeMs;
- return this;
- }
-
- /**
- * constructor initializes with compress
- *
- * @param compress
- * @return
- */
- public Builder compress(boolean compress) {
- fCompress = compress;
- return this;
- }
-
- /**
- * method returns DMaaPCambriaSimplerBatchPublisher object
- *
- * @return
- */
- public DMaaPCambriaSimplerBatchPublisher build() {
-
- try {
- return new DMaaPCambriaSimplerBatchPublisher(fUrls, fTopic, fMaxBatchSize, fMaxBatchAgeMs, fCompress);
- } catch (MalformedURLException e) {
-
- NotYetConnectedException exception=new NotYetConnectedException();
- exception.setStackTrace(e.getStackTrace());
-
- throw exception ;
-
- }
- }
-
- private Collection<String> fUrls;
- private String fTopic;
- private int fMaxBatchSize = 100;
- private long fMaxBatchAgeMs = 1000;
- private boolean fCompress = false;
- };
-
- /**
- *
- * @param partition
- * @param msg
- */
- @Override
- public int send(String partition, String msg) {
- return send(new message(partition, msg));
- }
-
- /**
- * @param msg
- */
- @Override
- public int send(message msg) {
- final LinkedList<message> list = new LinkedList<message>();
- list.add(msg);
- return send(list);
- }
-
- /**
- * @param msgs
- */
- @Override
- public synchronized int send(Collection<message> msgs) {
- if (fClosed) {
- throw new IllegalStateException("The publisher was closed.");
- }
-
- for (message userMsg : msgs) {
- fPending.add(new TimestampedMessage(userMsg));
- }
- return getPendingMessageCount();
- }
-
- /**
- * getPending message count
- */
- @Override
- public synchronized int getPendingMessageCount() {
- return fPending.size();
- }
-
- /**
- *
- * @exception InterruptedException
- * @exception IOException
- */
- @Override
- public void close() {
- try {
- final List<message> remains = close(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
- if (remains.isEmpty()) {
- getLog().warn("Closing publisher with " + remains.size() + " messages unsent. "
- + "Consider using CambriaBatchingPublisher.close( long timeout, TimeUnit timeoutUnits ) to recapture unsent messages on close.");
- }
- } catch (InterruptedException e) {
- getLog().warn("Possible message loss. " + e.getMessage(), e);
- } catch (IOException e) {
- getLog().warn("Possible message loss. " + e.getMessage(), e);
- }
- }
-
- /**
- * @param time
- * @param unit
- */
- @Override
- public List<message> close(long time, TimeUnit unit) throws IOException, InterruptedException {
- synchronized (this) {
- fClosed = true;
-
- // stop the background sender
- fExec.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
- fExec.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
- fExec.shutdown();
- }
-
- final long now = Clock.now();
- final long waitInMs = TimeUnit.MILLISECONDS.convert(time, unit);
- final long timeoutAtMs = now + waitInMs;
-
- while (Clock.now() < timeoutAtMs && getPendingMessageCount() > 0) {
- send(true);
- Thread.sleep(250);
- }
- // synchronizing the current object
- synchronized (this) {
- final LinkedList<message> result = new LinkedList<message>();
- fPending.drainTo(result);
- return result;
- }
- }
-
- /**
- * Possibly send a batch to the cambria server. This is called by the
- * background thread and the close() method
- *
- * @param force
- */
- private synchronized void send(boolean force) {
- if (force || shouldSendNow()) {
- if (!sendBatch()) {
- getLog().warn("Send failed, " + fPending.size() + " message to send.");
-
- // note the time for back-off
- fDontSendUntilMs = sfWaitAfterError + Clock.now();
- }
- }
- }
-
- /**
- *
- * @return
- */
- private synchronized boolean shouldSendNow() {
- boolean shouldSend = false;
- if (fPending.isEmpty()) {
- final long nowMs = Clock.now();
-
- shouldSend = (fPending.size() >= fMaxBatchSize);
- if (!shouldSend) {
- final long sendAtMs = fPending.peek().timestamp + fMaxBatchAgeMs;
- shouldSend = sendAtMs <= nowMs;
- }
-
- // however, wait after an error
- shouldSend = shouldSend && nowMs >= fDontSendUntilMs;
- }
- return shouldSend;
- }
-
- /**
- *
- * @return
- */
- private synchronized boolean sendBatch() {
- // it's possible for this call to be made with an empty list. in this
- // case, just return.
- if (fPending.isEmpty()) {
- return true;
- }
-
- final long nowMs = Clock.now();
- final String url = CambriaPublisherUtility.makeUrl(fTopic);
-
- getLog().info("sending " + fPending.size() + " msgs to " + url + ". Oldest: "
- + (nowMs - fPending.peek().timestamp) + " ms");
-
- try {
-
- final ByteArrayOutputStream baseStream = new ByteArrayOutputStream();
- OutputStream os = baseStream;
- if (fCompress) {
- os = new GZIPOutputStream(baseStream);
- }
- for (TimestampedMessage m : fPending) {
- os.write(("" + m.fPartition.length()).getBytes());
- os.write('.');
- os.write(("" + m.fMsg.length()).getBytes());
- os.write('.');
- os.write(m.fPartition.getBytes());
- os.write(m.fMsg.getBytes());
- os.write('\n');
- }
- os.close();
-
- final long startMs = Clock.now();
-
- // code from REST Client Starts
-
-
-
-
- Client client = ClientBuilder.newClient();
- String metricTopicname = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"metrics.send.cambria.topic");
- if (null==metricTopicname) {
-
- metricTopicname="msgrtr.apinode.metrics.dmaap";
- }
- WebTarget target = client
- .target("http://localhost:" + CambriaConstants.kStdCambriaServicePort);
- target = target.path("/events/" + fTopic);
- getLog().info("url : " + target.getUri().toString());
- // API Key
-
- Entity<byte[]> data = Entity.entity(baseStream.toByteArray(), "application/cambria");
-
- Response response = target.request().post(data);
-
- getLog().info("Response received :: " + response.getStatus());
- getLog().info("Response received :: " + response.toString());
-
- // code from REST Client Ends
-
-
- fPending.clear();
- return true;
- } catch (IllegalArgumentException x) {
- getLog().warn(x.getMessage(), x);
- }
-
- catch (IOException x) {
- getLog().warn(x.getMessage(), x);
- }
- return false;
- }
-
- private final String fTopic;
- private final int fMaxBatchSize;
- private final long fMaxBatchAgeMs;
- private final boolean fCompress;
- private boolean fClosed;
-
- private final LinkedBlockingQueue<TimestampedMessage> fPending;
- private long fDontSendUntilMs;
- private final ScheduledThreadPoolExecutor fExec;
-
- private static final long sfWaitAfterError = 1000;
-
- /**
- *
- * @param hosts
- * @param topic
- * @param maxBatchSize
- * @param maxBatchAgeMs
- * @param compress
- */
- private DMaaPCambriaSimplerBatchPublisher(Collection<String> hosts, String topic, int maxBatchSize,
- long maxBatchAgeMs, boolean compress) throws MalformedURLException {
-
- super(hosts);
-
- if (topic == null || topic.length() < 1) {
- throw new IllegalArgumentException("A topic must be provided.");
- }
-
- fClosed = false;
- fTopic = topic;
- fMaxBatchSize = maxBatchSize;
- fMaxBatchAgeMs = maxBatchAgeMs;
- fCompress = compress;
-
- fPending = new LinkedBlockingQueue<TimestampedMessage>();
- fDontSendUntilMs = 0;
-
- fExec = new ScheduledThreadPoolExecutor(1);
- fExec.scheduleAtFixedRate(new Runnable() {
- @Override
- public void run() {
- send(false);
- }
- }, 100, 50, TimeUnit.MILLISECONDS);
- }
-
- /**
- *
- *
- * @author anowarul.islam
- *
- */
- private static class TimestampedMessage extends message {
- /**
- * to store timestamp value
- */
- public final long timestamp;
-
- /**
- * constructor initialize with message
- *
- * @param m
- *
- */
- public TimestampedMessage(message m) {
- super(m);
- timestamp = Clock.now();
- }
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java b/src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java
deleted file mode 100644
index 4565d3a..0000000
--- a/src/main/java/com/att/dmf/mr/resources/CambriaEventSet.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.resources;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.zip.GZIPInputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Publisher.message;
-import com.att.dmf.mr.resources.streamReaders.CambriaJsonStreamReader;
-import com.att.dmf.mr.resources.streamReaders.CambriaRawStreamReader;
-import com.att.dmf.mr.resources.streamReaders.CambriaStreamReader;
-import com.att.dmf.mr.resources.streamReaders.CambriaTextStreamReader;
-import com.att.nsa.apiServer.streams.ChunkedInputStream;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-
-/**
- * An inbound event set.
- *
- * @author peter
- */
-public class CambriaEventSet {
- private final reader fReader;
-
- /**
- * constructor initialization
- *
- * @param mediaType
- * @param originalStream
- * @param chunked
- * @param defPartition
- * @throws CambriaApiException
- */
- public CambriaEventSet(String mediaType, InputStream originalStream,
- boolean chunked, String defPartition) throws CambriaApiException {
- InputStream is = originalStream;
- if (chunked) {
- is = new ChunkedInputStream(originalStream);
- }
-
- if (("application/json").equals(mediaType)) {
- if (chunked) {
- throw new CambriaApiException(
- HttpServletResponse.SC_BAD_REQUEST,
- "The JSON stream reader doesn't support chunking.");
- }
- fReader = new CambriaJsonStreamReader(is, defPartition);
- } else if (("application/cambria").equals(mediaType)) {
- fReader = new CambriaStreamReader(is);
- } else if (("application/cambria-zip").equals(mediaType)) {
- try {
- is = new GZIPInputStream(is);
- } catch (IOException e) {
- throw new CambriaApiException(HttpStatusCodes.k400_badRequest,
- "Couldn't read compressed format: " + e);
- }
- fReader = new CambriaStreamReader(is);
- } else if (("text/plain").equals(mediaType)) {
- fReader = new CambriaTextStreamReader(is, defPartition);
- } else {
- fReader = new CambriaRawStreamReader(is, defPartition);
- }
- }
-
- /**
- * Get the next message from this event set. Returns null when the end of
- * stream is reached. Will block until a message arrives (or the stream is
- * closed/broken).
- *
- * @return a message, or null
- * @throws IOException
- * @throws CambriaApiException
- */
- public message next() throws IOException, CambriaApiException {
- return fReader.next();
- }
-
- /**
- *
- * @author anowarul.islam
- *
- */
- public interface reader {
- /**
- *
- * @return
- * @throws IOException
- * @throws CambriaApiException
- */
- message next() throws IOException, CambriaApiException;
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java b/src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java
deleted file mode 100644
index aae15fb..0000000
--- a/src/main/java/com/att/dmf/mr/resources/CambriaOutboundEventStream.java
+++ /dev/null
@@ -1,554 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.resources;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Date;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.metabroker.Topic;
-import com.att.dmf.mr.utils.DMaaPResponseBuilder.StreamWriter;
-import com.att.dmf.mr.utils.Utils;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/*import com.att.sa.highlandPark.config.HpConfigContext;
-import com.att.sa.highlandPark.config.HpReaderException;
-import com.att.sa.highlandPark.events.HpJsonEvent;
-import com.att.sa.highlandPark.events.HpJsonEventFactory;
-import com.att.sa.highlandPark.processor.HpAlarmFilter;
-import com.att.sa.highlandPark.processor.HpEvent;
-import com.att.sa.highlandPark.processor.HpProcessingEngine;
-import com.att.sa.highlandPark.processor.HpProcessingEngine.EventFactory;
-*/
-/**
- * class used to write the consumed messages
- *
- * @author anowarul.islam
- *
- */
-public class CambriaOutboundEventStream implements StreamWriter {
- private static final int kTopLimit = 1024 * 4;
-
- /**
- *
- * static innerclass it takes all the input parameter for kafka consumer
- * like limit, timeout, meta, pretty
- *
- * @author anowarul.islam
- *
- */
- public static class Builder {
-
- // Required
- private final Consumer fConsumer;
- // private final rrNvReadable fSettings; // used during write to tweak
- // format, decide to explicitly
- // close stream or not
-
- // Optional
- private int fLimit;
- private int fTimeoutMs;
- private String fTopicFilter;
- private boolean fPretty;
- private boolean fWithMeta;
- ArrayList<Consumer> fKafkaConsumerList;
-
-
- /**
- * constructor it initializes all the consumer parameters
- *
- * @param c
- * @param settings
- */
- public Builder(Consumer c) {
- this.fConsumer = c;
-
-
- fLimit = CambriaConstants.kNoTimeout;
- fTimeoutMs = CambriaConstants.kNoLimit;
- fTopicFilter = CambriaConstants.kNoFilter;
- fPretty = false;
- fWithMeta = false;
-
-
- }
-
- /**
- *
- * constructor initializes with limit
- *
- * @param l
- * only l no of messages will be consumed
- * @return
- */
- public Builder limit(int l) {
- this.fLimit = l;
- return this;
- }
-
- /**
- * constructor initializes with timeout
- *
- * @param t
- * if there is no message to consume, them DMaaP will wait
- * for t time
- * @return
- */
- public Builder timeout(int t) {
- this.fTimeoutMs = t;
- return this;
- }
-
- /**
- * constructor initializes with filter
- *
- * @param f
- * filter
- * @return
- */
- public Builder filter(String f) {
- this.fTopicFilter = f;
- return this;
- }
-
- /**
- * constructor initializes with boolean value pretty
- *
- * @param p
- * messages print in new line
- * @return
- */
- public Builder pretty(boolean p) {
- fPretty = p;
- return this;
- }
-
- /**
- * constructor initializes with boolean value meta
- *
- * @param withMeta,
- * along with messages offset will print
- * @return
- */
- public Builder withMeta(boolean withMeta) {
- fWithMeta = withMeta;
- return this;
- }
-
- // public Builder atOffset ( int pos )
-
-
- // return this;
- // }
- /**
- * method returs object of CambriaOutboundEventStream
- *
- * @return
- * @throws CambriaApiException
- */
- public CambriaOutboundEventStream build() throws CambriaApiException {
- return new CambriaOutboundEventStream(this);
- }
- }
-
- @SuppressWarnings("unchecked")
- /**
- *
- * @param builder
- * @throws CambriaApiException
- *
- */
- private CambriaOutboundEventStream(Builder builder) throws CambriaApiException {
- fConsumer = builder.fConsumer;
- fLimit = builder.fLimit;
- fTimeoutMs = builder.fTimeoutMs;
-
- fSent = 0;
- fPretty = builder.fPretty;
- fWithMeta = builder.fWithMeta;
- fKafkaConsumerList = builder.fKafkaConsumerList;
- /* if (CambriaConstants.kNoFilter.equals(builder.fTopicFilter)) {
- fHpAlarmFilter = null;
- fHppe = null;
- } else {
- try {
- final JSONObject filter = new JSONObject(new JSONTokener(builder.fTopicFilter));
- HpConfigContext<HpEvent> cc = new HpConfigContext<HpEvent>();
- fHpAlarmFilter = cc.create(HpAlarmFilter.class, filter);
- final EventFactory<HpJsonEvent> ef = new HpJsonEventFactory();
- fHppe = new HpProcessingEngine<HpJsonEvent>(ef);
- } catch (HpReaderException e) {
- // JSON was okay, but the filter engine says it's bogus
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
- "Couldn't create filter: " + e.getMessage());
- } catch (JSONException e) {
- // user sent a bogus JSON object
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
- "Couldn't parse JSON: " + e.getMessage());
- }
- }*/
- }
-
- /**
- *
- * interface provides onWait and onMessage methods
- *
- */
- public interface operation {
- /**
- * Call thread.sleep
- *
- * @throws IOException
- */
- void onWait() throws IOException;
-
- /**
- * provides the output based in the consumer paramter
- *
- * @param count
- * @param msg
- * @throws IOException
- */
-
- void onMessage(int count, String msg, String transId, long offSet) throws IOException, JSONException;
- }
-
- /**
- *
- * @return
- */
- public int getSentCount() {
- return fSent;
- }
-
- @Override
- /**
- *
- * @param os
- * throws IOException
- */
- public void write(final OutputStream os) throws IOException {
-
-
- // final boolean transactionEnabled = istransEnable;
- // synchronized(this){
- os.write('[');
- fSent = forEachMessage(new operation() {
- @Override
- public void onMessage(int count, String msg, String transId, long offSet)
- throws IOException, JSONException {
-
- if (count > 0) {
- os.write(',');
- }
- if (fWithMeta) {
- final JSONObject entry = new JSONObject();
- entry.put("offset", offSet);
- entry.put("message", msg);
- os.write(entry.toString().getBytes());
- } else {
-
- String jsonString = JSONObject.valueToString(msg);
- os.write(jsonString.getBytes());
- }
-
- if (fPretty) {
- os.write('\n');
- }
-
- String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap
- .getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic");
- if (null == metricTopicname)
- metricTopicname = "msgrtr.apinode.metrics.dmaap";
- if (!metricTopicname.equalsIgnoreCase(topic.getName())) {
- try {
- if (istransEnable && istransType) {
- // final String transactionId =
-
-
- StringBuilder consumerInfo = new StringBuilder();
- if (null != dmaapContext && null != dmaapContext.getRequest()) {
- final HttpServletRequest request = dmaapContext.getRequest();
- consumerInfo.append("consumerIp= \"" + request.getRemoteHost() + "\",");
- consumerInfo.append("consServerIp= \"" + request.getLocalAddr() + "\",");
- consumerInfo.append("consumerId= \"" + Utils.getUserApiKey(request) + "\",");
- consumerInfo.append("consumerGroup= \""
- + getConsumerGroupFromRequest(request.getRequestURI()) + "\",");
- consumerInfo.append("consumeTime= \"" + Utils.getFormattedDate(new Date()) + "\",");
- }
- log.info("Consumer [" + consumerInfo.toString() + "transactionId= \"" + transId
- + "\",messageLength= \"" + msg.length() + "\",topic= \"" + topic.getName() + "\"]");
- }
- } catch (Exception e) {
- }
- }
-
- }
-
- @Override
- /**
- *
- * It makes thread to wait
- *
- * @throws IOException
- */
- public void onWait() throws IOException {
- os.flush(); // likely totally unnecessary for a network socket
- try {
- // FIXME: would be good to wait/signal
- Thread.sleep(100);
- } catch (InterruptedException e) {
- // ignore
- }
- }
- });
-
-
- if (null != dmaapContext && istransEnable && istransType) {
-
- dmaapContext.getResponse().setHeader("transactionId",
- Utils.getResponseTransactionId(responseTransactionId));
- }
-
- os.write(']');
- os.flush();
-
- boolean close_out_stream = true;
- String strclose_out_stream = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "close.output.stream");
- if (null != strclose_out_stream)
- close_out_stream = Boolean.parseBoolean(strclose_out_stream);
-
-
- if (close_out_stream) {
- os.close();
-
- }
- }
-
- /**
- *
- * @param requestURI
- * @return
- */
- private String getConsumerGroupFromRequest(String requestURI) {
- if (null != requestURI && !requestURI.isEmpty()) {
-
- String consumerDetails = requestURI.substring(requestURI.indexOf("events/") + 7);
-
- int startIndex = consumerDetails.indexOf("/") + 1;
- int endIndex = consumerDetails.lastIndexOf("/");
- return consumerDetails.substring(startIndex, endIndex);
- }
- return null;
- }
-
- /**
- *
- * @param op
- * @return
- * @throws IOException
- * @throws JSONException
- */
- public int forEachMessage(operation op) throws IOException, JSONException {
- final int effectiveLimit = (fLimit == 0 ? kTopLimit : fLimit);
-
- int count = 0;
- boolean firstPing = true;
- // boolean isTransType=false;
- final long startMs = System.currentTimeMillis();
- final long timeoutMs = fTimeoutMs + startMs -500; //500 ms used in poll
-
- while (firstPing || (count == 0 && System.currentTimeMillis() < timeoutMs)) {
- if (!firstPing) {
- op.onWait();
- }
- firstPing = false;
-
-
- Consumer.Message msgRecord = null;
- while (count < effectiveLimit && (msgRecord =
- fConsumer.nextMessage()) != null) {
-
- String message = "";
- String transactionid = "";
- try {
- // String msgRecord = msg;
- JSONObject jsonMessage = new JSONObject(msgRecord);
- String[] keys = JSONObject.getNames(jsonMessage);
- boolean wrapheader1 = false;
- boolean wrapheader2 = false;
- boolean found_attr3 = false;
- String wrapElement1 = "message";
- String wrapElement2 = "msgWrapMR";
- String transIdElement = "transactionId";
- if (null != keys) {
- for (String key : keys) {
- if (key.equals(wrapElement1)) {
- wrapheader1 = true;
- } else if (key.equals(wrapElement2)) {
- wrapheader2 = true;
- } else if (key.equals(transIdElement)) {
- found_attr3 = true;
- transactionid = jsonMessage.getString(key);
- }
- }
- }
-
- // returns contents of attribute 1 if both attributes
- // present, otherwise
- // the whole msg
- if (wrapheader2 && found_attr3) {
- message = jsonMessage.getString(wrapElement2);
- } else if (wrapheader1 && found_attr3) {
- message = jsonMessage.getString(wrapElement1);
- } else {
- message = msgRecord.getMessage();
- }
- // jsonMessage = extractMessage(jsonMessage ,
- // "message","msgWrapMR","transactionId");
- istransType = true;
- } catch (JSONException e) { // This check is required for the
- // message sent by MR AAF flow but
- // consumed by UEB ACL flow which
- // wont expect transaction id in
- // cambria client api
- // Ignore
- log.info("JSON Exception logged when the message is non JSON Format");
- } catch (Exception exp) {
- log.info("****Some Exception occured for writing messages in topic" + topic.getName()
- + " Exception" + exp);
- }
- if (message == null || message.equals("")) {
- istransType = false;
- message = msgRecord.getMessage();
- }
-
- // If filters are enabled/set, message should be in JSON format
- // for filters to work for
- // otherwise filter will automatically ignore message in
- // non-json format.
- if (filterMatches(message)) {
- op.onMessage(count, message, transactionid, msgRecord.getOffset());
- count++;
-
- }
-
- }
- }
- return count;
- }
-
-
-
- /**
- *
- * Checks whether filter is initialized
- */
-
-
-
-
- /**
- *
- * @param msg
- * @return
- */
- private boolean filterMatches(String msg) {
- boolean result = true;
-
-
-
-
-
-
-
-
-
-
-
-
-
- return result;
- }
-
- public DMaaPContext getDmaapContext() {
- return dmaapContext;
- }
-
- public void setDmaapContext(DMaaPContext dmaapContext) {
- this.dmaapContext = dmaapContext;
- }
-
- public Topic getTopic() {
- return topic;
- }
-
- public void setTopic(Topic topic) {
- this.topic = topic;
- }
-
- public void setTopicStyle(boolean aaftopic) {
- this.isAAFTopic = aaftopic;
- }
-
- public void setTransEnabled(boolean transEnable) {
- this.istransEnable = transEnable;
- }
-
-
- private final Consumer fConsumer;
- private final int fLimit;
- private final int fTimeoutMs;
-
- private final boolean fPretty;
- private final boolean fWithMeta;
- private int fSent;
-
- //private final HpProcessingEngine<HpJsonEvent> fHppe;
- private DMaaPContext dmaapContext;
- private String responseTransactionId;
- private Topic topic;
- private boolean isAAFTopic = false;
- private boolean istransEnable = false;
- private ArrayList<Consumer> fKafkaConsumerList;
- private boolean istransType = true;
- // private static final Logger log =
-
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(CambriaOutboundEventStream.class);
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java
deleted file mode 100644
index 7a67c92..0000000
--- a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaJsonStreamReader.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.resources.streamReaders;
-
-import java.io.InputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Publisher.message;
-import com.att.dmf.mr.beans.LogDetails;
-import com.att.dmf.mr.resources.CambriaEventSet.reader;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public class CambriaJsonStreamReader implements reader {
- private final JSONTokener fTokens;
- private final boolean fIsList;
- private long fCount;
- private final String fDefPart;
- public static final String kKeyField = "cambria.partition";
-
- /**
- *
- * @param is
- * @param defPart
- * @throws CambriaApiException
- */
- public CambriaJsonStreamReader(InputStream is, String defPart) throws CambriaApiException {
- try {
- fTokens = new JSONTokener(is);
- fCount = 0;
- fDefPart = defPart;
-
- final int c = fTokens.next();
- if (c == '[') {
- fIsList = true;
- } else if (c == '{') {
- fTokens.back();
- fIsList = false;
- } else {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expecting an array or an object.");
- }
- } catch (JSONException e) {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
- }
- }
-
- @Override
- public message next() throws CambriaApiException {
- try {
- if (!fTokens.more()) {
- return null;
- }
-
- final int c = fTokens.next();
-
-
- if (fIsList) {
- if (c == ']' || (fCount > 0 && c == 10))
- return null;
-
-
- if (fCount > 0 && c != ',' && c!= 10) {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
- "Expected ',' or closing ']' after last object.");
- }
-
- if (fCount == 0 && c != '{' && c!= 10 && c!=32) {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected { to start an object.");
- }
- } else if (fCount != 0 || c != '{') {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected '{' to start an object.");
- }
-
- if (c == '{') {
- fTokens.back();
- }
- final JSONObject o = new JSONObject(fTokens);
- fCount++;
- return new msg(o);
- } catch (JSONException e) {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
-
- }
- }
-
- private class msg implements message {
- private final String fKey;
- private String fMsg;
- private LogDetails logDetails;
- private boolean transactionEnabled;
-
- /**
- * constructor
- *
- * @param o
- */
-
-
-
- public msg(JSONObject o) {
- String key = o.optString(kKeyField, fDefPart);
- if (key == null) {
- key = "" + System.currentTimeMillis();
- }
- fKey = key;
-
- fMsg = o.toString().trim();
-
- }
-
- @Override
- public String getKey() {
- return fKey;
- }
-
- @Override
- public String getMessage() {
- return fMsg;
- }
-
- @Override
- public boolean isTransactionEnabled() {
- return transactionEnabled;
- }
-
- @Override
- public void setTransactionEnabled(boolean transactionEnabled) {
- this.transactionEnabled = transactionEnabled;
- }
-
- @Override
- public void setLogDetails(LogDetails logDetails) {
- this.logDetails = logDetails;
- }
-
- @Override
- public LogDetails getLogDetails() {
- return logDetails;
- }
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java
deleted file mode 100644
index f64c0de..0000000
--- a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaRawStreamReader.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.resources.streamReaders;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Publisher.message;
-import com.att.dmf.mr.beans.LogDetails;
-import com.att.dmf.mr.resources.CambriaEventSet.reader;
-import com.att.nsa.util.StreamTools;
-
-/**
- *
- * This stream reader reads raw bytes creating a single message.
- * @author peter
- *
- */
-public class CambriaRawStreamReader implements reader
-{
- /**
- * This is the constructor of CambriaRawStreamReader, it will basically the read from Input stream
- * @param is
- * @param defPart
- * @throws CambriaApiException
- */
- public CambriaRawStreamReader ( InputStream is, String defPart ) throws CambriaApiException
- {
- fStream = is;
- fDefPart = defPart;
- fClosed = false;
- }
-
- @Override
- /**
- *
- * next() method reads the bytes and
- * iterates through the messages
- * @throws CambriaApiException
- *
- */
- public message next () throws CambriaApiException
- {
- if ( fClosed ) return null;
-
- try
- {
- final byte[] rawBytes = StreamTools.readBytes ( fStream );
- fClosed = true;
- return new message ()
- {
- private LogDetails logDetails;
- private boolean transactionEnabled;
-
- /**
- * returns boolean value which
- * indicates whether transaction is enabled
- */
- public boolean isTransactionEnabled() {
- return transactionEnabled;
- }
-
- /**
- * sets boolean value which
- * indicates whether transaction is enabled
- */
- public void setTransactionEnabled(boolean transactionEnabled) {
- this.transactionEnabled = transactionEnabled;
- }
-
- @Override
- /**
- * @returns key
- * It ch4ecks whether fDefPart value is Null.
- * If yes, it will return ystem.currentTimeMillis () else
- * it will return fDefPart variable value
- */
- public String getKey ()
- {
- return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
- }
-
- @Override
- /**
- * returns the message in String type object
- */
- public String getMessage ()
- {
- return new String ( rawBytes );
- }
-
- /**
- * set log details in logDetails variable
- */
- @Override
- public void setLogDetails(LogDetails logDetails) {
- this.logDetails = logDetails;
- }
-
- @Override
- /**
- * get the log details
- */
- public LogDetails getLogDetails() {
- return this.logDetails;
- }
- };
- }
- catch ( IOException e )
- {
- throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
- }
- }
-
- private final InputStream fStream;
- private final String fDefPart;
- private boolean fClosed;
-
-}
diff --git a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java
deleted file mode 100644
index 3dbf339..0000000
--- a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaStreamReader.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.resources.streamReaders;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import javax.servlet.http.HttpServletResponse;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Publisher.message;
-import com.att.dmf.mr.beans.LogDetails;
-import com.att.dmf.mr.resources.CambriaEventSet.reader;
-
-/**
- * Read an optionally chunked stream in the Cambria app format. This format
- * allows for speedier server-side message parsing than pure JSON. It's looks
- * like:<br/>
- * <br/>
- * &lt;keyLength&gt;.&lt;msgLength&gt;.&lt;key&gt;&lt;message&gt;<br/>
- * <br/>
- * Whitespace before/after each entry is ignored, so messages can be delivered
- * with newlines between them, or not.
- *
- * @author peter
- *
- */
-public class CambriaStreamReader implements reader {
- /**
- * constructor initializing InputStream with fStream
- *
- * @param senderStream
- * @throws CambriaApiException
- */
- public CambriaStreamReader(InputStream senderStream) throws CambriaApiException {
- fStream = senderStream;
- }
-
- @Override
- /**
- * next method iterates through msg length
- * throws IOException
- * throws CambriaApiException
- *
- */
- public message next() throws IOException, CambriaApiException {
- final int keyLen = readLength();
- if (keyLen == -1)
- return null;
-
- final int msgLen = readLength();
- final String keyPart = readString(keyLen);
- final String msgPart = readString(msgLen);
-
- return new msg(keyPart, msgPart);
- }
-
- private static class msg implements message {
- /**
- * constructor initialization
- *
- * @param key
- * @param msg
- */
- public msg(String key, String msg) {
- // if no key, use the current time. This allows the message to be
- // delivered
- // in any order without forcing it into a single partition as empty
- // string would.
- if (key.length() < 1) {
- key = "" + System.currentTimeMillis();
- }
-
- fKey = key;
- fMsg = msg;
- }
-
- @Override
- /**
- * @returns fkey
- */
- public String getKey() {
- return fKey;
- }
-
- @Override
- /**
- * returns the message in String type object
- */
- public String getMessage() {
- return fMsg;
- }
-
- private final String fKey;
- private final String fMsg;
- private LogDetails logDetails;
- private boolean transactionEnabled;
-
- /**
- * returns boolean value which
- * indicates whether transaction is enabled
- */
- public boolean isTransactionEnabled() {
- return transactionEnabled;
- }
-
- /**
- * sets boolean value which
- * indicates whether transaction is enabled
- */
- public void setTransactionEnabled(boolean transactionEnabled) {
- this.transactionEnabled = transactionEnabled;
- }
-
- @Override
- /**
- * set log details in logDetails variable
- */
- public void setLogDetails(LogDetails logDetails) {
- this.logDetails = logDetails;
- }
-
- @Override
- /**
- * get the log details
- */
- public LogDetails getLogDetails() {
- return this.logDetails;
- }
-
- }
-
- private final InputStream fStream;
-
- /**
- * max cambria length indicates message length
-
- // This limit is here to prevent the server from spinning on a long string of numbers
- // that is delivered with 'application/cambria' as the format. The limit needs to be
- // large enough to support the max message length (currently 1MB, the default Kafka
- // limit)
- * */
-
- private static final int kMaxCambriaLength = 4*1000*1024;
-
-
- /**
- *
- * @return
- * @throws IOException
- * @throws CambriaApiException
- */
- private int readLength() throws IOException, CambriaApiException {
- // always ignore leading whitespace
- int c = fStream.read();
- while (Character.isWhitespace(c)) {
- c = fStream.read();
- }
-
- if (c == -1) {
- return -1;
- }
-
- int result = 0;
- while (Character.isDigit(c)) {
- result = (result * 10) + (c - '0');
- if (result > kMaxCambriaLength) {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
- }
- c = fStream.read();
- }
-
- if (c != '.') {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST, "Expected . after length.");
- }
-
- return result;
- }
-
- /**
- *
- * @param len
- * @return
- * @throws IOException
- * @throws CambriaApiException
- */
- private String readString(int len) throws IOException, CambriaApiException {
- final byte[] buffer = new byte[len];
-
- final long startMs = System.currentTimeMillis();
- final long timeoutMs = startMs + 30000; // FIXME configurable
-
- int readTotal = 0;
- while (readTotal < len) {
- final int read = fStream.read(buffer, readTotal, len - readTotal);
- if (read == -1 || System.currentTimeMillis() > timeoutMs) {
- // EOF
- break;
- }
- readTotal += read;
- }
-
- if (readTotal < len) {
- throw new CambriaApiException(HttpServletResponse.SC_BAD_REQUEST,
- "End of stream while reading " + len + " bytes");
- }
-
- return new String(buffer);
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java b/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java
deleted file mode 100644
index b06e17a..0000000
--- a/src/main/java/com/att/dmf/mr/resources/streamReaders/CambriaTextStreamReader.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.resources.streamReaders;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-
-import javax.servlet.http.HttpServletResponse;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Publisher.message;
-import com.att.dmf.mr.beans.LogDetails;
-import com.att.dmf.mr.resources.CambriaEventSet.reader;
-
-/**
- * This stream reader just pulls single lines. It uses the default partition if provided. If
- * not, the key is the current time, which does not guarantee ordering.
- *
- * @author peter
- *
- */
-public class CambriaTextStreamReader implements reader
-{
- /**
- * This is the constructor for Cambria Text Reader format
- * @param is
- * @param defPart
- * @throws CambriaApiException
- */
- public CambriaTextStreamReader ( InputStream is, String defPart ) throws CambriaApiException
- {
- fReader = new BufferedReader ( new InputStreamReader ( is ) );
- fDefPart = defPart;
- }
-
- @Override
- /**
- * next() method iterates through msg length
- * throws IOException
- * throws CambriaApiException
- *
- */
- public message next () throws CambriaApiException
- {
- try
- {
- final String line = fReader.readLine ();
- if ( line == null ) return null;
-
- return new message ()
- {
- private LogDetails logDetails;
- private boolean transactionEnabled;
-
- /**
- * returns boolean value which
- * indicates whether transaction is enabled
- * @return
- */
- public boolean isTransactionEnabled() {
- return transactionEnabled;
- }
-
- /**
- * sets boolean value which
- * indicates whether transaction is enabled
- */
- public void setTransactionEnabled(boolean transactionEnabled) {
- this.transactionEnabled = transactionEnabled;
- }
-
- @Override
- /**
- * @returns key
- * It ch4ecks whether fDefPart value is Null.
- * If yes, it will return ystem.currentTimeMillis () else
- * it will return fDefPart variable value
- */
- public String getKey ()
- {
- return fDefPart == null ? "" + System.currentTimeMillis () : fDefPart;
- }
-
- @Override
- /**
- * returns the message in String type object
- * @return
- */
- public String getMessage ()
- {
- return line;
- }
-
- @Override
- /**
- * set log details in logDetails variable
- */
- public void setLogDetails(LogDetails logDetails) {
- this.logDetails = logDetails;
- }
-
- @Override
- /**
- * get the log details
- */
- public LogDetails getLogDetails() {
- return this.logDetails;
- }
- };
- }
- catch ( IOException e )
- {
- throw new CambriaApiException ( HttpServletResponse.SC_BAD_REQUEST, e.getMessage () );
- }
- }
-
- private final BufferedReader fReader;
- private final String fDefPart;
-}
diff --git a/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java b/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java
deleted file mode 100644
index 59196d2..0000000
--- a/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticator.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.security;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.dmf.mr.CambriaApiException;
-
-
-
-
-/**
- *
- * @author sneha.d.desai
- *
- */
-public interface DMaaPAAFAuthenticator {
- boolean aafAuthentication( HttpServletRequest req , String role);
- String aafPermissionString(String permission, String action) throws CambriaApiException;
-}
diff --git a/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java b/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java
deleted file mode 100644
index e4e24cd..0000000
--- a/src/main/java/com/att/dmf/mr/security/DMaaPAAFAuthenticatorImpl.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.security;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.constants.CambriaConstants;
-
-
-/**
- *
- * @author sneha.d.desai
- *
- */
-public class DMaaPAAFAuthenticatorImpl implements DMaaPAAFAuthenticator {
-
- /**
- * @param req
- * @param role
- */
- @Override
- public boolean aafAuthentication(HttpServletRequest req, String role) {
- boolean auth = false;
- if(req.isUserInRole(role))
- {
-
- auth = true;
- }
-
- return auth;
- }
-
- @Override
- public String aafPermissionString(String topicName, String action) throws CambriaApiException {
-
-
- String permission = "";
- String nameSpace ="";
- if(topicName.contains(".") && topicName.contains("org.onap")) {
-
- nameSpace = topicName.substring(0,topicName.lastIndexOf("."));
- }
- else {
- nameSpace = null;
- nameSpace= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"defaultNSforUEB");
-
- if(null==nameSpace)nameSpace="org.onap.dmaap.mr";
-
-
-
- }
-
- permission = nameSpace+".topic|:topic."+topicName+"|"+action;
- return permission;
-
- }
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java b/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java
deleted file mode 100644
index 848d4cc..0000000
--- a/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticator.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.security;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.nsa.security.NsaApiKey;
-
-
-/**
- * An interface for authenticating an inbound request.
- * @author nilanjana.maity
- *
- * @param <K> NsaApiKey
- */
-public interface DMaaPAuthenticator<K extends NsaApiKey> {
-
- /**
- * Qualify a request as possibly using the authentication method that this class implements.
- * @param req
- * @return true if the request might be authenticated by this class
- */
- boolean qualify ( HttpServletRequest req );
-
- /**
- * Check for a request being authentic. If it is, return the API key. If not, return null.
- * @param req An inbound web request
- * @return the API key for an authentic request, or null
- */
- K isAuthentic ( HttpServletRequest req );
- /**
- * Check for a ctx being authenticate. If it is, return the API key. If not, return null.
- * @param ctx
- * @return the API key for an authentication request, or null
- */
- K authenticate ( DMaaPContext ctx );
-
-
- void addAuthenticator(DMaaPAuthenticator<K> a);
-
-}
diff --git a/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java b/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java
deleted file mode 100644
index 0ae0839..0000000
--- a/src/main/java/com/att/dmf/mr/security/DMaaPAuthenticatorImpl.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.security;
-
-import java.util.LinkedList;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-/**
- *
- * @author anowarul.islam
- *
- * @param <K>
- */
-public class DMaaPAuthenticatorImpl<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
-
- private final LinkedList<DMaaPAuthenticator<K>> fAuthenticators;
-
-
-
- // Setting timeout to a large value for testing purpose.
-
- // 10 minutes
- private static final long kDefaultRequestTimeWindow = 1000 * 60 * 10 * 10 * 10 * 10 * 10;
-
- /**
- * Construct the security manager against an API key database
- *
- * @param db
- * the API key db
- */
- public DMaaPAuthenticatorImpl(NsaApiDb<K> db) {
- this(db, kDefaultRequestTimeWindow);
- }
-
-
-
-
- /**
- * Construct the security manager against an API key database with a
- * specific request time window size
- *
- * @param db
- * the API key db
- * @param authTimeWindowMs
- * the size of the time window for request authentication
- */
- public DMaaPAuthenticatorImpl(NsaApiDb<K> db, long authTimeWindowMs) {
- fAuthenticators = new LinkedList<>();
-
- fAuthenticators.add(new DMaaPOriginalUebAuthenticator<K>(db, authTimeWindowMs));
- }
-
- /**
- * Authenticate a user's request. This method returns the API key if the
- * user is authentic, null otherwise.
- *
- * @param ctx
- * @return an api key record, or null
- */
- public K authenticate(DMaaPContext ctx) {
- final HttpServletRequest req = ctx.getRequest();
- for (DMaaPAuthenticator<K> a : fAuthenticators) {
- if (a.qualify(req)) {
- final K k = a.isAuthentic(req);
- if (k != null)
- return k;
- }
- // else: this request doesn't look right to the authenticator
- }
- return null;
- }
-
- /**
- * Get the user associated with the incoming request, or null if the user is
- * not authenticated.
- *
- * @param ctx
- * @return
- */
- public static NsaSimpleApiKey getAuthenticatedUser(DMaaPContext ctx) {
- final DMaaPAuthenticator<NsaSimpleApiKey> m = ctx.getConfigReader().getfSecurityManager();
- return m.authenticate(ctx);
- }
-
- /**
- * method by default returning false
- * @param req
- * @return false
- */
- public boolean qualify(HttpServletRequest req) {
- return false;
- }
-/**
- * method by default returning null
- * @param req
- * @return null
- */
- public K isAuthentic(HttpServletRequest req) {
- return null;
- }
-
- public void addAuthenticator ( DMaaPAuthenticator<K> a )
- {
- this.fAuthenticators.add(a);
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java b/src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java
deleted file mode 100644
index 64dbc14..0000000
--- a/src/main/java/com/att/dmf/mr/security/impl/DMaaPMechIdAuthenticator.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.security.impl;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.security.DMaaPAuthenticator;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.authenticators.MechIdAuthenticator;
-
-/**
- * An authenticator for AT&T MechIds.
- *
- * @author peter
- *
- * @param <K>
- */
-public class DMaaPMechIdAuthenticator <K extends NsaApiKey> implements DMaaPAuthenticator<K> {
-
-/**
- * This is not yet implemented. by refault its returing false
- * @param req HttpServletRequest
- * @return false
- */
- public boolean qualify (HttpServletRequest req) {
- // we haven't implemented anything here yet, so there's no qualifying request
- return false;
- }
-/**
- * This metod authenticate the mech id
- * @param req
- * @return APIkey or null
- */
- public K isAuthentic (HttpServletRequest req) {
- final String remoteAddr = req.getRemoteAddr();
- authLog ( "MechId auth is not yet implemented.", remoteAddr );
- return null;
- }
-
- private static void authLog ( String msg, String remoteAddr )
- {
- log.info ( "AUTH-LOG(" + remoteAddr + "): " + msg );
- }
-
-
- //private static final Logger log = Logger.getLogger( MechIdAuthenticator.class.toString());
- private static final EELFLogger log = EELFManager.getInstance().getLogger(MechIdAuthenticator.class);
-/**
- * Curently its not yet implemented returning null
- * @param ctx DMaaP context
- * @return APIkey or null
- */
- @Override
- public K authenticate(DMaaPContext ctx) {
- // TODO Auto-generated method stub
- return null;
- }
-@Override
-public void addAuthenticator(DMaaPAuthenticator<K> a) {
- // TODO Auto-generated method stub
-
-}
-
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java b/src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java
deleted file mode 100644
index b1e28e7..0000000
--- a/src/main/java/com/att/dmf/mr/security/impl/DMaaPOriginalUebAuthenticator.java
+++ /dev/null
@@ -1,293 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.security.impl;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.security.DMaaPAuthenticator;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.till.data.sha1HmacSigner;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.db.NsaApiDb;
-
-/**
- * This authenticator handles an AWS-like authentication, originally used by the
- * Cambria server (the API server for UEB).
- *
- * @author peter
- *
- * @param <K>
- */
-public class DMaaPOriginalUebAuthenticator<K extends NsaApiKey> implements DMaaPAuthenticator<K> {
- /**
- * constructor initialization
- *
- * @param db
- * @param requestTimeWindowMs
- */
- public DMaaPOriginalUebAuthenticator(NsaApiDb<K> db, long requestTimeWindowMs) {
- fDb = db;
- fRequestTimeWindowMs = requestTimeWindowMs;
-
-
-
-
- }
-
- @Override
- public boolean qualify(HttpServletRequest req) {
- // accept anything that comes in with X-(Cambria)Auth in the header
- final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
- return xAuth != null;
- }
-
- /**
- * method for authentication
- *
- * @param req
- * @return
- */
- public K isAuthentic(HttpServletRequest req) {
- final String remoteAddr = req.getRemoteAddr();
- // Cambria originally used "Cambria..." headers, but as the API key
- // system is now more
- // general, we take either form.
- final String xAuth = getFirstHeader(req, new String[] { "X-CambriaAuth", "X-Auth" });
- final String xDate = getFirstHeader(req, new String[] { "X-CambriaDate", "X-Date" });
-
- final String httpDate = req.getHeader("Date");
-
- final String xNonce = getFirstHeader(req, new String[] { "X-Nonce" });
- return authenticate(remoteAddr, xAuth, xDate, httpDate, xNonce);
- }
-
- /**
- * Authenticate a user's request. This method returns the API key if the
- * user is authentic, null otherwise.
- *
- * @param remoteAddr
- * @param xAuth
- * @param xDate
- * @param httpDate
- * @param nonce
- * @return an api key record, or null
- */
- public K authenticate(String remoteAddr, String xAuth, String xDate, String httpDate, String nonce) {
- if (xAuth == null) {
- authLog("No X-Auth header on request", remoteAddr);
- return null;
- }
-
- final String[] xAuthParts = xAuth.split(":");
- if (xAuthParts.length != 2) {
- authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
- return null;
- }
-
-
- // get the api key and signature
- final String clientApiKey = xAuthParts[0];
- final String clientApiHash = xAuthParts[1];
- if (clientApiKey.length() == 0 || clientApiHash.length() == 0) {
- authLog("Bad X-Auth header format (" + xAuth + ")", remoteAddr);
- return null;
- }
- // if the user provided X-Date, use that. Otherwise, go for Date
- final String dateString = xDate != null ? xDate : httpDate;
- final Date clientDate = getClientDate(dateString);
- if (clientDate == null) {
- authLog("Couldn't parse client date '" + dateString + "'. Preferring X-Date over Date.", remoteAddr);
- return null;
- }
- // check the time range
- final long nowMs = System.currentTimeMillis();
- final long diffMs = Math.abs(nowMs - clientDate.getTime());
- if (diffMs > fRequestTimeWindowMs) {
- authLog("Client date is not in acceptable range of server date. Client:" + clientDate.getTime()
- + ", Server: " + nowMs + ", Threshold: " + fRequestTimeWindowMs + ".", remoteAddr);
- return null;
- }
- K apiRecord;
- try {
- apiRecord = fDb.loadApiKey(clientApiKey);
- if (apiRecord == null) {
- authLog("No such API key " + clientApiKey, remoteAddr);
- return null;
- }
- } catch (ConfigDbException e) {
- authLog("Couldn't load API key " + clientApiKey + ": " + e.getMessage(), remoteAddr);
- return null;
- }
- // make the signed content
- final StringBuilder sb = new StringBuilder();
- sb.append(dateString);
- if (nonce != null) {
- sb.append(":");
- sb.append(nonce);
- }
- final String signedContent = sb.toString();
- // now check the signed date string
- final String serverCalculatedSignature = sha1HmacSigner.sign(signedContent, apiRecord.getSecret());
- if (serverCalculatedSignature == null || !serverCalculatedSignature.equals(clientApiHash)) {
- authLog("Signatures don't match. Rec'd " + clientApiHash + ", expect " + serverCalculatedSignature + ".",
- remoteAddr);
- return null;
- }
- authLog("authenticated " + apiRecord.getKey(), remoteAddr);
- return apiRecord;
- }
-
- /**
- * Get the first value of the first existing header from the headers list
- *
- * @param req
- * @param headers
- * @return a header value, or null if none exist
- */
- private static String getFirstHeader(HttpServletRequest req, String[] headers) {
- for (String header : headers) {
- final String result = req.getHeader(header);
- if (result != null)
- return result;
- }
- return null;
- }
-
- /**
- * Parse the date string into a Date using one of the supported date
- * formats.
- *
- * @param dateHeader
- * @return a date, or null
- */
- private static Date getClientDate(String dateString) {
- if (dateString == null) {
- return null;
- }
-
- // parse the date
- Date result = null;
- for (String dateFormat : kDateFormats) {
- final SimpleDateFormat parser = new SimpleDateFormat(dateFormat, java.util.Locale.US);
- if (!dateFormat.contains("z") && !dateFormat.contains("Z")) {
- parser.setTimeZone(TIMEZONE_GMT);
- }
-
- try {
- result = parser.parse(dateString);
- break;
- } catch (ParseException e) {
- // presumably wrong format
- }
- }
- return result;
- }
-
- private static void authLog(String msg, String remoteAddr) {
- log.info("AUTH-LOG(" + remoteAddr + "): " + msg);
- }
-
- private final NsaApiDb<K> fDb;
- private final long fRequestTimeWindowMs;
-
- private static final java.util.TimeZone TIMEZONE_GMT = java.util.TimeZone.getTimeZone("GMT");
-
- private static final String kDateFormats[] =
- {
- // W3C date format (RFC 3339).
- "yyyy-MM-dd'T'HH:mm:ssz",
- "yyyy-MM-dd'T'HH:mm:ssXXX", // as of Java 7, reqd to handle colon in TZ offset
-
- // Preferred HTTP date format (RFC 1123).
- "EEE, dd MMM yyyy HH:mm:ss zzz",
-
- // simple unix command line 'date' format
- "EEE MMM dd HH:mm:ss z yyyy",
-
- // Common date format (RFC 822).
- "EEE, dd MMM yy HH:mm:ss z",
- "EEE, dd MMM yy HH:mm z",
- "dd MMM yy HH:mm:ss z",
- "dd MMM yy HH:mm z",
-
- // Obsoleted HTTP date format (ANSI C asctime() format).
- "EEE MMM dd HH:mm:ss yyyy",
-
- // Obsoleted HTTP date format (RFC 1036).
- "EEEE, dd-MMM-yy HH:mm:ss zzz",
- };
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- // logger declaration
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPOriginalUebAuthenticator.class);
- @Override
-
- // TODO Auto-generated method stub
-
- //}
-
- public K authenticate(DMaaPContext ctx) {
-
-
-
-
-
-
-
-
-
-
- return null;
- }
-
-
- public void addAuthenticator ( DMaaPAuthenticator<K> a )
- {
-
- }
-
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/service/AdminService.java b/src/main/java/com/att/dmf/mr/service/AdminService.java
deleted file mode 100644
index aaf7c0b..0000000
--- a/src/main/java/com/att/dmf/mr/service/AdminService.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service;
-
-import java.io.IOException;
-
-import org.json.JSONException;
-
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * @author muzainulhaque.qazi
- *
- */
-public interface AdminService {
- /**
- * method provide consumerCache
- *
- * @param dMaaPContext
- * @throws IOException
- */
- void showConsumerCache(DMaaPContext dMaaPContext) throws IOException,AccessDeniedException;
-
- /**
- * method drops consumer cache
- *
- * @param dMaaPContext
- * @throws JSONException
- * @throws IOException
- */
- void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException,AccessDeniedException;
-
-
- /**
- * Get list of blacklisted ips
- * @param dMaaPContext context
- * @throws IOException ex
- * @throws AccessDeniedException ex
- */
- void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException;
-
- /**
- * Add ip to blacklist
- * @param dMaaPContext context
- * @param ip ip
- * @throws IOException ex
- * @throws ConfigDbException ex
- * @throws AccessDeniedException ex
- */
- void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;
-
- /**
- * Remove ip from blacklist
- * @param dMaaPContext context
- * @param ip ip
- * @throws IOException ex
- * @throws ConfigDbException ex
- * @throws AccessDeniedException ex
- */
- void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException;
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/ApiKeysService.java b/src/main/java/com/att/dmf/mr/service/ApiKeysService.java
deleted file mode 100644
index 57fc8be..0000000
--- a/src/main/java/com/att/dmf/mr/service/ApiKeysService.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service;
-
-import java.io.IOException;
-
-import com.att.dmf.mr.beans.ApiKeyBean;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
-
-/**
- * Declaring all the method in interface that is mainly used for authentication
- * purpose.
- *
- *
- */
-
-public interface ApiKeysService {
- /**
- * This method declaration for getting all ApiKey that has generated on
- * server.
- *
- * @param dmaapContext
- * @throws ConfigDbException
- * @throws IOException
- */
-
- public void getAllApiKeys(DMaaPContext dmaapContext)
- throws ConfigDbException, IOException;
-
- /**
- * Getting information about specific ApiKey
- *
- * @param dmaapContext
- * @param apikey
- * @throws ConfigDbException
- * @throws IOException
- */
-
- public void getApiKey(DMaaPContext dmaapContext, String apikey)
- throws ConfigDbException, IOException;
-
- /**
- * Thid method is used for create a particular ApiKey
- *
- * @param dmaapContext
- * @param nsaApiKey
- * @throws KeyExistsException
- * @throws ConfigDbException
- * @throws IOException
- */
-
- public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
- throws KeyExistsException, ConfigDbException, IOException;
-
- /**
- * This method is used for update ApiKey that is already generated on
- * server.
- *
- * @param dmaapContext
- * @param apikey
- * @param nsaApiKey
- * @throws ConfigDbException
- * @throws IOException
- * @throws AccessDeniedException
- * @throws com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException
- */
- public void updateApiKey(DMaaPContext dmaapContext, String apikey,
- ApiKeyBean nsaApiKey) throws ConfigDbException, IOException,AccessDeniedException
- ;
-
- /**
- * This method is used for delete specific ApiKey
- *
- * @param dmaapContext
- * @param apikey
- * @throws ConfigDbException
- * @throws IOException
- * @throws AccessDeniedException
- */
-
- public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
- throws ConfigDbException, IOException,AccessDeniedException;
-}
diff --git a/src/main/java/com/att/dmf/mr/service/EventsService.java b/src/main/java/com/att/dmf/mr/service/EventsService.java
deleted file mode 100644
index 2f89bd2..0000000
--- a/src/main/java/com/att/dmf/mr/service/EventsService.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public interface EventsService {
- /**
- *
- * @param ctx
- * @param topic
- * @param consumerGroup
- * @param clientId
- * @throws ConfigDbException
- * @throws TopicExistsException
- * @throws AccessDeniedException
- * @throws UnavailableException
- * @throws CambriaApiException
- * @throws IOException
- */
- public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
- throws ConfigDbException, TopicExistsException,UnavailableException,
- CambriaApiException, IOException,AccessDeniedException;
-
- /**
- *
- * @param ctx
- * @param topic
- * @param msg
- * @param defaultPartition
- * @param requestTime
- * @throws ConfigDbException
- * @throws AccessDeniedException
- * @throws TopicExistsException
- * @throws CambriaApiException
- * @throws IOException
- */
- public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
- final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
- CambriaApiException, IOException,missingReqdSetting;
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/MMService.java b/src/main/java/com/att/dmf/mr/service/MMService.java
deleted file mode 100644
index ae01bbf..0000000
--- a/src/main/java/com/att/dmf/mr/service/MMService.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * Contains the logic for executing calls to the Mirror Maker agent tool.
- *
- * @author <a href="mailto:kawsar.jahan@att.com">Kawsar Jahan</a>
- *
- * @since May 25, 2016
- */
-
-public interface MMService {
-
- /*
- * this method calls the add white list method of a Mirror Maker agent API
- */
- public void addWhiteList();
-
- /*
- * this method calls the remove white list method of a Mirror Maker agent API
- */
- public void removeWhiteList();
-
- /*
- * This method calls the list white list method of a Mirror Maker agent API
- */
- public void listWhiteList();
-
- public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) throws ConfigDbException, TopicExistsException,
- AccessDeniedException, UnavailableException, CambriaApiException, IOException;
-
- public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
- final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
- CambriaApiException, IOException, missingReqdSetting;
-}
diff --git a/src/main/java/com/att/dmf/mr/service/MetricsService.java b/src/main/java/com/att/dmf/mr/service/MetricsService.java
deleted file mode 100644
index b6cc60d..0000000
--- a/src/main/java/com/att/dmf/mr/service/MetricsService.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service;
-
-/**
- * @author amol.ramesh.dalne
- *
- */
-import java.io.IOException;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.beans.DMaaPContext;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public interface MetricsService {
- /**
- *
- * @param ctx
- * @throws IOException
- */
- public void get(DMaaPContext ctx) throws IOException;
-
- /**
- *
- * @param ctx
- * @param name
- * @throws IOException
- * @throws CambriaApiException
- */
- public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException;
-}
diff --git a/src/main/java/com/att/dmf/mr/service/TopicService.java b/src/main/java/com/att/dmf/mr/service/TopicService.java
deleted file mode 100644
index b42d9c9..0000000
--- a/src/main/java/com/att/dmf/mr/service/TopicService.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service;
-
-import java.io.IOException;
-
-import org.json.JSONException;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.beans.TopicBean;
-import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * interface provide all the topic related operations
- *
- * @author anowarul.islam
- *
- */
-public interface TopicService {
- /**
- * method fetch details of all the topics
- *
- * @param dmaapContext
- * @throws JSONException
- * @throws ConfigDbException
- * @throws IOException
- */
- void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
- void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException;
-
- /**
- * method fetch details of specific topic
- *
- * @param dmaapContext
- * @param topicName
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- */
- void getTopic(DMaaPContext dmaapContext, String topicName)
- throws ConfigDbException, IOException, TopicExistsException;
-
- /**
- * method used to create the topic
- *
- * @param dmaapContext
- * @param topicBean
- * @throws CambriaApiException
- * @throws TopicExistsException
- * @throws IOException
- * @throws AccessDeniedException
- * @throws JSONException
- */
-
- void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
- throws CambriaApiException, TopicExistsException, IOException, AccessDeniedException;
-
- /**
- * method used to delete to topic
- *
- * @param dmaapContext
- * @param topicName
- * @throws IOException
- * @throws AccessDeniedException
- * @throws ConfigDbException
- * @throws CambriaApiException
- * @throws TopicExistsException
- */
-
- void deleteTopic(DMaaPContext dmaapContext, String topicName)
- throws IOException, AccessDeniedException, ConfigDbException, CambriaApiException, TopicExistsException;
-
- /**
- * method provides list of all the publishers associated with a topic
- *
- * @param dmaapContext
- * @param topicName
- * @throws IOException
- * @throws ConfigDbException
- * @throws TopicExistsException
- */
- void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
- throws IOException, ConfigDbException, TopicExistsException;
-
- /**
- * method provides details of all the consumer associated with a specific
- * topic
- *
- * @param dmaapContext
- * @param topicName
- * @throws IOException
- * @throws ConfigDbException
- * @throws TopicExistsException
- */
- void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
- throws IOException, ConfigDbException, TopicExistsException;
-
- /**
- * method provides publishing right to a specific topic
- *
- * @param dmaapContext
- * @param topicName
- * @param producerId
- * @throws AccessDeniedException
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- */
- void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
-
- /**
- * method denies any specific publisher from a topic
- *
- * @param dmaapContext
- * @param topicName
- * @param producerId
- * @throws AccessDeniedException
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- */
- void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
-
- /**
- * method provide consuming right to a specific user on a topic
- *
- * @param dmaapContext
- * @param topicName
- * @param consumerId
- * @throws AccessDeniedException
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- */
- void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
-
- /**
- * method denies a particular user's consuming right on a topic
- *
- * @param dmaapContext
- * @param topicName
- * @param consumerId
- * @throws AccessDeniedException
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- */
- void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,CambriaApiException;
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/TransactionService.java b/src/main/java/com/att/dmf/mr/service/TransactionService.java
deleted file mode 100644
index f2763a6..0000000
--- a/src/main/java/com/att/dmf/mr/service/TransactionService.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service;
-
-import java.io.IOException;
-
-import com.att.aft.dme2.internal.jettison.json.JSONException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.transaction.TransactionObj;
-import com.att.nsa.configs.ConfigDbException;
-
-/**
- *
- * @author anowarul.islam
- *
- */
-public interface TransactionService {
- /**
- *
- * @param trnObj
- */
- void checkTransaction(TransactionObj trnObj);
-
- /**
- *
- * @param dmaapContext
- * @throws ConfigDbException
- * @throws IOException
- */
- void getAllTransactionObjs(DMaaPContext dmaapContext) throws ConfigDbException, IOException;
-
- /**
- *
- * @param dmaapContext
- * @param transactionId
- * @throws ConfigDbException
- * @throws JSONException
- * @throws IOException
- */
- void getTransactionObj(DMaaPContext dmaapContext, String transactionId)
- throws ConfigDbException, JSONException, IOException;
-}
diff --git a/src/main/java/com/att/dmf/mr/service/UIService.java b/src/main/java/com/att/dmf/mr/service/UIService.java
deleted file mode 100644
index 1155a2a..0000000
--- a/src/main/java/com/att/dmf/mr/service/UIService.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- *
- */
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service;
-
-import java.io.IOException;
-
-import org.apache.kafka.common.errors.TopicExistsException;
-import org.json.JSONException;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.nsa.configs.ConfigDbException;
-/**
- * @author muzainulhaque.qazi
- *
- */
-public interface UIService {
- /**
- * Returning template of hello page.
- *
- * @param dmaapContext
- * @throws IOException
- */
- void hello(DMaaPContext dmaapContext) throws IOException;
-
- /**
- * Fetching list of all api keys and returning in a templated form for
- * display
- *
- * @param dmaapContext
- * @throws ConfigDbException
- * @throws IOException
- */
- void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException,
- IOException;
-
- /**
- * Fetching detials of apikey in a templated form for display
- *
- * @param dmaapContext
- * @param apiKey
- * @throws Exception
- */
- void getApiKey(DMaaPContext dmaapContext, final String apiKey)
- throws CambriaApiException, ConfigDbException, JSONException, IOException;
-
- /**
- * Fetching list of all the topics and returning in a templated form for
- * display
- *
- * @param dmaapContext
- * @throws ConfigDbException
- * @throws IOException
- */
- void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException,
- IOException;
-
- /**
- * Fetching detials of topic in a templated form for display
- *
- * @param dmaapContext
- * @param topic
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- */
- void getTopic(DMaaPContext dmaapContext, final String topic)
- throws ConfigDbException, IOException, TopicExistsException;
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java
deleted file mode 100644
index f7c48de..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/AdminServiceImpl.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Set;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.springframework.stereotype.Component;
-
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.backends.ConsumerFactory;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
-import com.att.dmf.mr.service.AdminService;
-import com.att.dmf.mr.utils.DMaaPResponseBuilder;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.limits.Blacklist;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-
-/**
- * @author muzainulhaque.qazi
- *
- */
-@Component
-public class AdminServiceImpl implements AdminService {
-
- //private Logger log = Logger.getLogger(AdminServiceImpl.class.toString());
- private static final EELFLogger log = EELFManager.getInstance().getLogger(AdminServiceImpl.class);
- /**
- * getConsumerCache returns consumer cache
- * @param dMaaPContext context
- * @throws IOException ex
- * @throws AccessDeniedException
- */
- @Override
- public void showConsumerCache(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException {
- adminAuthenticate(dMaaPContext);
-
- JSONObject consumers = new JSONObject();
- JSONArray jsonConsumersList = new JSONArray();
-
- for (Consumer consumer : getConsumerFactory(dMaaPContext).getConsumers()) {
- JSONObject consumerObject = new JSONObject();
- consumerObject.put("name", consumer.getName());
- consumerObject.put("created", consumer.getCreateTimeMs());
- consumerObject.put("accessed", consumer.getLastAccessMs());
- jsonConsumersList.put(consumerObject);
- }
-
- consumers.put("consumers", jsonConsumersList);
- log.info("========== AdminServiceImpl: getConsumerCache: " + jsonConsumersList.toString() + "===========");
- DMaaPResponseBuilder.respondOk(dMaaPContext, consumers);
- }
-
- /**
- *
- * dropConsumerCache() method clears consumer cache
- * @param dMaaPContext context
- * @throws JSONException ex
- * @throws IOException ex
- * @throws AccessDeniedException
- *
- */
- @Override
- public void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException, AccessDeniedException {
- adminAuthenticate(dMaaPContext);
- getConsumerFactory(dMaaPContext).dropCache();
- DMaaPResponseBuilder.respondOkWithHtml(dMaaPContext, "Consumer cache cleared successfully");
- // log.info("========== AdminServiceImpl: dropConsumerCache: Consumer
- // Cache successfully dropped.===========");
- }
-
- /**
- * getfConsumerFactory returns CosnumerFactory details
- * @param dMaaPContext contxt
- * @return ConsumerFactory obj
- *
- */
- private ConsumerFactory getConsumerFactory(DMaaPContext dMaaPContext) {
- return dMaaPContext.getConfigReader().getfConsumerFactory();
- }
-
- /**
- * return ipblacklist
- * @param dMaaPContext context
- * @return blacklist obj
- */
- private static Blacklist getIpBlacklist(DMaaPContext dMaaPContext) {
- return dMaaPContext.getConfigReader().getfIpBlackList();
- }
-
-
- /**
- * Get list of blacklisted ips
- */
- @Override
- public void getBlacklist ( DMaaPContext dMaaPContext ) throws IOException, AccessDeniedException
- {
- adminAuthenticate ( dMaaPContext );
-
- DMaaPResponseBuilder.respondOk ( dMaaPContext,
- new JSONObject().put ( "blacklist",
- setToJsonArray ( getIpBlacklist (dMaaPContext).asSet() ) ) );
- }
-
- public static JSONArray setToJsonArray ( Set<?> fields )
- {
- return collectionToJsonArray ( fields );
- }
-
- public static JSONArray collectionToJsonArray ( Collection<?> fields )
- {
- final JSONArray a = new JSONArray ();
- if ( fields != null )
- {
- for ( Object o : fields )
- {
- a.put ( o );
- }
- }
- return a;
- }
-
- /**
- * Add ip to blacklist
- */
- @Override
- public void addToBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException
- {
- adminAuthenticate ( dMaaPContext );
-
- getIpBlacklist (dMaaPContext).add ( ip );
- DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext );
- }
-
- /**
- * Remove ip from blacklist
- */
- @Override
- public void removeFromBlacklist ( DMaaPContext dMaaPContext, String ip ) throws IOException, ConfigDbException, AccessDeniedException
- {
- adminAuthenticate ( dMaaPContext );
-
- getIpBlacklist (dMaaPContext).remove ( ip );
- DMaaPResponseBuilder.respondOkNoContent ( dMaaPContext );
- }
-
- /**
- * Authenticate if user is admin
- * @param dMaaPContext context
- * @throws AccessDeniedException ex
- */
- private static void adminAuthenticate ( DMaaPContext dMaaPContext ) throws AccessDeniedException
- {
-
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext);
- if ( user == null || !user.getKey ().equals ( "admin" ) )
- {
- throw new AccessDeniedException ();
- }
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java
deleted file mode 100644
index b0e8a86..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/ApiKeysServiceImpl.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.io.IOException;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.springframework.stereotype.Service;
-
-import com.att.dmf.mr.beans.ApiKeyBean;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
-import com.att.dmf.mr.service.ApiKeysService;
-import com.att.dmf.mr.utils.ConfigurationReader;
-import com.att.dmf.mr.utils.DMaaPResponseBuilder;
-import com.att.dmf.mr.utils.Emailer;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-/**
- * Implementation of the ApiKeysService, this will provide the below operations,
- * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey
- *
- * @author nilanjana.maity
- */
-@Service
-public class ApiKeysServiceImpl implements ApiKeysService {
-
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysServiceImpl.class.toString());
- /**
- * This method will provide all the ApiKeys present in kafka server.
- *
- * @param dmaapContext
- * @throws ConfigDbException
- * @throws IOException
- */
- public void getAllApiKeys(DMaaPContext dmaapContext)
- throws ConfigDbException, IOException {
-
- ConfigurationReader configReader = dmaapContext.getConfigReader();
-
- log.info("configReader : " + configReader.toString());
-
- final JSONObject result = new JSONObject();
- final JSONArray keys = new JSONArray();
- result.put("apiKeys", keys);
-
- NsaApiDb<NsaSimpleApiKey> apiDb = configReader.getfApiKeyDb();
-
- for (String key : apiDb.loadAllKeys()) {
- keys.put(key);
- }
- log.info("========== ApiKeysServiceImpl: getAllApiKeys: Api Keys are : "
- + keys.toString() + "===========");
- DMaaPResponseBuilder.respondOk(dmaapContext, result);
- }
-
- /**
- * @param dmaapContext
- * @param apikey
- * @throws ConfigDbException
- * @throws IOException
- */
- @Override
- public void getApiKey(DMaaPContext dmaapContext, String apikey)
- throws ConfigDbException, IOException {
-
- String errorMsg = "Api key name is not mentioned.";
- int errorCode = HttpStatusCodes.k400_badRequest;
-
- if (null != apikey) {
- NsaSimpleApiKey simpleApiKey = getApiKeyDb(dmaapContext)
- .loadApiKey(apikey);
-
-
- if (null != simpleApiKey) {
- JSONObject result = simpleApiKey.asJsonObject();
- DMaaPResponseBuilder.respondOk(dmaapContext, result);
- log.info("========== ApiKeysServiceImpl: getApiKey : "
- + result.toString() + "===========");
- return;
- } else {
- errorMsg = "Api key [" + apikey + "] does not exist.";
- errorCode = HttpStatusCodes.k404_notFound;
- log.info("========== ApiKeysServiceImpl: getApiKey: Error : API Key does not exist. "
- + "===========");
- DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
- errorMsg);
- throw new IOException();
- }
- }
-
- }
-
- /**
- * @param dmaapContext
- * @param nsaApiKey
- * @throws KeyExistsException
- * @throws ConfigDbException
- * @throws IOException
- */
- @Override
- public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
- throws KeyExistsException, ConfigDbException, IOException {
-
- log.debug("TopicService: : createApiKey....");
-
- String contactEmail = nsaApiKey.getEmail();
- final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1 ;
- String kSetting_AllowAnonymousKeys= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"apiKeys.allowAnonymous");
- if(null==kSetting_AllowAnonymousKeys) kSetting_AllowAnonymousKeys ="false";
-
-
- if ( kSetting_AllowAnonymousKeys.equalsIgnoreCase("true") && !emailProvided )
- {
- DMaaPResponseBuilder.respondWithErrorInJson(dmaapContext, 400, "You must provide an email address.");
- return;
- }
-
-
- final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
- String apiKey = nsaApiKey.getKey();
- String sharedSecret = nsaApiKey.getSharedSecret();
- final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey,
- sharedSecret);
- if (null != key) {
-
- if (null != nsaApiKey.getEmail()) {
- key.setContactEmail(nsaApiKey.getEmail());
- }
-
- if (null != nsaApiKey.getDescription()) {
- key.setDescription(nsaApiKey.getDescription());
- }
-
- log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : "
- + key.toString() + "=====");
- apiKeyDb.saveApiKey(key);
-
- // email out the secret to validate the email address
- if ( emailProvided )
- {
- String body = "\n" + "Your email address was provided as the creator of new API key \""
- + apiKey + "\".\n" + "\n" + "If you did not make this request, please let us know."
- + " See http://sa2020.it.att.com:8888 for contact information, " + "but don't worry -"
- + " the API key is useless without the information below, which has been provided "
- + "only to you.\n" + "\n\n" + "For API key \"" + apiKey + "\", use API key secret:\n\n\t"
- + sharedSecret + "\n\n" + "Note that it's normal to share the API key"
- + " (" + apiKey + "). "
- + "This is how you are granted access to resources " + "like a UEB topic or Flatiron scope. "
- + "However, you should NOT share the API key's secret. " + "The API key is associated with your"
- + " email alone. ALL access to data made with this " + "key will be your responsibility. If you "
- + "share the secret, someone else can use the API key " + "to access proprietary data with your "
- + "identity.\n" + "\n" + "Enjoy!\n" + "\n" + "The GFP/SA-2020 Team";
-
- Emailer em = dmaapContext.getConfigReader().getSystemEmailer();
- em.send(contactEmail, "New API Key", body);
- }
- log.debug("TopicService: : sending response.");
-
- JSONObject o = key.asJsonObject();
-
- o.put ( NsaSimpleApiKey.kApiSecretField,
- emailProvided ?
- "Emailed to " + contactEmail + "." :
- key.getSecret ()
- );
- DMaaPResponseBuilder.respondOk(dmaapContext,
- o);
-
- return;
- } else {
- log.debug("=======ApiKeysServiceImpl: createApiKey : Error in creating API Key.=====");
- DMaaPResponseBuilder.respondWithError(dmaapContext,
- HttpStatusCodes.k500_internalServerError,
- "Failed to create api key.");
- throw new KeyExistsException(apiKey);
- }
- }
-
- /**
- * @param dmaapContext
- * @param apikey
- * @param nsaApiKey
- * @throws ConfigDbException
- * @throws IOException
- * @throws AccessDeniedException
- */
- @Override
- public void updateApiKey(DMaaPContext dmaapContext, String apikey,
- ApiKeyBean nsaApiKey) throws ConfigDbException, IOException, AccessDeniedException {
-
- String errorMsg = "Api key name is not mentioned.";
- int errorCode = HttpStatusCodes.k400_badRequest;
-
- if (null != apikey) {
- final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
- final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
- boolean shouldUpdate = false;
-
- if (null != key) {
- final NsaApiKey user = DMaaPAuthenticatorImpl
- .getAuthenticatedUser(dmaapContext);
-
- if (user == null || !user.getKey().equals(key.getKey())) {
- throw new AccessDeniedException("You must authenticate with the key you'd like to update.");
- }
-
- if (null != nsaApiKey.getEmail()) {
- key.setContactEmail(nsaApiKey.getEmail());
- shouldUpdate = true;
- }
-
- if (null != nsaApiKey.getDescription()) {
- key.setDescription(nsaApiKey.getDescription());
- shouldUpdate = true;
- }
-
- if (shouldUpdate) {
- apiKeyDb.saveApiKey(key);
- }
-
- log.info("======ApiKeysServiceImpl : updateApiKey : Key Updated Successfully :"
- + key.toString() + "=========");
- DMaaPResponseBuilder.respondOk(dmaapContext,
- key.asJsonObject());
- return;
- }
- } else {
- errorMsg = "Api key [" + apikey + "] does not exist.";
- errorCode = HttpStatusCodes.k404_notFound;
- DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
- errorMsg);
- log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============");
- throw new IOException();
- }
- }
-
- /**
- * @param dmaapContext
- * @param apikey
- * @throws ConfigDbException
- * @throws IOException
- * @throws AccessDeniedException
- */
- @Override
- public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
- throws ConfigDbException, IOException, AccessDeniedException {
-
- String errorMsg = "Api key name is not mentioned.";
- int errorCode = HttpStatusCodes.k400_badRequest;
-
- if (null != apikey) {
- final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
- final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);
-
- if (null != key) {
-
- final NsaApiKey user = DMaaPAuthenticatorImpl
- .getAuthenticatedUser(dmaapContext);
- if (user == null || !user.getKey().equals(key.getKey())) {
- throw new AccessDeniedException("You don't own the API key.");
- }
-
- apiKeyDb.deleteApiKey(key);
- log.info("======ApiKeysServiceImpl : deleteApiKey : Deleted Key successfully.============");
- DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
- "Api key [" + apikey + "] deleted successfully.");
- return;
- }
- } else {
- errorMsg = "Api key [" + apikey + "] does not exist.";
- errorCode = HttpStatusCodes.k404_notFound;
- DMaaPResponseBuilder.respondWithError(dmaapContext, errorCode,
- errorMsg);
- log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============");
- throw new IOException();
- }
- }
-
- /**
- *
- * @param dmaapContext
- * @return
- */
- private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
- ConfigurationReader configReader = dmaapContext.getConfigReader();
- return configReader.getfApiKeyDb();
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java b/src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java
deleted file mode 100644
index 104d7de..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/BaseTransactionDbImpl.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.util.Set;
-import java.util.TreeSet;
-
-import com.att.dmf.mr.transaction.DMaaPTransactionFactory;
-import com.att.dmf.mr.transaction.DMaaPTransactionObj;
-import com.att.dmf.mr.transaction.DMaaPTransactionObjDB;
-import com.att.dmf.mr.transaction.TransactionObj;
-import com.att.nsa.configs.ConfigDb;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.configs.ConfigPath;
-
-/**
- * Persistent storage for Transaction objects built over an abstract config db.
- *
- * @author anowarul.islam
- *
- * @param <K>
- */
-public class BaseTransactionDbImpl<K extends DMaaPTransactionObj> implements DMaaPTransactionObjDB<K> {
-
- private final ConfigDb fDb;
- private final ConfigPath fBasePath;
- private final DMaaPTransactionFactory<K> fKeyFactory;
-
- private static final String kStdRootPath = "/transaction";
-
- private ConfigPath makePath(String transactionId) {
- return fBasePath.getChild(transactionId);
- }
-
- /**
- * Construct an Transaction db over the given config db at the standard
- * location
- *
- * @param db
- * @param keyFactory
- * @throws ConfigDbException
- */
- public BaseTransactionDbImpl(ConfigDb db, DMaaPTransactionFactory<K> keyFactory) throws ConfigDbException {
- this(db, kStdRootPath, keyFactory);
- }
-
- /**
- * Construct an Transaction db over the given config db using the given root
- * location
- *
- * @param db
- * @param rootPath
- * @param keyFactory
- * @throws ConfigDbException
- */
- public BaseTransactionDbImpl(ConfigDb db, String rootPath, DMaaPTransactionFactory<K> keyFactory)
- throws ConfigDbException {
- fDb = db;
- fBasePath = db.parse(rootPath);
- fKeyFactory = keyFactory;
-
- if (!db.exists(fBasePath)) {
- db.store(fBasePath, "");
- }
- }
-
- /**
- * Create a new Transaction Obj. If one exists,
- *
- * @param id
- * @return the new Transaction record
- * @throws ConfigDbException
- */
- public synchronized K createTransactionObj(String id) throws KeyExistsException, ConfigDbException {
- final ConfigPath path = makePath(id);
- if (fDb.exists(path)) {
- throw new KeyExistsException(id);
- }
-
- // make one, store it, return it
- final K newKey = fKeyFactory.makeNewTransactionId(id);
- fDb.store(path, newKey.serialize());
- return newKey;
- }
-
- /**
- * Save an Transaction record. This must be used after changing auxiliary
- * data on the record. Note that the transaction object must exist (via
- * createTransactionObj).
- *
- * @param transaction
- * object
- * @throws ConfigDbException
- */
- @Override
- public synchronized void saveTransactionObj(K trnObj) throws ConfigDbException {
- final ConfigPath path = makePath(trnObj.getId());
- if (!fDb.exists(path) || !(trnObj instanceof TransactionObj)) {
- throw new IllegalStateException(trnObj.getId() + " is not known to this database");
- }
- fDb.store(path, ((TransactionObj) trnObj).serialize());
- }
-
- /**
- * Load an Transaction record based on the Transaction Id value
- *
- * @param transactionId
- * @return an Transaction Object record or null
- * @throws ConfigDbException
- */
- @Override
- public synchronized K loadTransactionObj(String transactionId) throws ConfigDbException {
- final String data = fDb.load(makePath(transactionId));
- if (data != null) {
- return fKeyFactory.makeNewTransactionObj(data);
- }
- return null;
- }
-
- /**
- * Load all transactions known to this database. (This could be expensive.)
- *
- * @return a set of all Transaction objects
- * @throws ConfigDbException
- */
- public synchronized Set<String> loadAllTransactionObjs() throws ConfigDbException {
- final TreeSet<String> result = new TreeSet<>();
- for (ConfigPath cp : fDb.loadChildrenNames(fBasePath)) {
- result.add(cp.getName());
- }
- return result;
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java
deleted file mode 100644
index 73a373e..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/EventsServiceImpl.java
+++ /dev/null
@@ -1,867 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.Properties;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.http.HttpStatus;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.errors.TopicExistsException;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Service;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.backends.ConsumerFactory;
-import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.dmf.mr.backends.Publisher;
-import com.att.dmf.mr.backends.Publisher.message;
-import com.att.dmf.mr.backends.kafka.KafkaLiveLockAvoider2;
-import com.att.dmf.mr.beans.DMaaPCambriaLimiter;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.beans.LogDetails;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.exception.DMaaPAccessDeniedException;
-import com.att.dmf.mr.exception.DMaaPErrorMessages;
-import com.att.dmf.mr.exception.DMaaPResponseCode;
-import com.att.dmf.mr.exception.ErrorResponse;
-
-import com.att.dmf.mr.metabroker.Topic;
-import com.att.dmf.mr.resources.CambriaEventSet;
-import com.att.dmf.mr.resources.CambriaOutboundEventStream;
-import com.att.dmf.mr.security.DMaaPAAFAuthenticator;
-import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
-import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
-import com.att.dmf.mr.service.EventsService;
-import com.att.dmf.mr.utils.DMaaPResponseBuilder;
-import com.att.dmf.mr.utils.Utils;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.standards.MimeTypes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.util.rrConvertor;
-
-/**
- * This class provides the functinality to publish and subscribe message to
- * kafka
- *
- * @author Ramkumar Sembaiyam
- *
- */
-@Service
-public class EventsServiceImpl implements EventsService {
- // private static final Logger LOG =
-
- private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class);
-
- private static final String BATCH_LENGTH = "event.batch.length";
- private static final String TRANSFER_ENCODING = "Transfer-Encoding";
- @Autowired
- private DMaaPErrorMessages errorMessages;
-
- //@Autowired
-
-
- // @Value("${metrics.send.cambria.topic}")
-
-
	/**
	 * @return the DMaaP error-message bundle used to build error responses
	 */
	public DMaaPErrorMessages getErrorMessages() {
		return errorMessages;
	}
-
	/**
	 * @param errorMessages the DMaaP error-message bundle to use
	 */
	public void setErrorMessages(DMaaPErrorMessages errorMessages) {
		this.errorMessages = errorMessages;
	}
-
	/**
	 * Fetches messages for a consumer (consumerGroup/clientId) from the given
	 * topic and streams them back on the response. Applies blacklist, topic
	 * existence, and (API-key or AAF) read-permission checks before handing a
	 * consumer to an outbound event stream; on success the consumer's offsets
	 * are committed and metrics/rate-limiter counters are updated.
	 *
	 * @param ctx request context
	 * @param topic topic to read from
	 * @param consumerGroup consumer group name
	 * @param clientId client id within the group
	 * @throws ConfigDbException,
	 *             TopicExistsException, AccessDeniedException,
	 *             UnavailableException, CambriaApiException, IOException
	 *
	 *
	 */
	@Override
	public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
			throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
			CambriaApiException, IOException, DMaaPAccessDeniedException {
		final long startTime = System.currentTimeMillis();
		final HttpServletRequest req = ctx.getRequest();

		boolean isAAFTopic = false;
		// was this host blacklisted?
		final String remoteAddr = Utils.getRemoteAddress(ctx);
		if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {

			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
					"Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
					ctx.getRequest().getRemoteHost(), null, null);
			LOG.info(errRes.toString());
			throw new CambriaApiException(errRes);
		}

		// max number of messages to return; kNoLimit unless the client caps it
		int limit = CambriaConstants.kNoLimit;
		if (req.getParameter("limit") != null) {
			limit = Integer.parseInt(req.getParameter("limit"));
		}

		// poll timeout: server-wide "timeout" property, overridable per request
		int timeoutMs = CambriaConstants.kNoTimeout;
		String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");
		if (strtimeoutMS != null)
			timeoutMs = Integer.parseInt(strtimeoutMS);

		if (req.getParameter("timeout") != null) {
			timeoutMs = Integer.parseInt(req.getParameter("timeout"));
		}

		// By default no filter is applied if filter is not passed as a
		// parameter in the request URI
		String topicFilter = CambriaConstants.kNoFilter;
		if (null != req.getParameter("filter")) {
			topicFilter = req.getParameter("filter");
		}
		// pretty to print the messages in new line
		String prettyval = "0";
		String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty");
		if (null != strPretty)
			prettyval = strPretty;

		String metaval = "0";
		String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta");
		if (null != strmeta)
			metaval = strmeta;

		final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval);
		// withMeta to print offset along with message
		final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval);

		final LogWrap logger = new LogWrap(topic, consumerGroup, clientId);
		logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter + " from Remote host "+ctx.getRequest().getRemoteHost());

		// is this user allowed to read this topic?
		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);

		if (metatopic == null) {
			// no such topic.
			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
					DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
					errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()),
					topic, null, null, consumerGroup + "/" + clientId, ctx.getRequest().getRemoteHost());
			LOG.info(errRes.toString());
			throw new CambriaApiException(errRes);
		}
		// the internal metrics topic is exempt from the permission checks below
		String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
				"metrics.send.cambria.topic");
		if (null == metricTopicname)
			metricTopicname = "msgrtr.apinode.metrics.dmaap";

		// no Authorization header: fall back to the API-key ownership check
		if (null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) {
			if (null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))) {
				// check permissions
				metatopic.checkUserRead(user);
			}
		}
		// if headers are not provided then user will be null
		if (user == null && null != ctx.getRequest().getHeader("Authorization")) {
			// Authorization header present but no API-key user: authenticate
			// against AAF with the topic's "sub" permission

			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
			String permission = aaf.aafPermissionString(topic, "sub");
			if (!aaf.aafAuthentication(ctx.getRequest(), permission)) {
				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
						errorMessages.getNotPermitted1() + " read " + errorMessages.getNotPermitted2() + topic + " on "
								+ permission,
						null, Utils.getFormattedDate(new Date()), topic, null, null, consumerGroup + "/" + clientId,
						ctx.getRequest().getRemoteHost());
				LOG.info(errRes.toString());
				throw new DMaaPAccessDeniedException(errRes);

			}
			isAAFTopic = true;
		}
		final long elapsedMs1 = System.currentTimeMillis() - startTime;
		logger.info("Time taken in getEvents Authorization " + elapsedMs1 + " ms for " + topic + " " + consumerGroup
				+ " " + clientId);
		Consumer c = null;

		// host id from config, falling back to the local hostname
		// NOTE(review): lhostId is computed but not used below — presumably
		// consumed by a removed code path; confirm before deleting
		String lhostId = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
				"clusterhostid");
		if (null == lhostId) {
			try {
				lhostId = InetAddress.getLocalHost().getCanonicalHostName();
			} catch (UnknownHostException e) {
				LOG.info("Unknown Host Exception error occured while getting getting hostid");
			}

		}
		CambriaOutboundEventStream coes = null;
		try {
			final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
			final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter();
			// rate-limit this topic/group/client before acquiring a consumer
			rl.onCall(topic, consumerGroup, clientId, ctx.getRequest().getRemoteHost());
			c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,
					ctx.getRequest().getRemoteHost());
			coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
					.limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
			coes.setDmaapContext(ctx);
			coes.setTopic(metatopic);
			// transaction logging is on for AAF topics or when globally required
			if (isTransEnabled() || isAAFTopic) {
				coes.setTransEnabled(true);
			} else {
				coes.setTransEnabled(false);
			}
			coes.setTopicStyle(isAAFTopic);
			final long elapsedMs2 = System.currentTimeMillis() - startTime;
			logger.info("Time taken in getEvents getConsumerFor " + elapsedMs2 + " ms for " + topic + " "
					+ consumerGroup + " " + clientId);

			DMaaPResponseBuilder.setNoCacheHeadings(ctx);

			DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes);
			// No IOException thrown during respondOkWithStream, so commit the
			// new offsets to all the brokers
			c.commitOffsets();
			final int sent = coes.getSentCount();

			metricsSet.consumeTick(sent);
			rl.onSend(topic, consumerGroup, clientId, sent);
			final long elapsedMs = System.currentTimeMillis() - startTime;
			logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + c.getOffset() + " for "
					+ topic + " " + consumerGroup + " " + clientId + " on to the server "
					+ ctx.getRequest().getRemoteHost());

		} catch (UnavailableException excp) {
			// no consumer could be obtained for this group/client right now
			logger.warn(excp.getMessage(), excp);

			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
					errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
					null, null, consumerGroup + "-" + clientId, ctx.getRequest().getRemoteHost());
			LOG.info(errRes.toString());
			throw new CambriaApiException(errRes);

		} catch (java.util.ConcurrentModificationException excp1) {
			// the same consumer appears to be driven from more than one server
			LOG.info(excp1.getMessage() + "on " + topic + " " + consumerGroup + " ****** " + clientId + " from Remote"+ctx.getRequest().getRemoteHost());
			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_CONFLICT,
					DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
					"Couldn't respond to client, possible of consumer requests from more than one server. Please contact MR team if you see this issue occurs continously", null,
					Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
			logger.info(errRes.toString());
			throw new CambriaApiException(errRes);

		} catch (CambriaApiException excp) {
			// already carries an ErrorResponse: log and rethrow unchanged
			LOG.info(excp.getMessage() + "on " + topic + " " + consumerGroup + " ****** " + clientId);

			throw excp;
		}
		catch (Exception excp) {
			// any other failure: destroy the cached consumer so a fresh one is
			// built on the next request, then report 503

			logger.info("Couldn't respond to client, closing cambria consumer " + " " + topic + " " + consumerGroup
					+ " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + " ****** " + excp);

			ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);


			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
					DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
					"Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null,
					Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
			logger.info(errRes.toString());
			throw new CambriaApiException(errRes);
		} finally {
			coes = null;
			// If no cache, close the consumer now that we're done with it.
			boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
			String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
					ConsumerFactory.kSetting_EnableCache);
			if (null != strkSetting_EnableCache)
				kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);

			if (!kSetting_EnableCache && (c != null)) {
				try {
					c.close();
				} catch (Exception e) {
					logger.info("***Exception occured in getEvents finaly block while closing the consumer " + " "
							+ topic + " " + consumerGroup + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE
							+ " " + e);
				}
			}
		}
	}
-
	/**
	 * Publishes the messages in the request body to the given topic, after
	 * blacklist and (API-key or AAF) write-permission checks. Routes to the
	 * transactional path when the topic requires AAF or transaction ids are
	 * globally required; otherwise publishes without transaction tracking.
	 *
	 * @param ctx request context
	 * @param topic topic to publish to
	 * @param msg raw request body containing the messages
	 * @param defaultPartition partition key to use when a message carries none
	 * @param requestTime time the request was received, used in transaction logs
	 * @throws missingReqdSetting
	 *
	 */
	@Override
	public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
			final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
			CambriaApiException, IOException, missingReqdSetting, DMaaPAccessDeniedException {

		// is this user allowed to write to this topic?
		final long startMs = System.currentTimeMillis();
		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
		final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
		boolean isAAFTopic = false;

		// was this host blacklisted?
		final String remoteAddr = Utils.getRemoteAddress(ctx);

		if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {

			ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
					DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
					"Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
					null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
					ctx.getRequest().getRemoteHost(), null, null);
			LOG.info(errRes.toString());
			throw new CambriaApiException(errRes);
		}

		String topicNameStd = null;

		// topics whose names start with the enforced prefix always require AAF
		topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
				"enforced.topic.name.AAF");
		// the internal metrics topic is exempt from the permission checks below
		String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
				"metrics.send.cambria.topic");
		if (null == metricTopicname)
			metricTopicname = "msgrtr.apinode.metrics.dmaap";
		boolean topicNameEnforced = false;
		if (null != topicNameStd && topic.startsWith(topicNameStd)) {
			topicNameEnforced = true;
		}

		// Here check if the user has rights to publish on the topic
		// ( This will be called when no auth is added or when UEB API Key
		// Authentication is used)
		// checkUserWrite(user) method will throw an error when there is no Auth
		// header added or when the
		// user has no publish rights

		if (null != metatopic && null != metatopic.getOwner() && !("".equals(metatopic.getOwner()))
				&& null == ctx.getRequest().getHeader("Authorization") && !topic.equalsIgnoreCase(metricTopicname)) {
			metatopic.checkUserWrite(user);
		}

		// if headers are not provided then user will be null
		if (topicNameEnforced || (user == null && null != ctx.getRequest().getHeader("Authorization")
				&& !topic.equalsIgnoreCase(metricTopicname))) {
			// Authorization header present but no API-key user: authenticate
			// against AAF with the topic's "pub" permission

			DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
			String permission = aaf.aafPermissionString(topic, "pub");
			if (!aaf.aafAuthentication(ctx.getRequest(), permission)) {
				ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
						DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
						errorMessages.getNotPermitted1() + " publish " + errorMessages.getNotPermitted2() + topic
								+ " on " + permission,
						null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
						ctx.getRequest().getRemoteHost(), null, null);
				LOG.info(errRes.toString());
				throw new DMaaPAccessDeniedException(errRes);
			}
			isAAFTopic = true;
		}

		final HttpServletRequest req = ctx.getRequest();

		// check for chunked input
		boolean chunked = false;
		if (null != req.getHeader(TRANSFER_ENCODING)) {
			chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
		}
		// get the media type, or set it to a generic value if it wasn't
		// provided
		String mediaType = req.getContentType();
		if (mediaType == null || mediaType.length() == 0) {
			mediaType = MimeTypes.kAppGenericBinary;
		}

		if (mediaType.contains("charset=UTF-8")) {
			mediaType = mediaType.replace("; charset=UTF-8", "").trim();
		}

		// global switch: when set, every topic takes the transactional path
		String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
				"transidUEBtopicreqd");
		boolean istransidreqd = false;
		if (null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true")) {
			istransidreqd = true;
		}

		if (isAAFTopic || istransidreqd) {
			pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
		} else {
			pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
		}
		final long endMs = System.currentTimeMillis();
		final long totalMs = endMs - startMs;

		LOG.info("Overall Response time - Published " + " msgs in " + totalMs + " ms for topic " + topic);

	}
-
- /**
- *
- * @param ctx
- * @param topic
- * @param msg
- * @param defaultPartition
- * @param chunked
- * @param mediaType
- * @throws ConfigDbException
- * @throws AccessDeniedException
- * @throws TopicExistsException
- * @throws CambriaApiException
- * @throws IOException
- */
- private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
- String mediaType)
- throws ConfigDbException, AccessDeniedException, TopicExistsException, CambriaApiException, IOException {
- final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
- // setup the event set
- final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
-
- // start processing, building a batch to push to the backend
- final long startMs = System.currentTimeMillis();
- long count = 0;
- long maxEventBatch = 1024L* 16;
- String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
- if (null != batchlen)
- maxEventBatch = Long.parseLong(batchlen);
- // long maxEventBatch =
-
- final LinkedList<Publisher.message> batch = new LinkedList<>();
- // final ArrayList<KeyedMessage<String, String>> kms = new
-
- final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<>();
- try {
- // for each message...
- Publisher.message m = null;
- while ((m = events.next()) != null) {
- // add the message to the batch
- batch.add(m);
- // final KeyedMessage<String, String> data = new
- // KeyedMessage<String, String>(topic, m.getKey(),
-
- // kms.add(data);
- final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
- m.getMessage());
-
- pms.add(data);
- // check if the batch is full
- final int sizeNow = batch.size();
- if (sizeNow > maxEventBatch) {
- // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
-
- // kms.clear();
- ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
- pms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- }
- }
-
- // send the pending batch
- final int sizeNow = batch.size();
- if (sizeNow > 0) {
- // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
-
- // kms.clear();
- ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
- pms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- }
-
- final long endMs = System.currentTimeMillis();
- final long totalMs = endMs - startMs;
-
- LOG.info("Published " + count + " msgs in " + totalMs + " ms for topic " + topic + " from server "
- + ctx.getRequest().getRemoteHost());
-
- // build a responseP
- final JSONObject response = new JSONObject();
- response.put("count", count);
- response.put("serverTimeMs", totalMs);
- DMaaPResponseBuilder.respondOk(ctx, response);
-
- } catch (Exception excp) {
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg = null;
- if (excp instanceof CambriaApiException) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
-
- }
- ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
- + "." + errorMsg,
- null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
- null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
-
- }
- }
-
- /**
- *
- * @param ctx
- * @param inputStream
- * @param topic
- * @param partitionKey
- * @param requestTime
- * @param chunked
- * @param mediaType
- * @throws ConfigDbException
- * @throws AccessDeniedException
- * @throws TopicExistsException
- * @throws IOException
- * @throws CambriaApiException
- */
- private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
- final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
- throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException, CambriaApiException {
-
- final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
- // setup the event set
- final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
-
- // start processing, building a batch to push to the backend
- final long startMs = System.currentTimeMillis();
- long count = 0;
- long maxEventBatch = 1024L * 16;
- String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
- if (null != evenlen)
- maxEventBatch = Long.parseLong(evenlen);
- // final long maxEventBatch =
-
- final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
- // final ArrayList<KeyedMessage<String, String>> kms = new
-
- final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
- Publisher.message m = null;
- int messageSequence = 1;
- Long batchId = 1L;
- final boolean transactionEnabled = true;
- int publishBatchCount = 0;
- SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
-
- // LOG.warn("Batch Start Id: " +
-
- try {
- // for each message...
- batchId = DMaaPContext.getBatchID();
-
- String responseTransactionId = null;
-
- while ((m = events.next()) != null) {
-
- // LOG.warn("Batch Start Id: " +
-
-
- addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
- transactionEnabled);
- messageSequence++;
-
-
- batch.add(m);
-
- responseTransactionId = m.getLogDetails().getTransactionId();
-
- JSONObject jsonObject = new JSONObject();
- jsonObject.put("msgWrapMR", m.getMessage());
- jsonObject.put("transactionId", responseTransactionId);
- // final KeyedMessage<String, String> data = new
- // KeyedMessage<String, String>(topic, m.getKey(),
-
- // kms.add(data);
- final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
- m.getMessage());
-
- pms.add(data);
- // check if the batch is full
- final int sizeNow = batch.size();
- if (sizeNow >= maxEventBatch) {
- String startTime = sdf.format(new Date());
- LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
- + batchId + "]");
- try {
- // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
- // kms);
- ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
- // transactionLogs(batch);
- for (message msg : batch) {
- LogDetails logDetails = msg.getLogDetails();
- LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
- }
- } catch (Exception excp) {
-
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg = null;
- if (excp instanceof CambriaApiException) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
- ErrorResponse errRes = new ErrorResponse(status,
- DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
- + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
- null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
- ctx.getRequest().getRemoteHost(), null, null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- pms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- publishBatchCount = sizeNow;
- count += sizeNow;
- // batchId++;
- String endTime = sdf.format(new Date());
- LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
- + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
- + ",Batch End Time=" + endTime + "]");
- batchId = DMaaPContext.getBatchID();
- }
- }
-
- // send the pending batch
- final int sizeNow = batch.size();
- if (sizeNow > 0) {
- String startTime = sdf.format(new Date());
- LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
- + batchId + "]");
- try {
- // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic,
- // kms);
- ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
- // transactionLogs(batch);
- for (message msg : batch) {
- LogDetails logDetails = msg.getLogDetails();
- LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
- }
- } catch (Exception excp) {
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg = null;
- if (excp instanceof CambriaApiException) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
-
- ErrorResponse errRes = new ErrorResponse(status,
- DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
- + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
- null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
- ctx.getRequest().getRemoteHost(), null, null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- pms.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- // batchId++;
- String endTime = sdf.format(new Date());
- publishBatchCount = sizeNow;
- LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
- + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
- + endTime + "]");
- }
-
- final long endMs = System.currentTimeMillis();
- final long totalMs = endMs - startMs;
-
- LOG.info("Published " + count + " msgs(with transaction id) in " + totalMs + " ms for topic " + topic);
-
- if (null != responseTransactionId) {
- ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId));
- }
-
- // build a response
- final JSONObject response = new JSONObject();
- response.put("count", count);
- response.put("serverTimeMs", totalMs);
- DMaaPResponseBuilder.respondOk(ctx, response);
-
- } catch (Exception excp) {
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg = null;
- if (excp instanceof CambriaApiException) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
-
- ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
- + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
- null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
- ctx.getRequest().getRemoteHost(), null, null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- }
-
- /**
- *
- * @param msg
- * @param topic
- * @param request
- * @param messageCreationTime
- * @param messageSequence
- * @param batchId
- * @param transactionEnabled
- */
- private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
- final String messageCreationTime, final int messageSequence, final Long batchId,
- final boolean transactionEnabled) {
- LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
- transactionEnabled);
- logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
- msg.setTransactionEnabled(transactionEnabled);
- msg.setLogDetails(logDetails);
- }
-
- /**
- *
- * @author anowarul.islam
- *
- */
- private static class LogWrap {
- private final String fId;
-
- /**
- * constructor initialization
- *
- * @param topic
- * @param cgroup
- * @param cid
- */
- public LogWrap(String topic, String cgroup, String cid) {
- fId = "[" + topic + "/" + cgroup + "/" + cid + "] ";
- }
-
- /**
- *
- * @param msg
- */
- public void info(String msg) {
- LOG.info(fId + msg);
- }
-
- /**
- *
- * @param msg
- * @param t
- */
- public void warn(String msg, Exception t) {
- LOG.warn(fId + msg, t);
- }
-
- }
-
- public boolean isTransEnabled() {
- String istransidUEBtopicreqd = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- "transidUEBtopicreqd");
- boolean istransidreqd = false;
- if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true"))) {
- istransidreqd = true;
- }
-
- return istransidreqd;
-
- }
-
	/**
	 * Builds the per-message transaction log record from the request metadata:
	 * topic, publisher id/ip, batch id, sequence number, timestamps, and the
	 * serving host's address.
	 *
	 * @param topicName topic the message is published to
	 * @param request originating HTTP request
	 * @param messageTimestamp request timestamp string
	 * @param messageSequence position of the message within the request
	 * @param batchId id of the batch the message belongs to
	 * @param transactionEnabled whether transaction tracking is on
	 * @return the populated LogDetails record
	 */
	private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
			final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
		LogDetails logDetails = new LogDetails();
		logDetails.setTopicId(topicName);
		logDetails.setMessageTimestamp(messageTimestamp);
		logDetails.setPublisherId(Utils.getUserApiKey(request));
		logDetails.setPublisherIp(request.getRemoteHost());
		logDetails.setMessageBatchId(batchId);
		logDetails.setMessageSequence(String.valueOf(messageSequence));
		logDetails.setTransactionEnabled(transactionEnabled);
		// transaction-id timestamp is set at record-creation time, not request time
		logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
		logDetails.setServerIp(request.getLocalAddr());
		return logDetails;
	}
-
- /*
- * public String getMetricsTopic() { return metricsTopic; }
- *
- * public void setMetricsTopic(String metricsTopic) { this.metricsTopic =
- * metricsTopic; }
- */
-
-
-
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java
deleted file mode 100644
index 387d8b1..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/MMServiceImpl.java
+++ /dev/null
@@ -1,600 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.LinkedList;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.Context;
-
-import org.apache.http.HttpStatus;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Service;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.Consumer;
-import com.att.dmf.mr.backends.ConsumerFactory;
-import com.att.dmf.mr.backends.ConsumerFactory.UnavailableException;
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.dmf.mr.backends.Publisher;
-import com.att.dmf.mr.backends.Publisher.message;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.beans.LogDetails;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.exception.DMaaPErrorMessages;
-import com.att.dmf.mr.exception.DMaaPResponseCode;
-import com.att.dmf.mr.exception.ErrorResponse;
-import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
-import com.att.dmf.mr.metabroker.Topic;
-import com.att.dmf.mr.resources.CambriaEventSet;
-import com.att.dmf.mr.resources.CambriaOutboundEventStream;
-import com.att.dmf.mr.service.MMService;
-import com.att.dmf.mr.utils.ConfigurationReader;
-import com.att.dmf.mr.utils.DMaaPResponseBuilder;
-import com.att.dmf.mr.utils.Utils;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.standards.MimeTypes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-import com.att.nsa.util.rrConvertor;
-
-
-
-@Service
-public class MMServiceImpl implements MMService {
- private static final String BATCH_LENGTH = "event.batch.length";
- private static final String TRANSFER_ENCODING = "Transfer-Encoding";
- //private static final Logger LOG = Logger.getLogger(MMServiceImpl.class);
- private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MMServiceImpl.class);
- @Autowired
- private DMaaPErrorMessages errorMessages;
-
- @Autowired
- @Qualifier("configurationReader")
- private ConfigurationReader configReader;
-
- // HttpServletRequest object
- @Context
- private HttpServletRequest request;
-
- // HttpServletResponse object
- @Context
- private HttpServletResponse response;
-
- @Override
- public void addWhiteList() {
-
- }
-
- @Override
- public void removeWhiteList() {
-
- }
-
- @Override
- public void listWhiteList() {
-
- }
-
- @Override
- public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId)
- throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException,
- CambriaApiException, IOException {
-
-
- final HttpServletRequest req = ctx.getRequest();
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
-
- // was this host blacklisted?
- final String remoteAddr = Utils.getRemoteAddress(ctx);
-
- if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
- null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
- ctx.getRequest().getRemoteHost(), null, null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
-
- int limit = CambriaConstants.kNoLimit;
-
- if (req.getParameter("limit") != null) {
- limit = Integer.parseInt(req.getParameter("limit"));
- }
- limit = 1;
-
- int timeoutMs = CambriaConstants.kNoTimeout;
- String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout");
- if (strtimeoutMS != null)
- timeoutMs = Integer.parseInt(strtimeoutMS);
- // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout",
-
- if (req.getParameter("timeout") != null) {
- timeoutMs = Integer.parseInt(req.getParameter("timeout"));
- }
-
- // By default no filter is applied if filter is not passed as a
- // parameter in the request URI
- String topicFilter = CambriaConstants.kNoFilter;
- if (null != req.getParameter("filter")) {
- topicFilter = req.getParameter("filter");
- }
- // pretty to print the messaages in new line
- String prettyval = "0";
- String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty");
- if (null != strPretty)
- prettyval = strPretty;
-
- String metaval = "0";
- String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta");
- if (null != strmeta)
- metaval = strmeta;
-
- final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval);
- // withMeta to print offset along with message
- final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval);
-
- // is this user allowed to read this topic?
- //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
- final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-
- if (metatopic == null) {
- // no such topic.
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND,
- DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(),
- errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()),
- topic, null, null, clientId, ctx.getRequest().getRemoteHost());
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- //String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic");
- /*
- * if (null==metricTopicname)
- * metricTopicname="msgrtr.apinode.metrics.dmaap"; //else if(user!=null)
- * if(null==ctx.getRequest().getHeader("Authorization")&&
- * !topic.equalsIgnoreCase(metricTopicname)) { if (null !=
- * metatopic.getOwner() && !("".equals(metatopic.getOwner()))){ // check
- * permissions metatopic.checkUserRead(user); } }
- */
-
- Consumer c = null;
- try {
- final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
- c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,ctx.getRequest().getRemoteHost());
-
- final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs)
- .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build();
- coes.setDmaapContext(ctx);
- coes.setTopic(metatopic);
-
- DMaaPResponseBuilder.setNoCacheHeadings(ctx);
-
- try {
- coes.write(baos);
- } catch (Exception ex) {
-
- }
-
- c.commitOffsets();
- final int sent = coes.getSentCount();
-
- metricsSet.consumeTick(sent);
-
- } catch (UnavailableException excp) {
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
- DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
- errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic,
- null, null, clientId, ctx.getRequest().getRemoteHost());
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
-
- } catch (CambriaApiException excp) {
-
- throw excp;
- } catch (Exception excp) {
-
- ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId);
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
- DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
- "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null,
- Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost());
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- } finally {
-
- boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled;
- String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- ConsumerFactory.kSetting_EnableCache);
- if (null != strkSetting_EnableCache)
- kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache);
-
- if (!kSetting_EnableCache && (c != null)) {
- c.close();
-
- }
- }
- return baos.toString();
- }
-
- @Override
- public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition,
- final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException,
- CambriaApiException, IOException, missingReqdSetting {
-
- //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx);
- //final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic);
-
- final String remoteAddr = Utils.getRemoteAddress(ctx);
-
- if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) {
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- "Source address [" + remoteAddr + "] is blacklisted. Please contact the cluster management team.",
- null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
- ctx.getRequest().getRemoteHost(), null, null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
-
- String topicNameStd = null;
-
- topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
- "enforced.topic.name.AAF");
- String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- "metrics.send.cambria.topic");
- if (null == metricTopicname)
- metricTopicname = "msgrtr.apinode.metrics.dmaap";
- boolean topicNameEnforced = false;
- if (null != topicNameStd && topic.startsWith(topicNameStd)) {
- topicNameEnforced = true;
- }
-
- final HttpServletRequest req = ctx.getRequest();
-
- boolean chunked = false;
- if (null != req.getHeader(TRANSFER_ENCODING)) {
- chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked");
- }
-
- String mediaType = req.getContentType();
- if (mediaType == null || mediaType.length() == 0) {
- mediaType = MimeTypes.kAppGenericBinary;
- }
-
- if (mediaType.contains("charset=UTF-8")) {
- mediaType = mediaType.replace("; charset=UTF-8", "").trim();
- }
-
- if (!topic.equalsIgnoreCase(metricTopicname)) {
- pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType);
- } else {
- pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType);
- }
- }
-
- private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request,
- final String messageCreationTime, final int messageSequence, final Long batchId,
- final boolean transactionEnabled) {
- LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId,
- transactionEnabled);
- logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage()));
- msg.setTransactionEnabled(transactionEnabled);
- msg.setLogDetails(logDetails);
- }
-
- private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request,
- final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) {
- LogDetails logDetails = new LogDetails();
- logDetails.setTopicId(topicName);
- logDetails.setMessageTimestamp(messageTimestamp);
- logDetails.setPublisherId(Utils.getUserApiKey(request));
- logDetails.setPublisherIp(request.getRemoteHost());
- logDetails.setMessageBatchId(batchId);
- logDetails.setMessageSequence(String.valueOf(messageSequence));
- logDetails.setTransactionEnabled(transactionEnabled);
- logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date()));
- logDetails.setServerIp(request.getLocalAddr());
- return logDetails;
- }
-
- private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked,
- String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException,
- CambriaApiException, IOException {
- final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
- // setup the event set
- final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);
-
- // start processing, building a batch to push to the backend
- final long startMs = System.currentTimeMillis();
- long count = 0;
-
- long maxEventBatch = 1024 * 16;
- String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
- if (null != batchlen)
- maxEventBatch = Long.parseLong(batchlen);
-
- // long maxEventBatch =
- // ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
- final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
- final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
- //final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
-
- try {
- // for each message...
- Publisher.message m = null;
- while ((m = events.next()) != null) {
- // add the message to the batch
- batch.add(m);
- final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
- m.getMessage());
- // check if the batch is full
- final int sizeNow = batch.size();
- if (sizeNow > maxEventBatch) {
- ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
- pms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- }
- }
-
- // send the pending batch
- final int sizeNow = batch.size();
- if (sizeNow > 0) {
- ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
- pms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- }
-
- final long endMs = System.currentTimeMillis();
- final long totalMs = endMs - startMs;
-
- LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
- // build a responseP
- final JSONObject response = new JSONObject();
- response.put("count", count);
- response.put("serverTimeMs", totalMs);
- // DMaaPResponseBuilder.respondOk(ctx, response);
-
- } catch (Exception excp) {
-
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg = null;
- if (excp.getClass().toString().contains("CambriaApiException")) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
-
- }
- ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count
- + "." + errorMsg,
- null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
- null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
-
- }
- }
-
- private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic,
- final String partitionKey, final String requestTime, final boolean chunked, final String mediaType)
- throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException,
- CambriaApiException {
-
- final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();
-
- // setup the event set
- final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey);
-
- // start processing, building a batch to push to the backend
- final long startMs = System.currentTimeMillis();
- long count = 0;
- long maxEventBatch = 1024 * 16;
- String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
- if (null != evenlen)
- maxEventBatch = Long.parseLong(evenlen);
-
- final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
- final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>();
-
- Publisher.message m = null;
- int messageSequence = 1;
- Long batchId = 1L;
- final boolean transactionEnabled = true;
- int publishBatchCount = 0;
- SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS");
-
- // LOG.warn("Batch Start Id: " +
- // Utils.getFromattedBatchSequenceId(batchId));
- try {
- // for each message...
- batchId = DMaaPContext.getBatchID();
-
- String responseTransactionId = null;
-
- while ((m = events.next()) != null) {
-
- // LOG.warn("Batch Start Id: " +
- // Utils.getFromattedBatchSequenceId(batchId));
-
- addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId,
- transactionEnabled);
- messageSequence++;
-
- // add the message to the batch
- batch.add(m);
-
- responseTransactionId = m.getLogDetails().getTransactionId();
-
- JSONObject jsonObject = new JSONObject();
- jsonObject.put("message", m.getMessage());
- jsonObject.put("transactionId", responseTransactionId);
- final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(),
- m.getMessage());
- pms.add(data);
-
- // check if the batch is full
- final int sizeNow = batch.size();
- if (sizeNow >= maxEventBatch) {
- String startTime = sdf.format(new Date());
- LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
- + batchId + "]");
- try {
- ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
- // transactionLogs(batch);
- for (message msg : batch) {
- LogDetails logDetails = msg.getLogDetails();
- LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
- }
- } catch (Exception excp) {
-
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg = null;
- if (excp.getClass().toString().contains("CambriaApiException")) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
- ErrorResponse errRes = new ErrorResponse(status,
- DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
- + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
- null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
- ctx.getRequest().getRemoteHost(), null, null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- pms.clear();
- batch.clear();
- metricsSet.publishTick(sizeNow);
- publishBatchCount = sizeNow;
- count += sizeNow;
- // batchId++;
- String endTime = sdf.format(new Date());
- LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id="
- + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime
- + ",Batch End Time=" + endTime + "]");
- batchId = DMaaPContext.getBatchID();
- }
- }
-
- // send the pending batch
- final int sizeNow = batch.size();
- if (sizeNow > 0) {
- String startTime = sdf.format(new Date());
- LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id="
- + batchId + "]");
- try {
- ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms);
- // transactionLogs(batch);
- for (message msg : batch) {
- LogDetails logDetails = msg.getLogDetails();
- LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails());
- }
- } catch (Exception excp) {
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg = null;
- if (excp.getClass().toString().contains("CambriaApiException")) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
-
- ErrorResponse errRes = new ErrorResponse(status,
- DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
- + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
- null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
- ctx.getRequest().getRemoteHost(), null, null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- pms.clear();
- metricsSet.publishTick(sizeNow);
- count += sizeNow;
- // batchId++;
- String endTime = sdf.format(new Date());
- publishBatchCount = sizeNow;
- LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId
- + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time="
- + endTime + "]");
- }
-
- final long endMs = System.currentTimeMillis();
- final long totalMs = endMs - startMs;
-
- LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);
-
- // build a response
- final JSONObject response = new JSONObject();
- response.put("count", count);
- response.put("serverTimeMs", totalMs);
-
- } catch (Exception excp) {
- int status = HttpStatus.SC_NOT_FOUND;
- String errorMsg = null;
- if (excp.getClass().toString().contains("CambriaApiException")) {
- status = ((CambriaApiException) excp).getStatus();
- JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
- JSONObject errObject = new JSONObject(jsonTokener);
- errorMsg = (String) errObject.get("message");
- }
-
- ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
- "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "."
- + errorMessages.getPublishMsgCount() + count + "." + errorMsg,
- null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()),
- ctx.getRequest().getRemoteHost(), null, null);
- LOG.info(errRes.toString());
- throw new CambriaApiException(errRes);
- }
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java
deleted file mode 100644
index d867ea8..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/MetricsServiceImpl.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.io.IOException;
-
-import org.json.JSONObject;
-import org.springframework.stereotype.Component;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.service.MetricsService;
-import com.att.dmf.mr.utils.DMaaPResponseBuilder;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.metrics.CdmMeasuredItem;
-
-/**
- *
- *
- * This will provide all the generated metrics details also it can provide the
- * get metrics details
- *
- *
- * @author nilanjana.maity
- *
- *
- */
-@Component
-public class MetricsServiceImpl implements MetricsService {
-
-
- private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MetricsService.class);
- /**
- *
- *
- * @param ctx
- * @throws IOException
- *
- *
- * get Metric details
- *
- */
- @Override
-
- public void get(DMaaPContext ctx) throws IOException {
- LOG.info("Inside : MetricsServiceImpl : get()");
- final MetricsSet metrics = ctx.getConfigReader().getfMetrics();
- DMaaPResponseBuilder.setNoCacheHeadings(ctx);
- final JSONObject result = metrics.toJson();
- DMaaPResponseBuilder.respondOk(ctx, result);
- LOG.info("============ Metrics generated : " + result.toString() + "=================");
-
- }
-
-
- @Override
- /**
- *
- * get Metric by name
- *
- *
- * @param ctx
- * @param name
- * @throws IOException
- * @throws CambriaApiException
- *
- *
- */
- public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException {
- LOG.info("Inside : MetricsServiceImpl : getMetricByName()");
- final MetricsSet metrics = ctx.getConfigReader().getfMetrics();
-
- final CdmMeasuredItem item = metrics.getItem(name);
- /**
- * check if item is null
- */
- if (item == null) {
- throw new CambriaApiException(404, "No metric named [" + name + "].");
- }
-
- final JSONObject entry = new JSONObject();
- entry.put("summary", item.summarize());
- entry.put("raw", item.getRawValueString());
-
- DMaaPResponseBuilder.setNoCacheHeadings(ctx);
-
- final JSONObject result = new JSONObject();
- result.put(name, entry);
-
- DMaaPResponseBuilder.respondOk(ctx, result);
- LOG.info("============ Metrics generated : " + entry.toString() + "=================");
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java
deleted file mode 100644
index 983af7e..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/TopicServiceImpl.java
+++ /dev/null
@@ -1,694 +0,0 @@
-/**
- *
- */
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.io.IOException;
-
-import org.apache.http.HttpStatus;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
-import com.att.dmf.mr.beans.TopicBean;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.exception.DMaaPAccessDeniedException;
-import com.att.dmf.mr.exception.DMaaPErrorMessages;
-import com.att.dmf.mr.exception.DMaaPResponseCode;
-import com.att.dmf.mr.exception.ErrorResponse;
-import com.att.dmf.mr.metabroker.Broker.TopicExistsException;
-import com.att.dmf.mr.metabroker.Broker1;
-
-import com.att.dmf.mr.metabroker.Topic;
-import com.att.dmf.mr.security.DMaaPAAFAuthenticator;
-import com.att.dmf.mr.security.DMaaPAAFAuthenticatorImpl;
-import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
-import com.att.dmf.mr.service.TopicService;
-import com.att.dmf.mr.utils.DMaaPResponseBuilder;
-import com.att.dmf.mr.utils.Utils;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.NsaAcl;
-import com.att.nsa.security.NsaApiKey;
-import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
-
-/**
- * @author muzainulhaque.qazi
- *
- */
-@Service
-public class TopicServiceImpl implements TopicService {
-
- // private static final Logger LOGGER =
-
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class);
- @Autowired
- private DMaaPErrorMessages errorMessages;
-
- // @Value("${msgRtr.topicfactory.aaf}")
-
-
- public DMaaPErrorMessages getErrorMessages() {
- return errorMessages;
- }
-
- public void setErrorMessages(DMaaPErrorMessages errorMessages) {
- this.errorMessages = errorMessages;
- }
-
- /**
- * @param dmaapContext
- * @throws JSONException
- * @throws ConfigDbException
- * @throws IOException
- *
- */
- @Override
- public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
- LOGGER.info("Fetching list of all the topics.");
- JSONObject json = new JSONObject();
-
- JSONArray topicsList = new JSONArray();
-
- for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
- topicsList.put(topic.getName());
- }
-
- json.put("topics", topicsList);
-
- LOGGER.info("Returning list of all the topics.");
- DMaaPResponseBuilder.respondOk(dmaapContext, json);
-
- }
-
- /**
- * @param dmaapContext
- * @throws JSONException
- * @throws ConfigDbException
- * @throws IOException
- *
- */
- public void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException {
-
- LOGGER.info("Fetching list of all the topics.");
- JSONObject json = new JSONObject();
-
- JSONArray topicsList = new JSONArray();
-
- for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) {
- JSONObject obj = new JSONObject();
- obj.put("topicName", topic.getName());
-
- obj.put("owner", topic.getOwner());
- obj.put("txenabled", topic.isTransactionEnabled());
- topicsList.put(obj);
- }
-
- json.put("topics", topicsList);
-
- LOGGER.info("Returning list of all the topics.");
- DMaaPResponseBuilder.respondOk(dmaapContext, json);
-
- }
-
- /**
- * @param dmaapContext
- * @param topicName
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- */
- @Override
- public void getTopic(DMaaPContext dmaapContext, String topicName)
- throws ConfigDbException, IOException, TopicExistsException {
-
- LOGGER.info("Fetching details of topic " + topicName);
- Topic t = getMetaBroker(dmaapContext).getTopic(topicName);
-
- if (null == t) {
- LOGGER.error("Topic [" + topicName + "] does not exist.");
- throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
- }
-
- JSONObject o = new JSONObject();
- o.put("name", t.getName());
- o.put("description", t.getDescription());
-
- if (null != t.getOwners())
- o.put("owner", t.getOwners().iterator().next());
- if (null != t.getReaderAcl())
- o.put("readerAcl", aclToJson(t.getReaderAcl()));
- if (null != t.getWriterAcl())
- o.put("writerAcl", aclToJson(t.getWriterAcl()));
-
- LOGGER.info("Returning details of topic " + topicName);
- DMaaPResponseBuilder.respondOk(dmaapContext, o);
-
- }
-
- /**
- * @param dmaapContext
- * @param topicBean
- * @throws CambriaApiException
- * @throws AccessDeniedException
- * @throws IOException
- * @throws TopicExistsException
- * @throws JSONException
- *
- *
- *
- */
- @Override
- public void createTopic(DMaaPContext dmaapContext, TopicBean topicBean)
- throws CambriaApiException, DMaaPAccessDeniedException, IOException, TopicExistsException {
- LOGGER.info("Creating topic " + topicBean.getTopicName());
-
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
- String key = null;
- String appName = dmaapContext.getRequest().getHeader("AppName");
- String enfTopicName = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop,
- "enforced.topic.name.AAF");
-
- if (user != null) {
- key = user.getKey();
-
- if (enfTopicName != null && topicBean.getTopicName().indexOf(enfTopicName) >= 0) {
-
- LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- "Failed to create topic: Access Denied.User does not have permission to perform create topic");
-
- LOGGER.info(errRes.toString());
- // throw new DMaaPAccessDeniedException(errRes);
-
- }
- }
- // else if (user==null &&
- // (null==dmaapContext.getRequest().getHeader("Authorization") && null
- // == dmaapContext.getRequest().getHeader("cookie")) ) {
- else if (Utils.isCadiEnabled()&&user == null && null == dmaapContext.getRequest().getHeader("Authorization")
- && (null == appName && null == dmaapContext.getRequest().getHeader("cookie"))) {
- LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- "Failed to create topic: Access Denied.User does not have permission to perform create topic");
-
- LOGGER.info(errRes.toString());
- // throw new DMaaPAccessDeniedException(errRes);
- }
-
- if (user == null && (null != dmaapContext.getRequest().getHeader("Authorization")
- )) {
- // if (user == null &&
- // (null!=dmaapContext.getRequest().getHeader("Authorization") ||
- // null != dmaapContext.getRequest().getHeader("cookie"))) {
- // ACL authentication is not provided so we will use the aaf
- // authentication
- LOGGER.info("Authorization the topic");
-
- String permission = "";
- String nameSpace = "";
- if (topicBean.getTopicName().indexOf(".") > 1)
- nameSpace = topicBean.getTopicName().substring(0, topicBean.getTopicName().lastIndexOf("."));
-
- String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- "msgRtr.topicfactory.aaf");
-
- // AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSettings_KafkaZookeeper);
-
- permission = mrFactoryVal + nameSpace + "|create";
- DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
-
- if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
-
- LOGGER.error("Failed to create topic" + topicBean.getTopicName() + ", Authentication failed.");
-
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_UNAUTHORIZED,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- "Failed to create topic: Access Denied.User does not have permission to create topic with perm "
- + permission);
-
- LOGGER.info(errRes.toString());
- throw new DMaaPAccessDeniedException(errRes);
-
- } else {
- // if user is null and aaf authentication is ok then key should
- // be ""
- // key = "";
- /**
- * Added as part of AAF user it should return username
- */
-
- key = dmaapContext.getRequest().getUserPrincipal().getName().toString();
- LOGGER.info("key ==================== " + key);
-
- }
- }
-
- try {
- final String topicName = topicBean.getTopicName();
- final String desc = topicBean.getTopicDescription();
- int partition = topicBean.getPartitionCount();
- // int replica = topicBean.getReplicationCount();
- if (partition == 0) {
- partition = 1;
- }
- final int partitions = partition;
-
- int replica = topicBean.getReplicationCount();
- if (replica == 0) {
- replica = 1;
- }
- final int replicas = replica;
- boolean transactionEnabled = topicBean.isTransactionEnabled();
-
- final Broker1 metabroker = getMetaBroker(dmaapContext);
- final Topic t = metabroker.createTopic(topicName, desc, key, partitions, replicas, transactionEnabled);
-
- LOGGER.info("Topic created successfully. Sending response");
- DMaaPResponseBuilder.respondOk(dmaapContext, topicToJson(t));
- } catch (JSONException excp) {
-
- LOGGER.error("Failed to create topic. Couldn't parse JSON data.", excp);
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
- DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
- LOGGER.info(errRes.toString());
- throw new CambriaApiException(errRes);
-
- } catch (ConfigDbException excp1) {
-
- LOGGER.error("Failed to create topic. Config DB Exception", excp1);
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST,
- DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson());
- LOGGER.info(errRes.toString());
- throw new CambriaApiException(errRes);
- } catch (com.att.dmf.mr.metabroker.Broker1.TopicExistsException e) {
- // TODO Auto-generated catch block
- LOGGER.error( e.getMessage());
- }
- }
-
- /**
- * @param dmaapContext
- * @param topicName
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- * @throws CambriaApiException
- * @throws AccessDeniedException
- */
- @Override
- public void deleteTopic(DMaaPContext dmaapContext, String topicName) throws IOException, ConfigDbException,
- CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException {
-
-
- LOGGER.info(" Deleting topic " + topicName);
- /*if (true) { // {
- LOGGER.error("Failed to delete topi" + topicName + ". Authentication failed.");
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), errorMessages.getCreateTopicFail() + " "
- + errorMessages.getNotPermitted1() + " delete " + errorMessages.getNotPermitted2());
- LOGGER.info(errRes.toString());
- throw new DMaaPAccessDeniedException(errRes);
- }*/
-
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-
- if (user == null && null != dmaapContext.getRequest().getHeader("Authorization")) {
- LOGGER.info("Authenticating the user, as ACL authentication is not provided");
- // String permission =
-
- String permission = "";
- String nameSpace = topicName.substring(0, topicName.lastIndexOf("."));
- String mrFactoryVal = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,
- "msgRtr.topicfactory.aaf");
-
- permission = mrFactoryVal + nameSpace + "|destroy";
- DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
- if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) {
- LOGGER.error("Failed to delete topi" + topicName + ". Authentication failed.");
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- errorMessages.getCreateTopicFail() + " " + errorMessages.getNotPermitted1() + " delete "
- + errorMessages.getNotPermitted2());
- LOGGER.info(errRes.toString());
- throw new DMaaPAccessDeniedException(errRes);
- }
-
- }
-
- final Broker1 metabroker = getMetaBroker(dmaapContext);
- final Topic topic = metabroker.getTopic(topicName);
-
- if (topic == null) {
- LOGGER.error("Failed to delete topic. Topic [" + topicName + "] does not exist.");
- throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist.");
- }
-
- // metabroker.deleteTopic(topicName);
-
- LOGGER.info("Topic [" + topicName + "] deleted successfully. Sending response.");
- DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Topic [" + topicName + "] deleted successfully");
- }
-
- /**
- *
- * @param dmaapContext
- * @return
- */
- private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
- return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
- }
-
- /**
- * @param dmaapContext
- * @param topicName
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- *
- */
- @Override
- public void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName)
- throws ConfigDbException, IOException, TopicExistsException {
- LOGGER.info("Retrieving list of all the publishers for topic " + topicName);
- Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
- if (topic == null) {
- LOGGER.error("Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
- throw new TopicExistsException(
- "Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist.");
- }
-
- final NsaAcl acl = topic.getWriterAcl();
-
- LOGGER.info("Returning list of all the publishers for topic " + topicName + ". Sending response.");
- DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl));
-
- }
-
- /**
- *
- * @param acl
- * @return
- */
- private static JSONObject aclToJson(NsaAcl acl) {
- final JSONObject o = new JSONObject();
- if (acl == null) {
- o.put("enabled", false);
- o.put("users", new JSONArray());
- } else {
- o.put("enabled", acl.isActive());
-
- final JSONArray a = new JSONArray();
- for (String user : acl.getUsers()) {
- a.put(user);
- }
- o.put("users", a);
- }
- return o;
- }
-
- /**
- * @param dmaapContext
- * @param topicName
- */
- @Override
- public void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName)
- throws IOException, ConfigDbException, TopicExistsException {
- LOGGER.info("Retrieving list of all the consumers for topic " + topicName);
- Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
- if (topic == null) {
- LOGGER.error("Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
- throw new TopicExistsException(
- "Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist.");
- }
-
- final NsaAcl acl = topic.getReaderAcl();
-
- LOGGER.info("Returning list of all the consumers for topic " + topicName + ". Sending response.");
- DMaaPResponseBuilder.respondOk(dmaapContext, aclToJson(acl));
-
- }
-
- /**
- *
- * @param t
- * @return
- */
- private static JSONObject topicToJson(Topic t) {
- final JSONObject o = new JSONObject();
-
- o.put("name", t.getName());
- o.put("description", t.getDescription());
- o.put("owner", t.getOwner());
- o.put("readerAcl", aclToJson(t.getReaderAcl()));
- o.put("writerAcl", aclToJson(t.getWriterAcl()));
-
- return o;
- }
-
- /**
- * @param dmaapContext
- * @param topicName @param producerId @throws
- * ConfigDbException @throws IOException @throws
- * TopicExistsException @throws AccessDeniedException @throws
- *
- */
- @Override
- public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, CambriaApiException {
-
- LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName);
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-
-
- //
- // LOGGER.info("Authenticating the user, as ACL authentication is not
-
- //// String permission =
-
- //
-
-
-
- // {
- // LOGGER.error("Failed to permit write access to producer [" +
- // producerId + "] for topic " + topicName
-
- // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- // errorMessages.getNotPermitted1()+" <Grant publish permissions>
-
-
-
- // }
- // }
-
- Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
- if (null == topic) {
- LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic. Topic [" + topicName
- + "] does not exist.");
- throw new TopicExistsException("Failed to permit write access to producer [" + producerId
- + "] for topic. Topic [" + topicName + "] does not exist.");
- }
-
- topic.permitWritesFromUser(producerId, user);
-
- LOGGER.info("Write access has been granted to producer [" + producerId + "] for topic [" + topicName
- + "]. Sending response.");
- DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been granted to publisher.");
-
- }
-
- /**
- * @param dmaapContext
- * @param topicName
- * @param producerId
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- * @throws AccessDeniedException
- * @throws DMaaPAccessDeniedException
- *
- */
- @Override
- public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
- DMaaPAccessDeniedException {
-
- LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName);
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-
- //
- //// String permission =
-
- // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl();
- // String permission = aaf.aafPermissionString(topicName, "manage");
- // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
- // {
- // LOGGER.error("Failed to revoke write access to producer [" +
- // producerId + "] for topic " + topicName
-
- // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- // errorMessages.getNotPermitted1()+" <Revoke publish permissions>
-
-
- // throw new DMaaPAccessDeniedException(errRes);
- //
-
- // }
-
- Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
- if (null == topic) {
- LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic. Topic [" + topicName
- + "] does not exist.");
- throw new TopicExistsException("Failed to revoke write access to producer [" + producerId
- + "] for topic. Topic [" + topicName + "] does not exist.");
- }
-
- topic.denyWritesFromUser(producerId, user);
-
- LOGGER.info("Write access has been revoked to producer [" + producerId + "] for topic [" + topicName
- + "]. Sending response.");
- DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Write access has been revoked for publisher.");
-
- }
-
- /**
- * @param dmaapContext
- * @param topicName
- * @param consumerId
- * @throws DMaaPAccessDeniedException
- */
- @Override
- public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
- DMaaPAccessDeniedException {
-
- LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName);
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-
- //
- //// String permission =
-
-
- // String permission = aaf.aafPermissionString(topicName, "manage");
- // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
- // {
- // LOGGER.error("Failed to permit read access to consumer [" +
- // consumerId + "] for topic " + topicName
-
- // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- // errorMessages.getNotPermitted1()+" <Grant consume permissions>
-
-
-
- // }
- // }
-
- Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
- if (null == topic) {
- LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
- + "] does not exist.");
- throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
- + "] for topic. Topic [" + topicName + "] does not exist.");
- }
-
- topic.permitReadsByUser(consumerId, user);
-
- LOGGER.info("Read access has been granted to consumer [" + consumerId + "] for topic [" + topicName
- + "]. Sending response.");
- DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
- "Read access has been granted for consumer [" + consumerId + "] for topic [" + topicName + "].");
- }
-
- /**
- * @param dmaapContext
- * @param topicName
- * @param consumerId
- * @throws DMaaPAccessDeniedException
- */
- @Override
- public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId)
- throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException,
- DMaaPAccessDeniedException {
-
- LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName);
- final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
-
- //// String permission =
-
-
- // String permission = aaf.aafPermissionString(topicName, "manage");
- // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission))
- // {
- // LOGGER.error("Failed to revoke read access to consumer [" +
- // consumerId + "] for topic " + topicName
-
- // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN,
- // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(),
- // errorMessages.getNotPermitted1()+" <Grant consume permissions>
-
-
- // throw new DMaaPAccessDeniedException(errRes);
- // }
- //
- //
-
- Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
- if (null == topic) {
- LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic. Topic [" + topicName
- + "] does not exist.");
- throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId
- + "] for topic. Topic [" + topicName + "] does not exist.");
- }
-
- topic.denyReadsByUser(consumerId, user);
-
- LOGGER.info("Read access has been revoked to consumer [" + consumerId + "] for topic [" + topicName
- + "]. Sending response.");
- DMaaPResponseBuilder.respondOkWithHtml(dmaapContext,
- "Read access has been revoked for consumer [" + consumerId + "] for topic [" + topicName + "].");
-
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java
deleted file mode 100644
index 3065928..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/TransactionServiceImpl.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.io.IOException;
-
-import org.springframework.stereotype.Service;
-
-import com.att.aft.dme2.internal.jettison.json.JSONException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.service.TransactionService;
-import com.att.dmf.mr.transaction.TransactionObj;
-import com.att.nsa.configs.ConfigDbException;
-
-/**
- * Once the transaction rest gateway will be using that time it will provide all
- * the transaction details like fetching all the transactional objects or get
- * any particular transaction object details
- *
- * @author nilanjana.maity
- *
- */
-@Service
-public class TransactionServiceImpl implements TransactionService {
-
- @Override
- public void checkTransaction(TransactionObj trnObj) {
- /* Need to implement the method */
- }
-
- @Override
- public void getAllTransactionObjs(DMaaPContext dmaapContext)
- throws ConfigDbException, IOException {
-
- /*
-
- *
- * LOG.info("configReader : "+configReader.toString());
- *
- * final JSONObject result = new JSONObject (); final JSONArray
- * transactionIds = new JSONArray (); result.put ( "transactionIds",
- * transactionIds );
- *
- * DMaaPTransactionObjDB<DMaaPTransactionObj> transDb =
- * configReader.getfTranDb();
- *
- * for (String transactionId : transDb.loadAllTransactionObjs()) {
- * transactionIds.put (transactionId); } LOG.info(
- * "========== TransactionServiceImpl: getAllTransactionObjs: Transaction objects are : "
- * + transactionIds.toString()+"===========");
- * DMaaPResponseBuilder.respondOk(dmaapContext, result);
- */
- }
-
- @Override
- public void getTransactionObj(DMaaPContext dmaapContext,
- String transactionId) throws ConfigDbException, JSONException,
- IOException {
-
- /*
-
- *
- * ConfigurationReader configReader = dmaapContext.getConfigReader();
- *
- * DMaaPTransactionObj trnObj;
- *
- * trnObj = configReader.getfTranDb().loadTransactionObj(transactionId);
- *
- *
- * if (null != trnObj) { trnObj.serialize(); JSONObject result =
- * trnObj.asJsonObject(); DMaaPResponseBuilder.respondOk(dmaapContext,
- * result);
- * LOG.info("========== TransactionServiceImpl: getTransactionObj : "+
- * result.toString()+"==========="); return; }
- *
- * } LOG.info(
- * "========== TransactionServiceImpl: getTransactionObj: Error : Transaction object does not exist. "
- * +"===========");
- */
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java b/src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java
deleted file mode 100644
index 73ad83b..0000000
--- a/src/main/java/com/att/dmf/mr/service/impl/UIServiceImpl.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.service.impl;
-
-import java.io.IOException;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-
-import org.apache.kafka.common.errors.TopicExistsException;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.springframework.stereotype.Service;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
-import com.att.dmf.mr.metabroker.Topic;
-import com.att.dmf.mr.service.UIService;
-import com.att.dmf.mr.utils.DMaaPResponseBuilder;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-/**
- * @author muzainulhaque.qazi
- *
- */
-@Service
-public class UIServiceImpl implements UIService {
-
-
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIServiceImpl.class);
- /**
- * Returning template of hello page
- * @param dmaapContext
- * @throws IOException
- */
- @Override
- public void hello(DMaaPContext dmaapContext) throws IOException {
- LOGGER.info("Returning template of hello page.");
- DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "templates/hello.html");
- }
-
- /**
- * Fetching list of all api keys and returning in a templated form for display.
- * @param dmaapContext
- * @throws ConfigDbException
- * @throws IOException
- */
- @Override
- public void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
- // TODO - We need to work on the templates and how data will be set in
- // the template
- LOGGER.info("Fetching list of all api keys and returning in a templated form for display.");
- Map<String, NsaSimpleApiKey> keyMap = getApiKeyDb(dmaapContext).loadAllKeyRecords();
-
- LinkedList<JSONObject> keyList = new LinkedList<>();
-
- JSONObject jsonList = new JSONObject();
-
- for (Entry<String, NsaSimpleApiKey> e : keyMap.entrySet()) {
- final NsaSimpleApiKey key = e.getValue();
- final JSONObject jsonObject = new JSONObject();
- jsonObject.put("key", key.getKey());
- jsonObject.put("email", key.getContactEmail());
- jsonObject.put("description", key.getDescription());
- keyList.add(jsonObject);
- }
-
- jsonList.put("apiKeys", keyList);
-
- LOGGER.info("Returning list of all the api keys in JSON format for the template.");
- // "templates/apiKeyList.html"
- DMaaPResponseBuilder.respondOk(dmaapContext, jsonList);
-
- }
-
- /**
- * @param dmaapContext
- * @param apiKey
- * @throws ConfigDbException
- * @throws IOException
- * @throws JSONException
- * @throws Exception
- */
- @Override
- public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws CambriaApiException, ConfigDbException, JSONException, IOException {
- // TODO - We need to work on the templates and how data will be set in
- // the template
- LOGGER.info("Fetching detials of apikey: " + apiKey);
- final NsaSimpleApiKey key = getApiKeyDb(dmaapContext).loadApiKey(apiKey);
-
- if (null != key) {
- LOGGER.info("Details of apikey [" + apiKey + "] found. Returning response");
- DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
- } else {
- LOGGER.info("Details of apikey [" + apiKey + "] not found. Returning response");
- throw new CambriaApiException(400,"Key [" + apiKey + "] not found.");
- }
-
- }
-
- /**
- * Fetching list of all the topics
- * @param dmaapContext
- * @throws ConfigDbException
- * @throws IOException
- */
- @Override
- public void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException {
- // TODO - We need to work on the templates and how data will be set in
- // the template
- LOGGER.info("Fetching list of all the topics and returning in a templated form for display");
- List<Topic> topicsList = getMetaBroker(dmaapContext).getAllTopics();
-
- JSONObject jsonObject = new JSONObject();
-
- JSONArray topicsArray = new JSONArray();
-
- List<Topic> topicList = getMetaBroker(dmaapContext).getAllTopics();
-
- for (Topic topic : topicList) {
- JSONObject obj = new JSONObject();
- obj.put("topicName", topic.getName());
- obj.put("description", topic.getDescription());
- obj.put("owner", topic.getOwner());
- topicsArray.put(obj);
- }
-
- jsonObject.put("topics", topicsList);
-
- LOGGER.info("Returning the list of topics in templated format for display.");
- DMaaPResponseBuilder.respondOk(dmaapContext, jsonObject);
-
- }
-
- /**
- * @param dmaapContext
- * @param topicName
- * @throws ConfigDbException
- * @throws IOException
- * @throws TopicExistsException
- */
- @Override
- public void getTopic(DMaaPContext dmaapContext, String topicName)
- throws ConfigDbException, IOException, TopicExistsException {
- // TODO - We need to work on the templates and how data will be set in
- // the template
- LOGGER.info("Fetching detials of apikey: " + topicName);
- Topic topic = getMetaBroker(dmaapContext).getTopic(topicName);
-
- if (null == topic) {
- LOGGER.error("Topic [" + topicName + "] does not exist.");
- throw new TopicExistsException("Topic [" + topicName + "] does not exist.");
- }
-
- JSONObject json = new JSONObject();
- json.put("topicName", topic.getName());
- json.put("description", topic.getDescription());
- json.put("owner", topic.getOwner());
-
- LOGGER.info("Returning details of topic [" + topicName + "]. Sending response.");
- DMaaPResponseBuilder.respondOk(dmaapContext, json);
-
- }
-
- /**
- *
- * @param dmaapContext
- * @return
- */
- private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
- return dmaapContext.getConfigReader().getfApiKeyDb();
-
- }
-
- /**
- *
- * @param dmaapContext
- * @return
- */
- private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
- return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java
deleted file mode 100644
index 8ae4c12..0000000
--- a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.transaction;
-/**
- *
- * @author anowarul.islam
- *
- * @param <K>
- */
-public interface DMaaPTransactionFactory<K extends DMaaPTransactionObj> {
-
- /**
- *
- * @param data
- * @return
- */
- K makeNewTransactionObj ( String data );
- /**
- *
- * @param id
- * @return
- */
- K makeNewTransactionId ( String id );
-
-}
diff --git a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java
deleted file mode 100644
index 7f5dd3a..0000000
--- a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObj.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.transaction;
-
-import org.json.JSONObject;
-/**
- * This is an interface for DMaaP transactional logging object class.
- * @author nilanjana.maity
- *
- */
-public interface DMaaPTransactionObj {
- /**
- * This will get the transaction id
- * @return id transactionId
- */
- String getId();
- /**
- * This will set the transaction id
- * @param id transactionId
- */
- void setId(String id);
- /**
- * This will sync the transaction object mapping
- * @return String or null
- */
- String serialize();
- /**
- * get the total message count once the publisher published
- * @return long totalMessageCount
- */
- long getTotalMessageCount();
- /**
- * set the total message count once the publisher published
- * @param totalMessageCount
- */
- void setTotalMessageCount(long totalMessageCount);
- /**
- * get the total Success Message Count once the publisher published
- * @return getSuccessMessageCount
- */
- long getSuccessMessageCount();
- /**
- * set the total Success Message Count once the publisher published
- * @param successMessageCount
- */
- void setSuccessMessageCount(long successMessageCount);
- /**
- * get the failure Message Count once the publisher published
- * @return failureMessageCount
- */
- long getFailureMessageCount();
- /**
- * set the failure Message Count once the publisher published
- * @param failureMessageCount
- */
- void setFailureMessageCount(long failureMessageCount);
-
- /**
- * wrapping the data into json object
- * @return JSONObject
- */
- JSONObject asJsonObject();
-
-}
diff --git a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java b/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java
deleted file mode 100644
index abebaba..0000000
--- a/src/main/java/com/att/dmf/mr/transaction/DMaaPTransactionObjDB.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.transaction;
-
-import java.util.Set;
-
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.security.NsaSecurityManagerException;
-
-
-/**
- * Persistent storage for Transaction Object and secrets built over an abstract config db. Instances
- * of this DB must support concurrent access.
- * @author nilanjana.maity
- *
- * @param <K> DMaaPTransactionObj
- */
-public interface DMaaPTransactionObjDB <K extends DMaaPTransactionObj> {
-
-
- /**
- * Create a new Transaction Object. If one exists,
- * @param id
- * @return the new Transaction record
- * @throws ConfigDbException
- */
- K createTransactionObj (String id) throws KeyExistsException, ConfigDbException;
-
-
- /**
- * An exception to signal a Transaction object already exists
- * @author nilanjana.maity
- *
- */
- public static class KeyExistsException extends NsaSecurityManagerException
- {
- /**
- * If the key exists
- * @param key
- */
- public KeyExistsException ( String key ) { super ( "Transaction Object " + key + " exists" ); }
- private static final long serialVersionUID = 1L;
- }
-
- /**
- * Save a Transaction Object record. This must be used after changing auxiliary data on the record.
- * Note that the transaction must exist (via createTransactionObj).
- * @param transactionObj
- * @throws ConfigDbException
- */
- void saveTransactionObj ( K transactionObj ) throws ConfigDbException;
-
- /**
- * Load an Transaction Object record based on the Transaction ID value
- * @param transactionId
- * @return a transaction record or null
- * @throws ConfigDbException
- */
- K loadTransactionObj ( String transactionId ) throws ConfigDbException;
-
- /**
- * Load all Transaction objects.
- * @return
- * @throws ConfigDbException
- */
- Set<String> loadAllTransactionObjs () throws ConfigDbException;
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/transaction/TransactionObj.java b/src/main/java/com/att/dmf/mr/transaction/TransactionObj.java
deleted file mode 100644
index 7223f0f..0000000
--- a/src/main/java/com/att/dmf/mr/transaction/TransactionObj.java
+++ /dev/null
@@ -1,202 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.transaction;
-
-import org.json.JSONObject;
-
-/**
- * This is the class which will have the transaction enabled logging object
- * details
- *
- * @author nilanjana.maity
- *
- */
-public class TransactionObj implements DMaaPTransactionObj {
-
- private String id;
- private String createTime;
- private long totalMessageCount;
- private long successMessageCount;
- private long failureMessageCount;
- private JSONObject fData = new JSONObject();
- private TrnRequest trnRequest;
- private static final String kAuxData = "transaction";
-
- /**
- * Initializing constructor
- * put the json data for transaction enabled logging
- *
- * @param data
- */
- public TransactionObj(JSONObject data) {
- fData = data;
-
- // check for required fields (these throw if not present)
- getId();
- getTotalMessageCount();
- getSuccessMessageCount();
- getFailureMessageCount();
-
- // make sure we've got an aux data object
- final JSONObject aux = fData.optJSONObject(kAuxData);
- if (aux == null) {
- fData.put(kAuxData, new JSONObject());
- }
- }
-
- /**
- * this constructor will have the details of transaction id,
- * totalMessageCount successMessageCount, failureMessageCount to get the
- * transaction object
- *
- * @param id
- * @param totalMessageCount
- * @param successMessageCount
- * @param failureMessageCount
- */
- public TransactionObj(String id, long totalMessageCount, long successMessageCount, long failureMessageCount) {
- this.id = id;
- this.totalMessageCount = totalMessageCount;
- this.successMessageCount = successMessageCount;
- this.failureMessageCount = failureMessageCount;
-
- }
-
- /**
- * The constructor passing only transaction id
- *
- * @param id
- */
- public TransactionObj(String id) {
- this.id = id;
- }
-
- /**
- * Wrapping the data into json object
- *
- * @return JSONObject
- */
- public JSONObject asJsonObject() {
- final JSONObject full = new JSONObject(fData, JSONObject.getNames(fData));
- return full;
- }
-
- /**
- * To get the transaction id
- */
- public String getId() {
- return id;
- }
-
- /**
- * To set the transaction id
- */
- public void setId(String id) {
- this.id = id;
- }
-
- /**
- *
- * @return
- */
- public String getCreateTime() {
- return createTime;
- }
-
- /**
- *
- * @param createTime
- */
- public void setCreateTime(String createTime) {
- this.createTime = createTime;
- }
-
- @Override
- public String serialize() {
- fData.put("transactionId", id);
- fData.put("totalMessageCount", totalMessageCount);
- fData.put("successMessageCount", successMessageCount);
- fData.put("failureMessageCount", failureMessageCount);
- return fData.toString();
- }
-
- public long getTotalMessageCount() {
- return totalMessageCount;
- }
-
- public void setTotalMessageCount(long totalMessageCount) {
- this.totalMessageCount = totalMessageCount;
- }
-
- public long getSuccessMessageCount() {
- return successMessageCount;
- }
-
- public void setSuccessMessageCount(long successMessageCount) {
- this.successMessageCount = successMessageCount;
- }
-
- public long getFailureMessageCount() {
- return failureMessageCount;
- }
-
- /**
- * @param failureMessageCount
- */
- public void setFailureMessageCount(long failureMessageCount) {
- this.failureMessageCount = failureMessageCount;
- }
-
- /**
- *
- * @return JSOnObject fData
- */
- public JSONObject getfData() {
- return fData;
- }
-
- /**
- * set the json object into data
- *
- * @param fData
- */
- public void setfData(JSONObject fData) {
- this.fData = fData;
- }
-
- /**
- *
- * @return
- */
- public TrnRequest getTrnRequest() {
- return trnRequest;
- }
-
- /**
- *
- * @param trnRequest
- */
- public void setTrnRequest(TrnRequest trnRequest) {
- this.trnRequest = trnRequest;
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/transaction/TrnRequest.java b/src/main/java/com/att/dmf/mr/transaction/TrnRequest.java
deleted file mode 100644
index f7f18a2..0000000
--- a/src/main/java/com/att/dmf/mr/transaction/TrnRequest.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.transaction;
-
-/**
- * Created for transaction enable logging details, this is nothing but a bean
- * class.
- *
- * @author nilanjana.maity
- *
- */
-public class TrnRequest {
-
- private String id;
- private String requestCreate;
- private String requestHost;
- private String serverHost;
- private String messageProceed;
- private String totalMessage;
- private String clientType;
- private String url;
-
- /**
- *
- *
- *
- * @return id
- *
- */
- public String getId() {
- return id;
- }
-
- /**
- *
- *
- * @param id
- */
- public void setId(String id) {
- this.id = id;
- }
-
- /**
- *
- *
- * @return requestCreate
- */
- public String getRequestCreate() {
- return requestCreate;
- }
-
- /**
- *
- * @param requestCreate
- */
- public void setRequestCreate(String requestCreate) {
- this.requestCreate = requestCreate;
- }
-
- /**
- *
- * @return
- */
- public String getRequestHost() {
- return requestHost;
- }
-
- /**
- *
- * @param requestHost
- */
- public void setRequestHost(String requestHost) {
- this.requestHost = requestHost;
- }
-
- /**
- *
- *
- *
- * @return
- */
- public String getServerHost() {
- return serverHost;
- }
-
- /**
- *
- * @param serverHost
- */
- public void setServerHost(String serverHost) {
- this.serverHost = serverHost;
- }
-
- /**
- *
- *
- *
- * @return
- */
- public String getMessageProceed() {
- return messageProceed;
- }
-
- /**
- *
- * @param messageProceed
- */
- public void setMessageProceed(String messageProceed) {
- this.messageProceed = messageProceed;
- }
-
- /**
- *
- * @return
- */
- public String getTotalMessage() {
- return totalMessage;
- }
-
- /**
- *
- * @param totalMessage
- *
- *
- */
- public void setTotalMessage(String totalMessage) {
- this.totalMessage = totalMessage;
- }
-
- /**
- *
- * @return
- */
- public String getClientType() {
- return clientType;
- }
-
- /**
- *
- * @param clientType
- *
- */
- public void setClientType(String clientType) {
- this.clientType = clientType;
- }
-
- /**
- *
- * @return
- */
- public String getUrl() {
- return url;
- }
-
- /**
- *
- * @param url
- *
- */
- public void setUrl(String url) {
- this.url = url;
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java b/src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java
deleted file mode 100644
index c54f2db..0000000
--- a/src/main/java/com/att/dmf/mr/transaction/impl/DMaaPSimpleTransactionFactory.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.transaction.impl;
-
-import org.json.JSONObject;
-
-import com.att.dmf.mr.transaction.DMaaPTransactionFactory;
-import com.att.dmf.mr.transaction.DMaaPTransactionObj;
-import com.att.dmf.mr.transaction.TransactionObj;
-
-/**
- * A factory for the simple Transaction implementation
- *
- *
- * @author nilanjana.maity
- *
- */
-public class DMaaPSimpleTransactionFactory implements DMaaPTransactionFactory<DMaaPTransactionObj> {
- /**
- *
- * @param data
- * @return DMaaPTransactionObj
- */
- @Override
- public DMaaPTransactionObj makeNewTransactionObj(String data) {
- JSONObject jsonObject = new JSONObject(data);
- return new TransactionObj(jsonObject.getString("transactionId"), jsonObject.getLong("totalMessageCount"),
- jsonObject.getLong("successMessageCount"), jsonObject.getLong("failureMessageCount"));
- }
-
- /**
- *
- * @param id
- * @return TransactionObj
- *
- *
- */
- @Override
- public DMaaPTransactionObj makeNewTransactionId(String id) {
- return new TransactionObj(id);
- }
-
-}
diff --git a/src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java b/src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java
deleted file mode 100644
index aebca34..0000000
--- a/src/main/java/com/att/dmf/mr/utils/ConfigurationReader.java
+++ /dev/null
@@ -1,492 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.utils;
-
-import javax.servlet.ServletException;
-
-import org.I0Itec.zkclient.ZkClient;
-import org.apache.curator.framework.CuratorFramework;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
-import org.springframework.stereotype.Component;
-
-import com.att.dmf.mr.backends.ConsumerFactory;
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.dmf.mr.backends.Publisher;
-import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import com.att.dmf.mr.backends.memory.MemoryConsumerFactory;
-import com.att.dmf.mr.backends.memory.MemoryMetaBroker;
-import com.att.dmf.mr.backends.memory.MemoryQueue;
-import com.att.dmf.mr.backends.memory.MemoryQueuePublisher;
-import com.att.dmf.mr.beans.DMaaPCambriaLimiter;
-import com.att.dmf.mr.beans.DMaaPKafkaMetaBroker;
-import com.att.dmf.mr.beans.DMaaPZkConfigDb;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.metabroker.Broker;
-
-import com.att.dmf.mr.metabroker.Broker1;
-import com.att.dmf.mr.security.DMaaPAuthenticator;
-import com.att.dmf.mr.security.impl.DMaaPOriginalUebAuthenticator;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.configs.confimpl.MemConfigDb;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.limits.Blacklist;
-import com.att.nsa.security.NsaAuthenticatorService;
-
-import com.att.nsa.security.db.BaseNsaApiDbImpl;
-import com.att.nsa.security.db.NsaApiDb;
-import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
-
-/**
- * Class is created for all the configuration for rest and service layer
- * integration.
- *
- */
-@Component
-public class ConfigurationReader {
-
-
- private Broker1 fMetaBroker;
- private ConsumerFactory fConsumerFactory;
- private Publisher fPublisher;
- private MetricsSet fMetrics;
- @Autowired
- private DMaaPCambriaLimiter fRateLimiter;
- private NsaApiDb<NsaSimpleApiKey> fApiKeyDb;
-
- private DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager;
- private NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager;
- private static CuratorFramework curator;
- private ZkClient zk;
- private DMaaPZkConfigDb fConfigDb;
- private MemoryQueue q;
- private MemoryMetaBroker mmb;
- private Blacklist fIpBlackList;
- private Emailer fEmailer;
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(ConfigurationReader.class);
-
-
- /**
- * constructor to initialize all the values
- *
- * @param settings
- * @param fMetrics
- * @param zk
- * @param fConfigDb
- * @param fPublisher
- * @param curator
- * @param fConsumerFactory
- * @param fMetaBroker
- * @param q
- * @param mmb
- * @param fApiKeyDb
- * @param fSecurityManager
- * @throws missingReqdSetting
- * @throws invalidSettingValue
- * @throws ServletException
- * @throws KafkaConsumerCacheException
- * @throws ConfigDbException
- * @throws KeyExistsException
- */
- @Autowired
- public ConfigurationReader(@Qualifier("propertyReader") rrNvReadable settings,
- @Qualifier("dMaaPMetricsSet") MetricsSet fMetrics, @Qualifier("dMaaPZkClient") ZkClient zk,
- @Qualifier("dMaaPZkConfigDb") DMaaPZkConfigDb fConfigDb, @Qualifier("kafkaPublisher") Publisher fPublisher,
- @Qualifier("curator") CuratorFramework curator,
- @Qualifier("dMaaPKafkaConsumerFactory") ConsumerFactory fConsumerFactory,
- @Qualifier("dMaaPKafkaMetaBroker") Broker1 fMetaBroker,
- @Qualifier("q") MemoryQueue q,
- @Qualifier("mmb") MemoryMetaBroker mmb, @Qualifier("dMaaPNsaApiDb") NsaApiDb<NsaSimpleApiKey> fApiKeyDb,
- /*
- * @Qualifier("dMaaPTranDb")
- * DMaaPTransactionObjDB<DMaaPTransactionObj> fTranDb,
- */
- @Qualifier("dMaaPAuthenticatorImpl") DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager
- )
- throws missingReqdSetting, invalidSettingValue, ServletException, KafkaConsumerCacheException, ConfigDbException, KeyExistsException {
-
- this.fMetrics = fMetrics;
- this.zk = zk;
- this.fConfigDb = fConfigDb;
- this.fPublisher = fPublisher;
- ConfigurationReader.curator = curator;
- this.fConsumerFactory = fConsumerFactory;
- this.fMetaBroker = fMetaBroker;
-
- this.q = q;
- this.mmb = mmb;
- this.fApiKeyDb = fApiKeyDb;
-
- this.fSecurityManager = fSecurityManager;
-
- long allowedtimeSkewMs=600000L;
- String strallowedTimeSkewM= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"authentication.allowedTimeSkewMs");
- if(null!=strallowedTimeSkewM)allowedtimeSkewMs= Long.parseLong(strallowedTimeSkewM);
-
-
- //String strrequireSecureChannel= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"aauthentication.requireSecureChannel");
- //if(strrequireSecureChannel!=null)requireSecureChannel=Boolean.parseBoolean(strrequireSecureChannel);
- //this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, settings.getLong("authentication.allowedTimeSkewMs", 600000L), settings.getBoolean("authentication.requireSecureChannel", true));
- //this.nsaSecurityManager = new NsaAuthenticatorService<NsaSimpleApiKey>(this.fApiKeyDb, allowedtimeSkewMs, requireSecureChannel);
-
- servletSetup();
- }
-
- protected void servletSetup()
- throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, ConfigDbException, KeyExistsException {
- try {
-
- fMetrics.toJson();
- fMetrics.setupCambriaSender();
- // add the admin authenticator
-
- final String adminSecret = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_AdminSecret);
-
- if ( adminSecret != null && adminSecret.length () > 0 )
- {
-
- final NsaApiDb<NsaSimpleApiKey> adminDb = new BaseNsaApiDbImpl<NsaSimpleApiKey> ( new MemConfigDb(), new NsaSimpleApiKeyFactory() );
- adminDb.createApiKey ( "admin", adminSecret );
-
- fSecurityManager.addAuthenticator ( new DMaaPOriginalUebAuthenticator<NsaSimpleApiKey> ( adminDb, 10*60*1000 ) );
-
- }
-
- // setup a backend
-
- String type = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kBrokerType);
- if (type==null) type = CambriaConstants.kBrokerType_Kafka;
- if (CambriaConstants.kBrokerType_Kafka.equalsIgnoreCase(type)) {
- log.info("Broker Type is:" + CambriaConstants.kBrokerType_Kafka);
- } else if (CambriaConstants.kBrokerType_Memory.equalsIgnoreCase(type)) {
- log.info("Broker Type is:" + CambriaConstants.kBrokerType_Memory);
- fPublisher = new MemoryQueuePublisher(q, mmb);
- //Ramkumar remove below
- // fMetaBroker = mmb;
- fConsumerFactory = new MemoryConsumerFactory(q);
- } else {
- throw new IllegalArgumentException(
- "Unrecognized type for " + CambriaConstants.kBrokerType + ": " + type + ".");
- }
- fIpBlackList = new Blacklist ( getfConfigDb(), getfConfigDb().parse ( "/ipBlacklist" ) );
- this.fEmailer = new Emailer();
- log.info("Broker Type is:" + type);
-
- } catch (SecurityException e) {
- throw new ServletException(e);
- }
- }
-
- /**
- * method returns metaBroker
- *
- * @return
- */
- public Broker1 getfMetaBroker() {
- return fMetaBroker;
- }
-
- /**
- * method to set the metaBroker
- *
- * @param fMetaBroker
- */
- public void setfMetaBroker(Broker1 fMetaBroker) {
- this.fMetaBroker = fMetaBroker;
- }
-
- /**
- * method to get ConsumerFactory Object
- *
- * @return
- */
- public ConsumerFactory getfConsumerFactory() {
- return fConsumerFactory;
- }
-
- /**
- * method to set the consumerfactory object
- *
- * @param fConsumerFactory
- */
- public void setfConsumerFactory(ConsumerFactory fConsumerFactory) {
- this.fConsumerFactory = fConsumerFactory;
- }
-
- /**
- * method to get Publisher object
- *
- * @return
- */
- public Publisher getfPublisher() {
- return fPublisher;
- }
-
- /**
- * method to set Publisher object
- *
- * @param fPublisher
- */
- public void setfPublisher(Publisher fPublisher) {
- this.fPublisher = fPublisher;
- }
-
- /**
- * method to get MetricsSet Object
- *
- * @return
- */
- public MetricsSet getfMetrics() {
- return fMetrics;
- }
-
- /**
- * method to set MetricsSet Object
- *
- * @param fMetrics
- */
- public void setfMetrics(MetricsSet fMetrics) {
- this.fMetrics = fMetrics;
- }
-
- /**
- * method to get DMaaPCambriaLimiter object
- *
- * @return
- */
- public DMaaPCambriaLimiter getfRateLimiter() {
- return fRateLimiter;
- }
-
- /**
- * method to set DMaaPCambriaLimiter object
- *
- * @param fRateLimiter
- */
- public void setfRateLimiter(DMaaPCambriaLimiter fRateLimiter) {
- this.fRateLimiter = fRateLimiter;
- }
-
- /**
- * Method to get DMaaPAuthenticator object
- *
- * @return
- */
- public DMaaPAuthenticator<NsaSimpleApiKey> getfSecurityManager() {
- return fSecurityManager;
- }
-
- /**
- * method to set DMaaPAuthenticator object
- *
- * @param fSecurityManager
- */
- public void setfSecurityManager(DMaaPAuthenticator<NsaSimpleApiKey> fSecurityManager) {
- this.fSecurityManager = fSecurityManager;
- }
-
- /**
- * method to get rrNvReadable object
- *
- * @return
- */
- /*public rrNvReadable getSettings() {
- return settings;
- }*/
-
- /**
- * method to set rrNvReadable object
- *
- * @param settings
- */
- /*public void setSettings(rrNvReadable settings) {
- this.settings = settings;
- }*/
-
- /**
- * method to get CuratorFramework object
- *
- * @return
- */
- public static CuratorFramework getCurator() {
- return curator;
- }
-
- /**
- * method to set CuratorFramework object
- *
- * @param curator
- */
- public static void setCurator(CuratorFramework curator) {
- ConfigurationReader.curator = curator;
- }
-
- /**
- * method to get ZkClient object
- *
- * @return
- */
- public ZkClient getZk() {
- return zk;
- }
-
- /**
- * method to set ZkClient object
- *
- * @param zk
- */
- public void setZk(ZkClient zk) {
- this.zk = zk;
- }
-
- /**
- * method to get DMaaPZkConfigDb object
- *
- * @return
- */
- public DMaaPZkConfigDb getfConfigDb() {
- return fConfigDb;
- }
-
- /**
- * method to set DMaaPZkConfigDb object
- *
- * @param fConfigDb
- */
- public void setfConfigDb(DMaaPZkConfigDb fConfigDb) {
- this.fConfigDb = fConfigDb;
- }
-
- /**
- * method to get MemoryQueue object
- *
- * @return
- */
- public MemoryQueue getQ() {
- return q;
- }
-
- /**
- * method to set MemoryQueue object
- *
- * @param q
- */
- public void setQ(MemoryQueue q) {
- this.q = q;
- }
-
- /**
- * method to get MemoryMetaBroker object
- *
- * @return
- */
- public MemoryMetaBroker getMmb() {
- return mmb;
- }
-
- /**
- * method to set MemoryMetaBroker object
- *
- * @param mmb
- */
- public void setMmb(MemoryMetaBroker mmb) {
- this.mmb = mmb;
- }
-
- /**
- * method to get NsaApiDb object
- *
- * @return
- */
- public NsaApiDb<NsaSimpleApiKey> getfApiKeyDb() {
- return fApiKeyDb;
- }
-
- /**
- * method to set NsaApiDb object
- *
- * @param fApiKeyDb
- */
- public void setfApiKeyDb(NsaApiDb<NsaSimpleApiKey> fApiKeyDb) {
- this.fApiKeyDb = fApiKeyDb;
- }
-
- /*
- * public DMaaPTransactionObjDB<DMaaPTransactionObj> getfTranDb() { return
- * fTranDb; }
- *
- * public void setfTranDb(DMaaPTransactionObjDB<DMaaPTransactionObj>
- * fTranDb) { this.fTranDb = fTranDb; }
- */
- /**
- * method to get the zookeeper connection String
- *
- * @param settings
- * @return
- */
- public static String getMainZookeeperConnectionString() {
- //return settings.getString(CambriaConstants.kSetting_ZkConfigDbServers, CambriaConstants.kDefault_ZkConfigDbServers);
-
- String zkServername = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbServers);
- if (zkServername==null) zkServername=CambriaConstants.kDefault_ZkConfigDbServers;
- return zkServername;
- }
-
- public static String getMainZookeeperConnectionSRoot(){
- String strVal=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,CambriaConstants.kSetting_ZkConfigDbRoot);
-
- if (null==strVal)
- strVal=CambriaConstants.kDefault_ZkConfigDbRoot;
-
- return strVal;
- }
-
- public Blacklist getfIpBlackList() {
- return fIpBlackList;
- }
-
- public void setfIpBlackList(Blacklist fIpBlackList) {
- this.fIpBlackList = fIpBlackList;
- }
-
- public NsaAuthenticatorService<NsaSimpleApiKey> getNsaSecurityManager() {
- return nsaSecurityManager;
- }
-
- public void setNsaSecurityManager(NsaAuthenticatorService<NsaSimpleApiKey> nsaSecurityManager) {
- this.nsaSecurityManager = nsaSecurityManager;
- }
-
- public Emailer getSystemEmailer()
- {
- return this.fEmailer;
- }
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java b/src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java
deleted file mode 100644
index 5a9968d..0000000
--- a/src/main/java/com/att/dmf/mr/utils/DMaaPCuratorFactory.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.utils;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-
-/**
- *
- *
- * @author anowarul.islam
- *
- *
- */
-public class DMaaPCuratorFactory {
- /**
- *
- * method provide CuratorFramework object
- *
- * @param settings
- * @return
- *
- *
- *
- */
- public static CuratorFramework getCurator(rrNvReadable settings) {
- String Setting_ZkConfigDbServers =com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkConfigDbServers);
-
- if(null==Setting_ZkConfigDbServers)
- Setting_ZkConfigDbServers =CambriaConstants.kDefault_ZkConfigDbServers;
-
- String strSetting_ZkSessionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs);
- if (strSetting_ZkSessionTimeoutMs==null) strSetting_ZkSessionTimeoutMs = CambriaConstants.kDefault_ZkSessionTimeoutMs+"";
- int Setting_ZkSessionTimeoutMs = Integer.parseInt(strSetting_ZkSessionTimeoutMs);
-
- String str_ZkConnectionTimeoutMs = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, CambriaConstants.kSetting_ZkSessionTimeoutMs);
- if (str_ZkConnectionTimeoutMs==null) str_ZkConnectionTimeoutMs = CambriaConstants.kDefault_ZkConnectionTimeoutMs+"";
- int setting_ZkConnectionTimeoutMs = Integer.parseInt(str_ZkConnectionTimeoutMs);
-
-
- CuratorFramework curator = CuratorFrameworkFactory.newClient(
- Setting_ZkConfigDbServers,Setting_ZkSessionTimeoutMs,setting_ZkConnectionTimeoutMs
- ,new ExponentialBackoffRetry(1000, 5));
- return curator;
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java b/src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java
deleted file mode 100644
index 72db9de..0000000
--- a/src/main/java/com/att/dmf/mr/utils/DMaaPResponseBuilder.java
+++ /dev/null
@@ -1,370 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.utils;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.PrintWriter;
-import java.io.Writer;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * class is used to create response object which is given to user
- *
- * @author nilanjana.maity
- *
- */
-
-public class DMaaPResponseBuilder {
-
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(DMaaPResponseBuilder.class);
- protected static final int kBufferLength = 4096;
-
- public static void setNoCacheHeadings(DMaaPContext ctx) {
- HttpServletResponse response = ctx.getResponse();
- response.addHeader("Cache-Control", "no-store, no-cache, must-revalidate");
- response.addHeader("Pragma", "no-cache");
- response.addHeader("Expires", "0");
- }
-
- /**
- * static method is used to create response object associated with
- * JSONObject
- *
- * @param ctx
- * @param result
- * @throws JSONException
- * @throws IOException
- */
- public static void respondOk(DMaaPContext ctx, JSONObject result) throws JSONException, IOException {
-
- respondOkWithStream(ctx, "application/json", new ByteArrayInputStream(result.toString(4).getBytes()));
-
- }
-
- /**
- * method used to set staus to 204
- *
- * @param ctx
- */
- public static void respondOkNoContent(DMaaPContext ctx) {
- try {
- ctx.getResponse().setStatus(204);
- } catch (Exception excp) {
- log.error(excp.getMessage(), excp);
- }
- }
-
- /**
- * static method is used to create response object associated with html
- *
- * @param ctx
- * @param html
- */
- public static void respondOkWithHtml(DMaaPContext ctx, String html) {
- try {
- respondOkWithStream(ctx, "text/html", new ByteArrayInputStream(html.toString().getBytes()));
- } catch (Exception excp) {
- log.error(excp.getMessage(), excp);
- }
- }
-
- /**
- * method used to create response object associated with InputStream
- *
- * @param ctx
- * @param mediaType
- * @param is
- * @throws IOException
- */
- public static void respondOkWithStream(DMaaPContext ctx, String mediaType, final InputStream is)
- throws IOException {
- /*
- * creates response object associated with streamwriter
- */
- respondOkWithStream(ctx, mediaType, new StreamWriter() {
-
- public void write(OutputStream os) throws IOException {
- copyStream(is, os);
- }
- });
-
- }
-
- /**
- *
- * @param ctx
- * @param mediaType
- * @param writer
- * @throws IOException
- */
- public static void respondOkWithStream(DMaaPContext ctx, String mediaType, StreamWriter writer) throws IOException {
- ctx.getResponse().setStatus(200);
- try(OutputStream os = getStreamForBinaryResponse(ctx, mediaType)) {
- writer.write(os);
- }
-
-
- }
-
- /**
- * static method to create error objects
- *
- * @param ctx
- * @param errCode
- * @param msg
- */
- public static void respondWithError(DMaaPContext ctx, int errCode, String msg) {
- try {
- ctx.getResponse().sendError(errCode, msg);
- } catch (IOException excp) {
- log.error(excp.getMessage(), excp);
- }
- }
-
- /**
- * method to create error objects
- *
- * @param ctx
- * @param errCode
- * @param body
- */
- public static void respondWithError(DMaaPContext ctx, int errCode, JSONObject body) {
- try {
- sendErrorAndBody(ctx, errCode, body.toString(4), "application/json");
- } catch (Exception excp) {
- log.error(excp.getMessage(), excp);
- }
- }
-
- /**
- * static method creates error object in JSON
- *
- * @param ctx
- * @param errCode
- * @param msg
- */
- public static void respondWithErrorInJson(DMaaPContext ctx, int errCode, String msg) {
- try {
- JSONObject o = new JSONObject();
- o.put("status", errCode);
- o.put("message", msg);
- respondWithError(ctx, errCode, o);
-
- } catch (Exception excp) {
- log.error(excp.getMessage(), excp);
- }
- }
-
- /**
- * static method used to copy the stream with the help of another method
- * copystream
- *
- * @param in
- * @param out
- * @throws IOException
- */
- public static void copyStream(InputStream in, OutputStream out) throws IOException {
- copyStream(in, out, 4096);
- }
-
- /**
- * static method to copy the streams
- *
- * @param in
- * @param out
- * @param bufferSize
- * @throws IOException
- */
- public static void copyStream(InputStream in, OutputStream out, int bufferSize) throws IOException {
- byte[] buffer = new byte[bufferSize];
- int len;
- while ((len = in.read(buffer)) != -1) {
- out.write(buffer, 0, len);
- }
- out.close();
- }
-
- /**
- * interface used to define write method for outputStream
- */
- public abstract static interface StreamWriter {
- /**
- * abstract method used to write the response
- *
- * @param paramOutputStream
- * @throws IOException
- */
- public abstract void write(OutputStream paramOutputStream) throws IOException;
- }
-
- /**
- * static method returns stream for binary response
- *
- * @param ctx
- * @return
- * @throws IOException
- */
- public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx) throws IOException {
- return getStreamForBinaryResponse(ctx, "application/octet-stream");
- }
-
- /**
- * static method returns stream for binaryResponses
- *
- * @param ctx
- * @param contentType
- * @return
- * @throws IOException
- */
- public static OutputStream getStreamForBinaryResponse(DMaaPContext ctx, String contentType) throws IOException {
- ctx.getResponse().setContentType(contentType);
-
-
- boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
-
- if (fResponseEntityAllowed) {
- try(OutputStream os = ctx.getResponse().getOutputStream()){
- return os;
- }catch (Exception e){
- log.error("Exception in getStreamForBinaryResponse",e);
- throw new IOException();
- }
- } else {
- try(OutputStream os = new NullStream()){
- return os;
- }catch (Exception e){
- log.error("Exception in getStreamForBinaryResponse",e);
- throw new IOException();
- }
- }
- }
-
- /**
- *
- * @author anowarul.islam
- *
- */
- private static class NullStream extends OutputStream {
- /**
- * @param b
- * integer
- */
- public void write(int b) {
- }
- }
-
- private static class NullWriter extends Writer {
- /**
- * write method
- * @param cbuf
- * @param off
- * @param len
- */
- public void write(char[] cbuf, int off, int len) {
- }
-
- /**
- * flush method
- */
- public void flush() {
- }
-
- /**
- * close method
- */
- public void close() {
- }
- }
-
- /**
- * sttaic method fetch stream for text
- *
- * @param ctx
- * @param err
- * @param content
- * @param mimeType
- */
- public static void sendErrorAndBody(DMaaPContext ctx, int err, String content, String mimeType) {
- try {
- setStatus(ctx, err);
- getStreamForTextResponse(ctx, mimeType).println(content);
- } catch (IOException e) {
- log.error(new StringBuilder().append("Error sending error response: ").append(e.getMessage()).toString(),
- e);
- }
- }
-
- /**
- * method to set the code
- *
- * @param ctx
- * @param code
- */
- public static void setStatus(DMaaPContext ctx, int code) {
- ctx.getResponse().setStatus(code);
- }
-
- /**
- * static method returns stream for text response
- *
- * @param ctx
- * @return
- * @throws IOException
- */
- public static PrintWriter getStreamForTextResponse(DMaaPContext ctx) throws IOException {
- return getStreamForTextResponse(ctx, "text/html");
- }
-
- /**
- * static method returns stream for text response
- *
- * @param ctx
- * @param contentType
- * @return
- * @throws IOException
- */
- public static PrintWriter getStreamForTextResponse(DMaaPContext ctx, String contentType) throws IOException {
- ctx.getResponse().setContentType(contentType);
-
- PrintWriter pw = null;
- boolean fResponseEntityAllowed = (!(ctx.getRequest().getMethod().equalsIgnoreCase("HEAD")));
-
- if (fResponseEntityAllowed) {
- pw = ctx.getResponse().getWriter();
- } else {
- pw = new PrintWriter(new NullWriter());
- }
- return pw;
- }
-} \ No newline at end of file
diff --git a/src/main/java/com/att/dmf/mr/utils/Emailer.java b/src/main/java/com/att/dmf/mr/utils/Emailer.java
deleted file mode 100644
index 1b68216..0000000
--- a/src/main/java/com/att/dmf/mr/utils/Emailer.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.utils;
-
-import java.io.IOException;
-import java.util.Properties;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-import javax.mail.BodyPart;
-import javax.mail.Message;
-import javax.mail.Multipart;
-import javax.mail.PasswordAuthentication;
-import javax.mail.Session;
-import javax.mail.Transport;
-import javax.mail.internet.InternetAddress;
-import javax.mail.internet.MimeBodyPart;
-import javax.mail.internet.MimeMessage;
-import javax.mail.internet.MimeMultipart;
-
-
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * Send an email from a message.
- *
- * @author peter
- */
-public class Emailer
-{
- public static final String kField_To = "to";
- public static final String kField_Subject = "subject";
- public static final String kField_Message = "message";
-
- public Emailer()
- {
- fExec = Executors.newCachedThreadPool ();
-
- }
-
- public void send ( String to, String subj, String body ) throws IOException
- {
- final String[] addrs = to.split ( "," );
-
- if ( to.length () > 0 )
- {
- final MailTask mt = new MailTask ( addrs, subj, body );
- fExec.submit ( mt );
- }
- else
- {
- log.warn ( "At least one address is required." );
- }
- }
-
- public void close ()
- {
- fExec.shutdown ();
- }
-
- private final ExecutorService fExec;
-
-
-
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(Emailer.class);
-
- public static final String kSetting_MailAuthUser = "mailLogin";
- public static final String kSetting_MailFromEmail = "mailFromEmail";
- public static final String kSetting_MailFromName = "mailFromName";
- public static final String kSetting_SmtpServer = "mailSmtpServer";
- public static final String kSetting_SmtpServerPort = "mailSmtpServerPort";
- public static final String kSetting_SmtpServerSsl = "mailSmtpServerSsl";
- public static final String kSetting_SmtpServerUseAuth = "mailSmtpServerUseAuth";
-
- private class MailTask implements Runnable
- {
- public MailTask ( String[] to, String subject, String msgBody )
- {
- fToAddrs = to;
- fSubject = subject;
- fBody = msgBody;
- }
-
- private String getSetting ( String settingKey, String defval )
- {
-
- String strSet = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,settingKey);
- if(strSet==null)strSet=defval;
- return strSet;
- }
-
- // we need to get setting values from the evaluator but also the channel config
- private void makeSetting ( Properties props, String propKey, String settingKey, String defval )
- {
- props.put ( propKey, getSetting ( settingKey, defval ) );
- }
-
- private void makeSetting ( Properties props, String propKey, String settingKey, int defval )
- {
- makeSetting ( props, propKey, settingKey, "" + defval );
- }
-
- private void makeSetting ( Properties props, String propKey, String settingKey, boolean defval )
- {
- makeSetting ( props, propKey, settingKey, "" + defval );
- }
-
- @Override
- public void run ()
- {
- final StringBuffer tag = new StringBuffer ();
- final StringBuffer addrList = new StringBuffer ();
- tag.append ( "(" );
- for ( String to : fToAddrs )
- {
- if ( addrList.length () > 0 )
- {
- addrList.append ( ", " );
- }
- addrList.append ( to );
- }
- tag.append ( addrList.toString () );
- tag.append ( ") \"" );
- tag.append ( fSubject );
- tag.append ( "\"" );
-
- log.info ( "sending mail to " + tag );
-
- try
- {
- final Properties prop = new Properties ();
- makeSetting ( prop, "mail.smtp.port", kSetting_SmtpServerPort, 587 );
- prop.put ( "mail.smtp.socketFactory.fallback", "false" );
- prop.put ( "mail.smtp.quitwait", "false" );
- makeSetting ( prop, "mail.smtp.host", kSetting_SmtpServer, "smtp.it.att.com" );
- makeSetting ( prop, "mail.smtp.auth", kSetting_SmtpServerUseAuth, true );
- makeSetting ( prop, "mail.smtp.starttls.enable", kSetting_SmtpServerSsl, true );
-
- final String un = getSetting ( kSetting_MailAuthUser, "" );
- final String value=(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"mailPassword")!=null)?AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,"mailPassword"):"";
- final Session session = Session.getInstance ( prop,
- new javax.mail.Authenticator()
- {
- @Override
- protected PasswordAuthentication getPasswordAuthentication()
- {
- return new PasswordAuthentication ( un, value );
- }
- }
- );
-
- final Message msg = new MimeMessage ( session );
-
- final InternetAddress from = new InternetAddress (
- getSetting ( kSetting_MailFromEmail, "team@sa2020.it.att.com" ),
- getSetting ( kSetting_MailFromName, "The GFP/SA2020 Team" ) );
- msg.setFrom ( from );
- msg.setReplyTo ( new InternetAddress[] { from } );
- msg.setSubject ( fSubject );
-
- for ( String toAddr : fToAddrs )
- {
- final InternetAddress to = new InternetAddress ( toAddr );
- msg.addRecipient ( Message.RecipientType.TO, to );
- }
-
- final Multipart multipart = new MimeMultipart ( "related" );
- final BodyPart htmlPart = new MimeBodyPart ();
- htmlPart.setContent ( fBody, "text/plain" );
- multipart.addBodyPart ( htmlPart );
- msg.setContent ( multipart );
-
- Transport.send ( msg );
-
- log.info ( "mailing " + tag + " off without error" );
- }
- catch ( Exception e )
- {
- log.warn ( "Exception caught for " + tag, e );
- }
- }
-
- private final String[] fToAddrs;
- private final String fSubject;
- private final String fBody;
- }
-}
diff --git a/src/main/java/com/att/dmf/mr/utils/PropertyReader.java b/src/main/java/com/att/dmf/mr/utils/PropertyReader.java
deleted file mode 100644
index 000869e..0000000
--- a/src/main/java/com/att/dmf/mr/utils/PropertyReader.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.utils;
-
-import java.util.Map;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
-
-/**
- *
- * @author nilesh.labde
- *
- *
- */
-public class PropertyReader extends nvReadableStack {
- /**
- *
- * initializing logger
- *
- */
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(PropertyReader.class);
-
-
- /**
- * constructor initialization
- *
- * @throws loadException
- *
- */
- public PropertyReader() throws loadException {
-
-
-
-
-
- }
-
- /**
- *
- *
- * @param argMap
- * @param key
- * @param defaultValue
- * @return
- *
- */
- @SuppressWarnings("unused")
- private static String getSetting(Map<String, String> argMap, final String key, final String defaultValue) {
- String val = (String) argMap.get(key);
- if (null == val) {
- return defaultValue;
- }
- return val;
- }
-
- /**
- *
- * @param resourceName
- * @param clazz
- * @return
- * @exception MalformedURLException
- *
- */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
diff --git a/src/main/java/com/att/dmf/mr/utils/Utils.java b/src/main/java/com/att/dmf/mr/utils/Utils.java
deleted file mode 100644
index 5f84d85..0000000
--- a/src/main/java/com/att/dmf/mr/utils/Utils.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.dmf.mr.utils;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.text.DecimalFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Enumeration;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Properties;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.dmf.mr.backends.kafka.KafkaPublisher;
-import com.att.dmf.mr.beans.DMaaPContext;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-/**
- * This is an utility class for various operations for formatting
- * @author nilanjana.maity
- *
- */
-public class Utils {
-
- private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
- public static final String CAMBRIA_AUTH_HEADER = "X-CambriaAuth";
- private static final String BATCH_ID_FORMAT = "000000";
- private static final EELFLogger log = EELFManager.getInstance().getLogger(Utils.class);
-
- private Utils() {
- super();
- }
-
- /**
- * Formatting the date
- * @param date
- * @return date or null
- */
- public static String getFormattedDate(Date date) {
- SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
- if (null != date){
- return sdf.format(date);
- }
- return null;
- }
- /**
- * to get the details of User Api Key
- * @param request
- * @return authkey or null
- */
- public static String getUserApiKey(HttpServletRequest request) {
- final String auth = request.getHeader(CAMBRIA_AUTH_HEADER);
- if (null != auth) {
- final String[] splittedAuthKey = auth.split(":");
- return splittedAuthKey[0];
- }else if (null!=request.getHeader("Authorization")){
- /**
- * AAF implementation enhancement
- */
- String user= request.getUserPrincipal().getName().toString();
- return user.substring(0, user.lastIndexOf("@"));
- }
- return null;
- }
- /**
- * to format the batch sequence id
- * @param batchId
- * @return batchId
- */
- public static String getFromattedBatchSequenceId(Long batchId) {
- DecimalFormat format = new DecimalFormat(BATCH_ID_FORMAT);
- return format.format(batchId);
- }
-
- /**
- * to get the message length in bytes
- * @param message
- * @return bytes or 0
- */
- public static long messageLengthInBytes(String message) {
- if (null != message) {
- return message.getBytes().length;
- }
- return 0;
- }
- /**
- * To get transaction id details
- * @param transactionId
- * @return transactionId or null
- */
- public static String getResponseTransactionId(String transactionId) {
- if (null != transactionId && !transactionId.isEmpty()) {
- return transactionId.substring(0, transactionId.lastIndexOf("::"));
- }
- return null;
- }
-
- /**
- * get the thread sleep time
- * @param ratePerMinute
- * @return ratePerMinute or 0
- */
- public static long getSleepMsForRate ( double ratePerMinute )
- {
- if ( ratePerMinute <= 0.0 ) return 0;
- return Math.max ( 1000, Math.round ( 60 * 1000 / ratePerMinute ) );
- }
-
- public static String getRemoteAddress(DMaaPContext ctx)
- {
- String reqAddr = ctx.getRequest().getRemoteAddr();
- String fwdHeader = getFirstHeader("X-Forwarded-For",ctx);
- return ((fwdHeader != null) ? fwdHeader : reqAddr);
- }
- public static String getFirstHeader(String h,DMaaPContext ctx)
- {
- List l = getHeader(h,ctx);
- return ((l.size() > 0) ? (String)l.iterator().next() : null);
- }
- public static List<String> getHeader(String h,DMaaPContext ctx)
- {
- LinkedList list = new LinkedList();
- Enumeration e = ctx.getRequest().getHeaders(h);
- while (e.hasMoreElements())
- {
- list.add(e.nextElement().toString());
- }
- return list;
- }
-
- public static String getKafkaproperty(){
- InputStream input = new Utils().getClass().getResourceAsStream("/kafka.properties");
- Properties props = new Properties();
- try {
- props.load(input);
- } catch (IOException e) {
- log.error("failed to read kafka.properties");
- }
- return props.getProperty("key");
-
-
- }
-
- public static boolean isCadiEnabled(){
- boolean enableCadi=false;
- if(System.getenv("enableCadi")!=null){
- enableCadi=Boolean.getBoolean(System.getenv("enableCadi"));
- }
-
- return enableCadi;
- }
-
-}
diff --git a/src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java b/src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java
deleted file mode 100644
index 0e2804e..0000000
--- a/src/main/java/com/att/mr/apiServer/metrics/cambria/DMaaPMetricsSender.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.mr.apiServer.metrics.cambria;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.TimeUnit;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.metrics.publisher.CambriaPublisher;
-import com.att.dmf.mr.metrics.publisher.DMaaPCambriaClientFactory;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.nsa.apiServer.metrics.cambria.MetricsSender;
-import com.att.nsa.metrics.CdmMetricsRegistry;
-import com.att.nsa.metrics.impl.CdmConstant;
-
-/**
- * MetricsSender will send the given metrics registry content as an event on the
- * Cambria event broker to the given topic.
- *
- * @author peter
- *
- */
-public class DMaaPMetricsSender implements Runnable {
- public static final String kSetting_CambriaEnabled = "metrics.send.cambria.enabled";
- public static final String kSetting_CambriaBaseUrl = "metrics.send.cambria.baseUrl";
- public static final String kSetting_CambriaTopic = "metrics.send.cambria.topic";
- public static final String kSetting_CambriaSendFreqSecs = "metrics.send.cambria.sendEverySeconds";
-
- /**
- * Schedule a periodic send of the given metrics registry using the given
- * settings container for the Cambria location, topic, and send frequency.
- * <br/>
- * <br/>
- * If the enabled flag is false, this method returns null.
- *
- * @param scheduler
- * @param metrics
- * @param settings
- * @param defaultTopic
- * @return a handle to the scheduled task
- */
- public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
- String defaultTopic) {
- log.info("Inside : DMaaPMetricsSender : sendPeriodically");
- String cambriaSetting= com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
- boolean setEnable=true;
- if (cambriaSetting!=null && cambriaSetting.equals("false") )
- setEnable= false;
-
- if (setEnable) {
- String Setting_CambriaBaseUrl=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaEnabled);
-
- Setting_CambriaBaseUrl=Setting_CambriaBaseUrl==null?"localhost":Setting_CambriaBaseUrl;
-
- String Setting_CambriaTopic=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaTopic);
- if(Setting_CambriaTopic==null) Setting_CambriaTopic = "msgrtr.apinode.metrics.dmaap";
-
-
-
- String Setting_CambriaSendFreqSecs=com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop,kSetting_CambriaSendFreqSecs);
-
- int _CambriaSendFreqSecs =30;
- if(Setting_CambriaSendFreqSecs!=null){
- _CambriaSendFreqSecs = Integer.parseInt(Setting_CambriaSendFreqSecs);
- }
-
-
- return DMaaPMetricsSender.sendPeriodically(scheduler, metrics,
- Setting_CambriaBaseUrl,Setting_CambriaTopic,_CambriaSendFreqSecs
- );
- /*return DMaaPMetricsSender.sendPeriodically(scheduler, metrics,
- settings.getString(kSetting_CambriaBaseUrl, "localhost"),
- settings.getString(kSetting_CambriaTopic, defaultTopic),
- settings.getInt(kSetting_CambriaSendFreqSecs, 30));*/
- } else {
- return null;
- }
- }
-
- /**
- * Schedule a periodic send of the metrics registry to the given Cambria
- * broker and topic.
- *
- * @param scheduler
- * @param metrics
- * the registry to send
- * @param cambriaBaseUrl
- * the base URL for Cambria
- * @param topic
- * the topic to publish on
- * @param everySeconds
- * how frequently to publish
- * @return a handle to the scheduled task
- */
- public static ScheduledFuture<?> sendPeriodically(ScheduledExecutorService scheduler, CdmMetricsRegistry metrics,
- String cambriaBaseUrl, String topic, int everySeconds) {
- return scheduler.scheduleAtFixedRate(new DMaaPMetricsSender(metrics, cambriaBaseUrl, topic), everySeconds,
- everySeconds, TimeUnit.SECONDS);
- }
-
- /**
- * Create a metrics sender.
- *
- * @param metrics
- * @param cambriaBaseUrl
- * @param topic
- */
- public DMaaPMetricsSender(CdmMetricsRegistry metrics, String cambriaBaseUrl, String topic) {
- try {
- fMetrics = metrics;
- fHostname = InetAddress.getLocalHost().getHostName();
-
- // setup a "simple" publisher that will send metrics immediately
- fCambria = DMaaPCambriaClientFactory.createSimplePublisher(cambriaBaseUrl, topic);
- } catch (UnknownHostException e) {
- log.warn("Unable to get localhost address in MetricsSender constructor.", e);
- throw new RuntimeException(e);
- }
- }
-
- /**
- * Send on demand.
- */
- public void send() {
- try {
- final JSONObject o = fMetrics.toJson();
- o.put("hostname", fHostname);
- o.put("now", System.currentTimeMillis());
- o.put("metricsSendTime", addTimeStamp());
- o.put("transactionEnabled", false);
- fCambria.send(fHostname, o.toString());
- } catch (JSONException e) {
- log.warn("Error posting metrics to Cambria: " + e.getMessage());
- } catch (IOException e) {
- log.warn("Error posting metrics to Cambria: " + e.getMessage());
- }
- }
-
- /**
- * Run() calls send(). It's meant for use in a background-scheduled task.
- */
- @Override
- public void run() {
- send();
- }
-
- private final CdmMetricsRegistry fMetrics;
- private final CambriaPublisher fCambria;
- private final String fHostname;
-
-
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(MetricsSender.class);
- /**
- * method creates and returnd CdmConstant object using current timestamp
- *
- * @return
- */
- public CdmConstant addTimeStamp() {
- // Add the timestamp with every metrics send
- final long metricsSendTime = System.currentTimeMillis();
- final Date d = new Date(metricsSendTime);
- final String text = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz").format(d);
- return new CdmConstant(metricsSendTime / 1000, "Metrics Send Time (epoch); " + text);
- }
-}
diff --git a/src/main/java/com/att/mr/filter/ContentLengthFilter.java b/src/main/java/com/att/mr/filter/ContentLengthFilter.java
deleted file mode 100644
index 26f58e0..0000000
--- a/src/main/java/com/att/mr/filter/ContentLengthFilter.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.mr.filter;
-
-import java.io.IOException;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.http.HttpStatus;
-import org.json.JSONObject;
-import org.springframework.context.ApplicationContext;
-import org.springframework.web.context.support.WebApplicationContextUtils;
-
-import com.att.dmf.mr.CambriaApiException;
-import com.att.dmf.mr.exception.DMaaPErrorMessages;
-import com.att.dmf.mr.exception.DMaaPResponseCode;
-import com.att.dmf.mr.exception.ErrorResponse;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-/**
- * Servlet Filter implementation class ContentLengthFilter
- */
-public class ContentLengthFilter implements Filter {
-
- private DefaultLength defaultLength;
-
- private FilterConfig filterConfig = null;
- DMaaPErrorMessages errorMessages = null;
-
- private static final EELFLogger log = EELFManager.getInstance().getLogger(ContentLengthFilter.class);
- /**
- * Default constructor.
- */
-
- public ContentLengthFilter() {
- // TODO Auto-generated constructor stub
- }
-
- /**
- * @see Filter#destroy()
- */
- public void destroy() {
- // TODO Auto-generated method stub
- }
-
- /**
- * @see Filter#doFilter(ServletRequest, ServletResponse, FilterChain)
- */
- public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException,
- ServletException {
- // TODO Auto-generated method stub
- // place your code here
- log.info("inside servlet do filter content length checking before pub/sub");
- HttpServletRequest request = (HttpServletRequest) req;
- JSONObject jsonObj = null;
- int requestLength = 0;
- try {
- // retrieving content length from message header
-
- if (null != request.getHeader("Content-Length")) {
- requestLength = Integer.parseInt(request.getHeader("Content-Length"));
- }
- // retrieving encoding from message header
- String transferEncoding = request.getHeader("Transfer-Encoding");
- // checking for no encoding, chunked and requestLength greater then
- // default length
- if (null != transferEncoding && !(transferEncoding.contains("chunked"))
- && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
- jsonObj = new JSONObject().append("defaultlength", defaultLength)
- .append("requestlength", requestLength);
- log.error("message length is greater than default");
- throw new CambriaApiException(jsonObj);
- } else if (null == transferEncoding && (requestLength > Integer.parseInt(defaultLength.getDefaultLength()))) {
- jsonObj = new JSONObject().append("defaultlength", defaultLength.getDefaultLength()).append(
- "requestlength", requestLength);
- log.error("Request message is not chunked or request length is greater than default length");
- throw new CambriaApiException(jsonObj);
- } else {
- chain.doFilter(req, res);
- }
- } catch (CambriaApiException | NumberFormatException e) {
- log.error("message size is greater then default");
- ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_EXPECTATION_FAILED,
- DMaaPResponseCode.MSG_SIZE_EXCEEDS_MSG_LIMIT.getResponseCode(), errorMessages.getMsgSizeExceeds()
- + jsonObj.toString());
- log.info(errRes.toString());
-
- }
-
- }
-
- /**
- * @see Filter#init(FilterConfig)
- */
- public void init(FilterConfig fConfig) throws ServletException {
- // TODO Auto-generated method stub
- this.filterConfig = fConfig;
- log.info("Filter Content Length Initialize");
- ApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(fConfig
- .getServletContext());
- DefaultLength defLength = (DefaultLength) ctx.getBean("defLength");
- DMaaPErrorMessages errorMessages = (DMaaPErrorMessages) ctx.getBean("DMaaPErrorMessages");
- this.errorMessages = errorMessages;
- this.defaultLength = defLength;
-
- }
-
-}
diff --git a/src/main/java/com/att/mr/filter/DefaultLength.java b/src/main/java/com/att/mr/filter/DefaultLength.java
deleted file mode 100644
index 43169e5..0000000
--- a/src/main/java/com/att/mr/filter/DefaultLength.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
- *******************************************************************************/
-package com.att.mr.filter;
-
-
-public class DefaultLength {
-
- String defaultLength;
-
- public String getDefaultLength() {
- return defaultLength;
- }
-
- public void setDefaultLength(String defaultLength) {
- this.defaultLength = defaultLength;
- }
-
-}