diff options
author | 2017-09-26 14:00:57 -0400 | |
---|---|---|
committer | 2017-09-26 14:01:41 -0400 | |
commit | c593dfe4c59d37d5d4ea14e3ac31da3318029562 (patch) | |
tree | 76cc5a494f02e14b809caad9c050fbfd6cd61a51 /src/main/java/org/openecomp/sparky/synchronizer | |
parent | 6777c6092050a0271c5d7de9c239cf1580d41fa8 (diff) |
Renaming openecomp to onap
Issue-ID: AAI-208
Change-Id: I2bd02287bed376111156aca0100e2b7b74e368e3
Signed-off-by: Arul.Nambi <arul.nambi@amdocs.com>
Diffstat (limited to 'src/main/java/org/openecomp/sparky/synchronizer')
45 files changed, 0 insertions, 10178 deletions
diff --git a/src/main/java/org/openecomp/sparky/synchronizer/AbstractEntitySynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/AbstractEntitySynchronizer.java deleted file mode 100644 index 55578b4..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/AbstractEntitySynchronizer.java +++ /dev/null @@ -1,568 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.openecomp.sparky.synchronizer; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.EnumSet; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; - -import org.openecomp.cl.api.Logger; -import org.openecomp.sparky.config.oxm.OxmModelLoader; -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; -import org.openecomp.sparky.dal.aai.ActiveInventoryEntityStatistics; -import org.openecomp.sparky.dal.aai.ActiveInventoryProcessingExceptionStatistics; -import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; -import org.openecomp.sparky.dal.elasticsearch.ElasticSearchDataProvider; -import org.openecomp.sparky.dal.elasticsearch.ElasticSearchEntityStatistics; -import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.openecomp.sparky.dal.rest.HttpMethod; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.dal.rest.RestOperationalStatistics; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.util.NodeUtils; - -import org.openecomp.cl.mdc.MdcContext; -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * The Class AbstractEntitySynchronizer. - * - * @author davea. - */ -public abstract class AbstractEntitySynchronizer { - - protected static final int VERSION_CONFLICT_EXCEPTION_CODE = 409; - protected static final Integer RETRY_COUNT_PER_ENTITY_LIMIT = new Integer(3); - - protected final Logger logger; - protected ObjectMapper mapper; - protected OxmModelLoader oxmModelLoader; - protected long syncDurationInMs; - /** - * The Enum StatFlag. 
- */ - protected enum StatFlag { - AAI_REST_STATS, AAI_ENTITY_STATS, AAI_PROCESSING_EXCEPTION_STATS, - AAI_TASK_PROCESSING_STATS, ES_REST_STATS, ES_ENTITY_STATS, ES_TASK_PROCESSING_STATS - } - - protected EnumSet<StatFlag> enabledStatFlags; - - protected ActiveInventoryDataProvider aaiDataProvider; - protected ElasticSearchDataProvider esDataProvider; - - protected ExecutorService synchronizerExecutor; - protected ExecutorService aaiExecutor; - protected ExecutorService esExecutor; - - private RestOperationalStatistics esRestStats; - protected ElasticSearchEntityStatistics esEntityStats; - - private RestOperationalStatistics aaiRestStats; - protected ActiveInventoryEntityStatistics aaiEntityStats; - private ActiveInventoryProcessingExceptionStatistics aaiProcessingExceptionStats; - - private TaskProcessingStats aaiTaskProcessingStats; - private TaskProcessingStats esTaskProcessingStats; - - private TransactionRateController aaiTransactionRateController; - private TransactionRateController esTransactionRateController; - - protected AtomicInteger aaiWorkOnHand; - protected AtomicInteger esWorkOnHand; - protected String synchronizerName; - - protected abstract boolean isSyncDone(); - protected boolean shouldSkipSync; - - public String getActiveInventoryStatisticsReport() { - - StringBuilder sb = new StringBuilder(128); - - if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { - sb.append("\n\n ").append("REST Operational Stats:"); - sb.append(aaiRestStats.getStatisticsReport()); - } - - if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { - sb.append("\n\n ").append("Entity Stats:"); - sb.append(aaiEntityStats.getStatisticsReport()); - } - - if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { - sb.append("\n\n ").append("Processing Exception Stats:"); - sb.append(aaiProcessingExceptionStats.getStatisticsReport()); - } - - return sb.toString(); - - } - - public String getElasticSearchStatisticsReport() { - - StringBuilder sb = new 
StringBuilder(128); - - if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { - sb.append("\n\n ").append("REST Operational Stats:"); - sb.append(esRestStats.getStatisticsReport()); - } - - if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { - sb.append("\n\n ").append("Entity Stats:"); - sb.append(esEntityStats.getStatisticsReport()); - } - - return sb.toString(); - - } - - /** - * Adds the active inventory stat report. - * - * @param sb the sb - */ - private void addActiveInventoryStatReport(StringBuilder sb) { - - if (sb == null) { - return; - } - - sb.append("\n\n AAI"); - sb.append(getActiveInventoryStatisticsReport()); - - double currentTps = 0; - if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { - sb.append("\n\n ").append("Task Processor Stats:"); - sb.append(aaiTaskProcessingStats.getStatisticsReport(false, " ")); - - currentTps = aaiTransactionRateController.getCurrentTps(); - - sb.append("\n ").append("Current TPS: ").append(currentTps); - } - - sb.append("\n ").append("Current WOH: ").append(aaiWorkOnHand.get()); - - if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { - if (currentTps > 0) { - double numMillisecondsToCompletion = (aaiWorkOnHand.get() / currentTps) * 1000; - sb.append("\n ").append("SyncDurationRemaining=") - .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); - } - } - - } - - /** - * Adds the elastic stat report. 
- * - * @param sb the sb - */ - private void addElasticStatReport(StringBuilder sb) { - - if (sb == null) { - return; - } - - sb.append("\n\n ELASTIC"); - sb.append(getElasticSearchStatisticsReport()); - - double currentTps = 0; - - if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { - sb.append("\n\n ").append("Task Processor Stats:"); - sb.append(esTaskProcessingStats.getStatisticsReport(false, " ")); - - currentTps = esTransactionRateController.getCurrentTps(); - - sb.append("\n ").append("Current TPS: ").append(currentTps); - } - - sb.append("\n ").append("Current WOH: ").append(esWorkOnHand.get()); - - if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { - if (currentTps > 0) { - double numMillisecondsToCompletion = (esWorkOnHand.get() / currentTps) * 1000; - sb.append("\n ").append("SyncDurationRemaining=") - .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); - } - } - - - } - - /** - * Gets the stat report. - * - * @param syncOpTimeInMs the sync op time in ms - * @param showFinalReport the show final report - * @return the stat report - */ - protected String getStatReport(long syncOpTimeInMs, boolean showFinalReport) { - - StringBuilder sb = new StringBuilder(128); - - sb.append("\n").append(synchronizerName + " Statistics: ( Sync Operation Duration = " - + NodeUtils.getDurationBreakdown(syncOpTimeInMs) + " )"); - - addActiveInventoryStatReport(sb); - addElasticStatReport(sb); - - if (showFinalReport) { - sb.append("\n\n ").append("Sync Completed!\n"); - } else { - sb.append("\n\n ").append("Sync in Progress...\n"); - } - - return sb.toString(); - - } - - protected String indexName; - protected long syncStartedTimeStampInMs; - - /** - * Instantiates a new abstract entity synchronizer. 
- * - * @param logger the logger - * @param syncName the sync name - * @param numSyncWorkers the num sync workers - * @param numActiveInventoryWorkers the num active inventory workers - * @param numElasticsearchWorkers the num elasticsearch workers - * @param indexName the index name - * @throws Exception the exception - */ - protected AbstractEntitySynchronizer(Logger logger, String syncName, int numSyncWorkers, - int numActiveInventoryWorkers, int numElasticsearchWorkers, String indexName) - throws Exception { - this.logger = logger; - this.synchronizerExecutor = - NodeUtils.createNamedExecutor(syncName + "-INTERNAL", numSyncWorkers, logger); - this.aaiExecutor = - NodeUtils.createNamedExecutor(syncName + "-AAI", numActiveInventoryWorkers, logger); - this.esExecutor = - NodeUtils.createNamedExecutor(syncName + "-ES", numElasticsearchWorkers, logger); - this.mapper = new ObjectMapper(); - this.oxmModelLoader = OxmModelLoader.getInstance(); - this.indexName = indexName; - this.esRestStats = new RestOperationalStatistics(); - this.esEntityStats = new ElasticSearchEntityStatistics(oxmModelLoader); - this.aaiRestStats = new RestOperationalStatistics(); - this.aaiEntityStats = new ActiveInventoryEntityStatistics(oxmModelLoader); - this.aaiProcessingExceptionStats = new ActiveInventoryProcessingExceptionStatistics(); - this.aaiTaskProcessingStats = - new TaskProcessingStats(ActiveInventoryConfig.getConfig().getTaskProcessorConfig()); - this.esTaskProcessingStats = - new TaskProcessingStats(ElasticSearchConfig.getConfig().getProcessorConfig()); - - this.aaiTransactionRateController = - new TransactionRateController(ActiveInventoryConfig.getConfig().getTaskProcessorConfig()); - this.esTransactionRateController = - new TransactionRateController(ElasticSearchConfig.getConfig().getProcessorConfig()); - - this.aaiWorkOnHand = new AtomicInteger(0); - this.esWorkOnHand = new AtomicInteger(0); - - enabledStatFlags = EnumSet.allOf(StatFlag.class); - - this.synchronizerName = 
"Abstact Entity Synchronizer"; - - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "AbstractEntitySynchronizer", "", "Sync", ""); - - this.shouldSkipSync = false; - this.syncStartedTimeStampInMs = System.currentTimeMillis(); - this.syncDurationInMs = -1; - } - - public boolean shouldSkipSync() { - return shouldSkipSync; - } - - public void setShouldSkipSync(boolean shouldSkipSync) { - this.shouldSkipSync = shouldSkipSync; - } - - /** - * Inc active inventory work on hand counter. - */ - protected void incActiveInventoryWorkOnHandCounter() { - aaiWorkOnHand.incrementAndGet(); - } - - /** - * Dec active inventory work on hand counter. - */ - protected void decActiveInventoryWorkOnHandCounter() { - aaiWorkOnHand.decrementAndGet(); - } - - /** - * Inc elastic search work on hand counter. - */ - protected void incElasticSearchWorkOnHandCounter() { - esWorkOnHand.incrementAndGet(); - } - - /** - * Dec elastic search work on hand counter. - */ - protected void decElasticSearchWorkOnHandCounter() { - esWorkOnHand.decrementAndGet(); - } - - /** - * Shutdown executors. - */ - protected void shutdownExecutors() { - try { - synchronizerExecutor.shutdown(); - aaiExecutor.shutdown(); - esExecutor.shutdown(); - aaiDataProvider.shutdown(); - esDataProvider.shutdown(); - } catch (Exception exc) { - logger.error(AaiUiMsgs.ERROR_SHUTDOWN_EXECUTORS, exc ); - } - } - - /** - * Clear cache. - */ - public void clearCache() { - if (aaiDataProvider != null) { - aaiDataProvider.clearCache(); - } - } - - protected ActiveInventoryDataProvider getAaiDataProvider() { - return aaiDataProvider; - } - - public void setAaiDataProvider(ActiveInventoryDataProvider aaiDataProvider) { - this.aaiDataProvider = aaiDataProvider; - } - - protected ElasticSearchDataProvider getEsDataProvider() { - return esDataProvider; - } - - public void setEsDataProvider(ElasticSearchDataProvider provider) { - this.esDataProvider = provider; - } - - /** - * Gets the elastic full url. 
- * - * @param resourceUrl the resource url - * @param indexName the index name - * @param indexType the index type - * @return the elastic full url - * @throws Exception the exception - */ - protected String getElasticFullUrl(String resourceUrl, String indexName, String indexType) - throws Exception { - return ElasticSearchConfig.getConfig().getElasticFullUrl(resourceUrl, indexName, indexType); - } - - /** - * Gets the elastic full url. - * - * @param resourceUrl the resource url - * @param indexName the index name - * @return the elastic full url - * @throws Exception the exception - */ - protected String getElasticFullUrl(String resourceUrl, String indexName) throws Exception { - return ElasticSearchConfig.getConfig().getElasticFullUrl(resourceUrl, indexName); - } - - public String getIndexName() { - return indexName; - } - - public void setIndexName(String indexName) { - this.indexName = indexName; - } - - - /** - * Gets the response length. - * - * @param txn the txn - * @return the response length - */ - private long getResponseLength(NetworkTransaction txn) { - - if (txn == null) { - return -1; - } - - OperationResult result = txn.getOperationResult(); - - if (result == null) { - return -1; - } - - if (result.getResult() != null) { - return result.getResult().length(); - } - - return -1; - } - - /** - * Update elastic search counters. - * - * @param method the method - * @param or the or - */ - protected void updateElasticSearchCounters(HttpMethod method, OperationResult or) { - updateElasticSearchCounters(new NetworkTransaction(method, null, or)); - } - - /** - * Update elastic search counters. - * - * @param method the method - * @param entityType the entity type - * @param or the or - */ - protected void updateElasticSearchCounters(HttpMethod method, String entityType, - OperationResult or) { - updateElasticSearchCounters(new NetworkTransaction(method, entityType, or)); - } - - /** - * Update elastic search counters. 
- * - * @param txn the txn - */ - protected void updateElasticSearchCounters(NetworkTransaction txn) { - - if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { - esRestStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { - esEntityStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { - - esTransactionRateController.trackResponseTime(txn.getOperationResult().getResponseTimeInMs()); - - esTaskProcessingStats - .updateTaskResponseStatsHistogram(txn.getOperationResult().getResponseTimeInMs()); - esTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs()); - - // don't know the cost of the lengh calc, we'll see if it causes a - // problem - - long responsePayloadSizeInBytes = getResponseLength(txn); - if (responsePayloadSizeInBytes >= 0) { - esTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes); - } - - esTaskProcessingStats - .updateTransactionsPerSecondHistogram((long) esTransactionRateController.getCurrentTps()); - } - } - - /** - * Update active inventory counters. - * - * @param method the method - * @param or the or - */ - protected void updateActiveInventoryCounters(HttpMethod method, OperationResult or) { - updateActiveInventoryCounters(new NetworkTransaction(method, null, or)); - } - - /** - * Update active inventory counters. - * - * @param method the method - * @param entityType the entity type - * @param or the or - */ - protected void updateActiveInventoryCounters(HttpMethod method, String entityType, - OperationResult or) { - updateActiveInventoryCounters(new NetworkTransaction(method, entityType, or)); - } - - /** - * Update active inventory counters. 
- * - * @param txn the txn - */ - protected void updateActiveInventoryCounters(NetworkTransaction txn) { - - if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { - aaiRestStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { - aaiEntityStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { - aaiProcessingExceptionStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { - aaiTransactionRateController - .trackResponseTime(txn.getOperationResult().getResponseTimeInMs()); - - aaiTaskProcessingStats - .updateTaskResponseStatsHistogram(txn.getOperationResult().getResponseTimeInMs()); - aaiTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs()); - - // don't know the cost of the lengh calc, we'll see if it causes a - // problem - - long responsePayloadSizeInBytes = getResponseLength(txn); - if (responsePayloadSizeInBytes >= 0) { - aaiTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes); - } - - aaiTaskProcessingStats.updateTransactionsPerSecondHistogram( - (long) aaiTransactionRateController.getCurrentTps()); - } - } - - /** - * Reset counters. 
- */ - protected void resetCounters() { - aaiRestStats.reset(); - aaiEntityStats.reset(); - aaiProcessingExceptionStats.reset(); - - esRestStats.reset(); - esEntityStats.reset(); - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/AggregationSuggestionSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/AggregationSuggestionSynchronizer.java deleted file mode 100644 index 7900193..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/AggregationSuggestionSynchronizer.java +++ /dev/null @@ -1,182 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.openecomp.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.util.Map; -import java.util.concurrent.ExecutorService; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.cl.mdc.MdcContext; -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.rest.HttpMethod; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.entity.AggregationSuggestionEntity; -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut; -import org.openecomp.sparky.util.NodeUtils; -import org.slf4j.MDC; - -public class AggregationSuggestionSynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(AggregationSuggestionSynchronizer.class); - - private boolean isSyncInProgress; - private boolean shouldPerformRetry; - private Map<String, String> contextMap; - protected ExecutorService esPutExecutor; - - public AggregationSuggestionSynchronizer(String indexName) throws Exception { - super(LOG, "ASS-" + indexName.toUpperCase(), 2, 5, 5, indexName); - - this.isSyncInProgress = false; - this.shouldPerformRetry = false; - this.synchronizerName = "Aggregation Suggestion Synchronizer"; - this.contextMap = MDC.getCopyOfContextMap(); - this.esPutExecutor = NodeUtils.createNamedExecutor("ASS-ES-PUT", 2, LOG); - } - - @Override - protected boolean isSyncDone() { - int totalWorkOnHand = esWorkOnHand.get(); - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, - indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand); - } - - if (totalWorkOnHand > 0 || !isSyncInProgress) { - return false; - 
} - - return true; - } - - @Override - public OperationState doSync() { - isSyncInProgress = true; - this.syncDurationInMs = -1; - syncStartedTimeStampInMs = System.currentTimeMillis(); - syncEntity(); - - while (!isSyncDone()) { - try { - if (shouldPerformRetry) { - syncEntity(); - } - Thread.sleep(1000); - } catch (Exception exc) { - // We don't care about this exception - } - } - - return OperationState.OK; - } - - private void syncEntity() { - String txnId = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnId, "AggregationSuggestionSynchronizer", "", "Sync", ""); - - AggregationSuggestionEntity syncEntity = new AggregationSuggestionEntity(); - syncEntity.deriveFields(); - - String link = null; - try { - link = getElasticFullUrl("/" + syncEntity.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - } - - try { - String jsonPayload = null; - jsonPayload = syncEntity.getIndexDocumentJson(); - if (link != null && jsonPayload != null) { - - NetworkTransaction elasticPutTxn = new NetworkTransaction(); - elasticPutTxn.setLink(link); - elasticPutTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - final Map<String, String> contextMap = MDC.getCopyOfContextMap(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, elasticPutTxn, - esDataProvider, contextMap), esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = "Aggregation suggestion entity sync UPDATE PUT error - " - + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); - } else { - updateElasticSearchCounters(result); - wasEsOperationSuccessful(result); - } - }); - } - } catch (Exception exc) { - String message = - "Exception caught during aggregation suggestion entity sync PUT operation. 
Message - " - + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); - } - } - - private void wasEsOperationSuccessful(NetworkTransaction result) { - if (result != null) { - OperationResult opResult = result.getOperationResult(); - - if (!opResult.wasSuccessful()) { - shouldPerformRetry = true; - } else { - isSyncInProgress = false; - shouldPerformRetry = false; - } - } - } - - @Override - public SynchronizerState getState() { - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - } - - @Override - public String getStatReport(boolean shouldDisplayFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return getStatReport(syncDurationInMs, shouldDisplayFinalReport); - } - - @Override - public void shutdown() { - this.shutdownExecutors(); - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/AggregationSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/AggregationSynchronizer.java deleted file mode 100644 index 2fd67c3..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/AggregationSynchronizer.java +++ /dev/null @@ -1,773 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.sql.Timestamp; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Deque; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; - -import javax.json.Json; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; -import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.openecomp.sparky.dal.rest.HttpMethod; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration; -import 
org.openecomp.sparky.synchronizer.entity.AggregationEntity; -import org.openecomp.sparky.synchronizer.entity.MergableEntity; -import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchRetrieval; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchUpdate; -import org.openecomp.sparky.util.NodeUtils; -import org.slf4j.MDC; - -import org.openecomp.cl.mdc.MdcContext; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.node.ArrayNode; - -/** - * The Class AutosuggestionSynchronizer. - */ -public class AggregationSynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - /** - * The Class RetryAggregationEntitySyncContainer. - */ - private class RetryAggregationEntitySyncContainer { - NetworkTransaction txn; - AggregationEntity ae; - - /** - * Instantiates a new retry aggregation entity sync container. 
- * - * @param txn the txn - * @param ae the se - */ - public RetryAggregationEntitySyncContainer(NetworkTransaction txn, AggregationEntity ae) { - this.txn = txn; - this.ae = ae; - } - - public NetworkTransaction getNetworkTransaction() { - return txn; - } - - public AggregationEntity getAggregationEntity() { - return ae; - } - } - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(AggregationSynchronizer.class); - private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; - - private boolean allWorkEnumerated; - private Deque<SelfLinkDescriptor> selflinks; - private Deque<RetryAggregationEntitySyncContainer> retryQueue; - private Map<String, Integer> retryLimitTracker; - protected ExecutorService esPutExecutor; - private ConcurrentHashMap<String, AtomicInteger> entityCounters; - private boolean syncInProgress; - private Map<String, String> contextMap; - private String entityType; - - /** - * Instantiates a new entity aggregation synchronizer. 
- * - * @param indexName the index name - * @throws Exception the exception - */ - public AggregationSynchronizer(String entityType, String indexName) throws Exception { - super(LOG, "AGGES-" + indexName.toUpperCase(), 2, 5, 5, indexName); // multiple Autosuggestion - // Entity Synchronizer will - // run for different indices - - this.entityType = entityType; - this.allWorkEnumerated = false; - this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>(); - this.synchronizerName = "Entity Aggregation Synchronizer"; - this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); - this.syncInProgress = false; - this.allWorkEnumerated = false; - this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); - this.retryQueue = new ConcurrentLinkedDeque<RetryAggregationEntitySyncContainer>(); - this.retryLimitTracker = new ConcurrentHashMap<String, Integer>(); - - this.esPutExecutor = NodeUtils.createNamedExecutor("AGGES-ES-PUT", 1, LOG); - Map<String, OxmEntityDescriptor> descriptor = new HashMap<String, OxmEntityDescriptor>(); - descriptor.put(entityType, oxmModelLoader.getEntityDescriptors().get(entityType)); - this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors( - descriptor); - this.esEntityStats.initializeCountersFromOxmEntityDescriptors( - descriptor); - this.contextMap = MDC.getCopyOfContextMap(); - } - - /** - * Collect all the work. 
- * - * @return the operation state - */ - private OperationState collectAllTheWork() { - final Map<String, String> contextMap = MDC.getCopyOfContextMap(); - final String entity = this.getEntityType(); - try { - - aaiWorkOnHand.set(1); - - supplyAsync(new Supplier<Void>() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(entity); - aaiWorkOnHand.decrementAndGet(); - processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - // TODO -> LOG, what should be logged here? - } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "An error occurred getting data from AAI. Error = " + error.getMessage()); - } - }); - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - allWorkEnumerated = true; - syncEntityTypes(); - - while (!isSyncDone()) { - performRetrySync(); - Thread.sleep(1000); - } - - /* - * Make sure we don't hang on to retries that failed which could cause issues during future - * syncs - */ - retryLimitTracker.clear(); - - } catch (Exception exc) { - // TODO -> LOG, waht should be logged here? - } - - return OperationState.OK; - } - - - /** - * Perform retry sync. 
- */ - private void performRetrySync() { - while (retryQueue.peek() != null) { - - RetryAggregationEntitySyncContainer rsc = retryQueue.poll(); - if (rsc != null) { - - AggregationEntity ae = rsc.getAggregationEntity(); - NetworkTransaction txn = rsc.getNetworkTransaction(); - - String link = null; - try { - /* - * In this retry flow the se object has already derived its fields - */ - link = getElasticFullUrl("/" + ae.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction retryTransaction = new NetworkTransaction(); - retryTransaction.setLink(link); - retryTransaction.setEntityType(txn.getEntityType()); - retryTransaction.setDescriptor(txn.getDescriptor()); - retryTransaction.setOperationType(HttpMethod.GET); - - /* - * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already - * called incrementAndGet when queuing the failed PUT! - */ - - supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, ae); - } - }); - } - - } - } - } - - /** - * Perform document upsert. - * - * @param esGetTxn the es get txn - * @param ae the ae - */ - protected void performDocumentUpsert(NetworkTransaction esGetTxn, AggregationEntity ae) { - /** - * <p> - * <ul> - * As part of the response processing we need to do the following: - * <li>1. Extract the version (if present), it will be the ETAG when we use the - * Search-Abstraction-Service - * <li>2. 
Spawn next task which is to do the PUT operation into elastic with or with the version - * tag - * <li>a) if version is null or RC=404, then standard put, no _update with version tag - * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic - * </ul> - * </p> - */ - String link = null; - try { - link = getElasticFullUrl("/" + ae.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - return; - } - - String versionNumber = null; - boolean wasEntryDiscovered = false; - if (esGetTxn.getOperationResult().getResultCode() == 404) { - LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, ae.getEntityPrimaryKeyValue()); - } else if (esGetTxn.getOperationResult().getResultCode() == 200) { - wasEntryDiscovered = true; - try { - versionNumber = NodeUtils.extractFieldValueFromObject( - NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), - "_version"); - } catch (IOException exc) { - String message = - "Error extracting version number from response, aborting aggregation entity sync of " - + ae.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); - return; - } - } else { - /* - * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we - * return. 
- */ - LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, - String.valueOf(esGetTxn.getOperationResult().getResultCode())); - return; - } - - try { - String jsonPayload = null; - if (wasEntryDiscovered) { - try { - ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>(); - NodeUtils.extractObjectsByKey( - NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), - "_source", sourceObject); - - if (!sourceObject.isEmpty()) { - String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); - MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); - ObjectReader updater = mapper.readerForUpdating(me); - MergableEntity merged = updater.readValue(ae.getIndexDocumentJson()); - jsonPayload = mapper.writeValueAsString(merged); - } - } catch (IOException exc) { - String message = - "Error extracting source value from response, aborting aggregation entity sync of " - + ae.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); - return; - } - } else { - jsonPayload = ae.getIndexDocumentJson(); - } - - if (wasEntryDiscovered) { - if (versionNumber != null && jsonPayload != null) { - - String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), - ElasticSearchConfig.getConfig().getType(), ae.getId(), versionNumber, jsonPayload); - - NetworkTransaction transactionTracker = new NetworkTransaction(); - transactionTracker.setEntityType(esGetTxn.getEntityType()); - transactionTracker.setDescriptor(esGetTxn.getDescriptor()); - transactionTracker.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), - requestPayload, esDataProvider, transactionTracker), esPutExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = "Aggregation entity sync UPDATE 
PUT error - " - + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, ae); - } - }); - } - - } else { - if (link != null && jsonPayload != null) { - - NetworkTransaction updateElasticTxn = new NetworkTransaction(); - updateElasticTxn.setLink(link); - updateElasticTxn.setEntityType(esGetTxn.getEntityType()); - updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); - updateElasticTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), - esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = - "Aggregation entity sync UPDATE PUT error - " + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, ae); - } - }); - } - } - } catch (Exception exc) { - String message = "Exception caught during aggregation entity sync PUT operation. Message - " - + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - /** - * Should allow retry. 
- * - * @param id the id - * @return true, if successful - */ - private boolean shouldAllowRetry(String id) { - boolean isRetryAllowed = true; - if (retryLimitTracker.get(id) != null) { - Integer currentCount = retryLimitTracker.get(id); - if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { - isRetryAllowed = false; - String message = "Aggregation entity re-sync limit reached for " + id - + ", re-sync will no longer be attempted for this entity"; - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } else { - Integer newCount = new Integer(currentCount.intValue() + 1); - retryLimitTracker.put(id, newCount); - } - } else { - Integer firstRetryCount = new Integer(1); - retryLimitTracker.put(id, firstRetryCount); - } - - return isRetryAllowed; - } - - /** - * Process store document result. - * - * @param esPutResult the es put result - * @param esGetResult the es get result - * @param ae the ae - */ - private void processStoreDocumentResult(NetworkTransaction esPutResult, - NetworkTransaction esGetResult, AggregationEntity ae) { - - OperationResult or = esPutResult.getOperationResult(); - - if (!or.wasSuccessful()) { - if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { - - if (shouldAllowRetry(ae.getId())) { - esWorkOnHand.incrementAndGet(); - - RetryAggregationEntitySyncContainer rsc = - new RetryAggregationEntitySyncContainer(esGetResult, ae); - retryQueue.push(rsc); - - String message = "Store document failed during aggregation entity synchronization" - + " due to version conflict. Entity will be re-synced."; - LOG.warn(AaiUiMsgs.ERROR_GENERIC, message); - } - } else { - String message = - "Store document failed during aggregation entity synchronization with result code " - + or.getResultCode() + " and result message " + or.getResult(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - } - - /** - * Sync entity types. 
- */ - private void syncEntityTypes() { - - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); - } else { - if (result == null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, - linkDescriptor.getSelfLink()); - } else { - updateActiveInventoryCounters(result); - fetchDocumentForUpsert(result); - } - } - }); - } - - } - - } - - /** - * Fetch document for upsert. - * - * @param txn the txn - */ - private void fetchDocumentForUpsert(NetworkTransaction txn) { - // modified - if (!txn.getOperationResult().wasSuccessful()) { - String message = "Self link failure. 
Result - " + txn.getOperationResult().getResult(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - return; - } - - try { - final String jsonResult = txn.getOperationResult().getResult(); - if (jsonResult != null && jsonResult.length() > 0) { - - AggregationEntity ae = new AggregationEntity(oxmModelLoader); - ae.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); - populateAggregationEntityDocument(ae, jsonResult, txn.getDescriptor()); - ae.deriveFields(); - - String link = null; - try { - link = getElasticFullUrl("/" + ae.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.GET); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, ae); - } - }); - } - } - - } catch (JsonProcessingException exc) { - // TODO -> LOG, waht should be logged here? - } catch (IOException exc) { - // TODO -> LOG, waht should be logged here? - } - } - - - /** - * Populate aggregation entity document. - * - * @param doc the doc - * @param result the result - * @param resultDescriptor the result descriptor - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. 
- */ - protected void populateAggregationEntityDocument(AggregationEntity doc, String result, - OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { - doc.setEntityType(resultDescriptor.getEntityName()); - JsonNode entityNode = mapper.readTree(result); - Map<String, Object> map = mapper.convertValue(entityNode, Map.class); - doc.copyAttributeKeyValuePair(map); - } - - /** - * Process entity type self links. - * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - String message = - "Could not deserialize JSON (representing operation result) as node tree. " + - "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); - return; - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - OxmEntityDescriptor descriptor = null; - - if (resourceType != null && resourceLink != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(resourceType); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - - selflinks.add(new SelfLinkDescriptor(resourceLink, 
SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType)); - - - } - } - } - } - - } - - /* - * (non-Javadoc) - * - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - this.syncDurationInMs = -1; - syncStartedTimeStampInMs = System.currentTimeMillis(); - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "AggregationSynchronizer", "", "Sync", ""); - - return collectAllTheWork(); - } - - @Override - public SynchronizerState getState() { - - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - - } - - /* - * (non-Javadoc) - * - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return getStatReport(syncDurationInMs, showFinalReport); - } - - public String getEntityType() { - return entityType; - } - - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - /* - * (non-Javadoc) - * - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " - + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); - } - - if (totalWorkOnHand > 0 || !allWorkEnumerated) { - return false; - } - - this.syncInProgress = false; - - return true; - } - - /* - * (non-Javadoc) - * - * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() - */ - @Override - public void clearCache() { - - if (syncInProgress) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, - "Autosuggestion Entity 
Summarizer in progress, request to clear cache ignored"); - return; - } - - super.clearCache(); - this.resetCounters(); - if (entityCounters != null) { - entityCounters.clear(); - } - - allWorkEnumerated = false; - - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/AutosuggestionSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/AutosuggestionSynchronizer.java deleted file mode 100644 index ae36240..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/AutosuggestionSynchronizer.java +++ /dev/null @@ -1,733 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.openecomp.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Deque; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.cl.mdc.MdcContext; -import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; -import org.openecomp.sparky.dal.rest.HttpMethod; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration; -import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.openecomp.sparky.synchronizer.entity.SuggestionSearchEntity; -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchRetrieval; -import org.openecomp.sparky.util.NodeUtils; -import org.openecomp.sparky.util.SuggestionsPermutation; -import org.slf4j.MDC; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; - -/** - * The Class 
AutosuggestionSynchronizer. - */ -public class AutosuggestionSynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - private class RetrySuggestionEntitySyncContainer { - NetworkTransaction txn; - SuggestionSearchEntity ssec; - - /** - * Instantiates a new RetrySuggestionEntitySyncContainer. - * - * @param txn the txn - * @param icer the icer - */ - public RetrySuggestionEntitySyncContainer(NetworkTransaction txn, SuggestionSearchEntity icer) { - this.txn = txn; - this.ssec = icer; - } - - public NetworkTransaction getNetworkTransaction() { - return txn; - } - - public SuggestionSearchEntity getSuggestionSearchEntity() { - return ssec; - } - } - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(AutosuggestionSynchronizer.class); - private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; - - private boolean allWorkEnumerated; - private Deque<SelfLinkDescriptor> selflinks; - private ConcurrentHashMap<String, AtomicInteger> entityCounters; - private boolean syncInProgress; - private Map<String, String> contextMap; - protected ExecutorService esPutExecutor; - private Deque<RetrySuggestionEntitySyncContainer> retryQueue; - private Map<String, Integer> retryLimitTracker; - - /** - * Instantiates a new historical entity summarizer. 
- * - * @param indexName the index name - * @throws Exception the exception - */ - public AutosuggestionSynchronizer(String indexName) throws Exception { - super(LOG, "ASES-" + indexName.toUpperCase(), 2, 5, 5, indexName); // multiple Autosuggestion - // Entity Synchronizer will - // run for different indices - - this.allWorkEnumerated = false; - this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); - this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>(); - this.synchronizerName = "Autosuggestion Entity Synchronizer"; - this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); - this.syncInProgress = false; - this.contextMap = MDC.getCopyOfContextMap(); - this.esPutExecutor = NodeUtils.createNamedExecutor("SUES-ES-PUT", 5, LOG); - this.syncDurationInMs = -1; - } - - /** - * Collect all the work. - * - * @return the operation state - */ - private OperationState collectAllTheWork() { - final Map<String, String> contextMap = MDC.getCopyOfContextMap(); - Map<String, OxmEntityDescriptor> descriptorMap = - oxmModelLoader.getSuggestionSearchEntityDescriptors(); - - if (descriptorMap.isEmpty()) { - LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES); - LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES); - return OperationState.ERROR; - } - - Collection<String> syncTypes = descriptorMap.keySet(); - - try { - - /* - * launch a parallel async thread to process the documents for each entity-type (to max the of - * the configured executor anyway) - */ - - aaiWorkOnHand.set(syncTypes.size()); - - for (String key : syncTypes) { - - supplyAsync(new Supplier<Void>() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key); - aaiWorkOnHand.decrementAndGet(); - processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - // TODO -> LOG, what should be logged here? 
- } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "An error occurred getting data from AAI. Error = " + error.getMessage()); - } - }); - - } - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - allWorkEnumerated = true; - syncEntityTypes(); - - while (!isSyncDone()) { - performRetrySync(); - Thread.sleep(1000); - } - - /* - * Make sure we don't hang on to retries that failed which could cause issues during future - * syncs - */ - retryLimitTracker.clear(); - - } catch (Exception exc) { - // TODO -> LOG, waht should be logged here? - } - - return OperationState.OK; - - } - - /* - * (non-Javadoc) - * - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - this.syncDurationInMs = -1; - syncStartedTimeStampInMs = System.currentTimeMillis(); - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "AutosuggestionSynchronizer", "", "Sync", ""); - - return collectAllTheWork(); - } - - /** - * Process entity type self links. - * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - String message = "Could not deserialize JSON (representing operation result) as node tree. " - + "Operation result = " + jsonResult + ". 
" + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); - return; - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - OxmEntityDescriptor descriptor = null; - - if (resourceType != null && resourceLink != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(resourceType); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - selflinks.add(new SelfLinkDescriptor(resourceLink, - SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType)); - - - } - } - } - } - } - - /** - * Sync entity types. 
- */ - private void syncEntityTypes() { - - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); - } else { - if (result == null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, - linkDescriptor.getSelfLink()); - } else { - updateActiveInventoryCounters(result); - fetchDocumentForUpsert(result); - } - } - }); - } - - } - - } - /* - * Return a set of valid suggestion attributes for the provided entityName - * that are present in the JSON - * @param node JSON node in which the attributes should be found - * @param entityName Name of the entity - * @return List of all valid suggestion attributes(key's) - */ - public List<String> getSuggestionFromReponse(JsonNode node, String entityName) { - List<String> suggestableAttr = new ArrayList<String>(); - HashMap<String, String> desc = oxmModelLoader.getOxmModel().get(entityName); - String attr = desc.get("suggestibleAttributes"); - suggestableAttr = Arrays.asList(attr.split(",")); - List<String> suggestableValue = new ArrayList<>(); - for 
(String attribute : suggestableAttr) { - if (node.get(attribute) != null && node.get(attribute).asText().length() > 0) { - suggestableValue.add(attribute); - } - } - return suggestableValue; - } - - /** - * Fetch all the documents for upsert. Based on the number of permutations that are available the - * number of documents will be different - * - * @param txn the txn - */ - private void fetchDocumentForUpsert(NetworkTransaction txn) { - if (!txn.getOperationResult().wasSuccessful()) { - String message = "Self link failure. Result - " + txn.getOperationResult().getResult(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - return; - } - try { - final String jsonResult = txn.getOperationResult().getResult(); - - if (jsonResult != null && jsonResult.length() > 0) { - - // Step 1: Calculate the number of possible permutations of attributes - String entityName = txn.getDescriptor().getEntityName(); - JsonNode entityNode = mapper.readTree(jsonResult); - - SuggestionsPermutation suggPermutation = new SuggestionsPermutation(); - ArrayList<ArrayList<String>> uniqueLists = suggPermutation - .getSuggestionsPermutation(getSuggestionFromReponse(entityNode, entityName)); - - // Now we have a list of all possible permutations for the status that are - // defined for this entity type. Try inserting a document for every combination. 
- for (ArrayList<String> uniqueList : uniqueLists) { - SuggestionSearchEntity sse = new SuggestionSearchEntity(oxmModelLoader); - sse.setSuggestableAttr(uniqueList); - sse.setPayloadFromResponse(entityNode); - sse.setLink(txn.getLink()); - sse.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); - populateSuggestionSearchEntityDocument(sse, jsonResult, txn); - // The unique id for the document will be created at derive fields - sse.deriveFields(); - // Insert the document only if it has valid statuses - if (sse.isSuggestableDoc()) { - String link = null; - try { - link = getElasticFullUrl("/" + sse.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.GET); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, sse); - } - }); - } - } - } - } - } catch (JsonProcessingException exc) { - // TODO -> LOG, waht should be logged here? - } catch (IOException exc) { - // TODO -> LOG, waht should be logged here? 
- } - } - - protected void populateSuggestionSearchEntityDocument(SuggestionSearchEntity sse, String result, - NetworkTransaction txn) throws JsonProcessingException, IOException { - - OxmEntityDescriptor resultDescriptor = txn.getDescriptor(); - - sse.setEntityType(resultDescriptor.getEntityName()); - - JsonNode entityNode = mapper.readTree(result); - - List<String> primaryKeyValues = new ArrayList<String>(); - String pkeyValue = null; - - for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { - pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); - if (pkeyValue != null) { - primaryKeyValues.add(pkeyValue); - } else { - String message = "populateSuggestionSearchEntityDocument()," - + " pKeyValue is null for entityType = " + resultDescriptor.getEntityName(); - LOG.warn(AaiUiMsgs.WARN_GENERIC, message); - } - } - - final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); - sse.setEntityPrimaryKeyValue(primaryCompositeKeyValue); - sse.generateSuggestionInputPermutations(); - } - - protected void performDocumentUpsert(NetworkTransaction esGetTxn, SuggestionSearchEntity sse) { - /** - * <p> - * <ul> - * As part of the response processing we need to do the following: - * <li>1. Extract the version (if present), it will be the ETAG when we use the - * Search-Abstraction-Service - * <li>2. 
Spawn next task which is to do the PUT operation into elastic with or with the version - * tag - * <li>a) if version is null or RC=404, then standard put, no _update with version tag - * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic - * </ul> - * </p> - */ - String link = null; - try { - link = getElasticFullUrl("/" + sse.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - return; - } - - boolean wasEntryDiscovered = false; - if (esGetTxn.getOperationResult().getResultCode() == 404) { - LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, sse.getEntityPrimaryKeyValue()); - } else if (esGetTxn.getOperationResult().getResultCode() == 200) { - wasEntryDiscovered = true; - } else { - /* - * Not being a 200 does not mean a failure. eg 201 is returned for created. and 500 for es not - * found TODO -> Should we return. - */ - LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, - String.valueOf(esGetTxn.getOperationResult().getResultCode())); - return; - } - // Insert a new document only if the paylod is different. 
- // This is determined by hashing the payload and using it as a id for the document - // - if (!wasEntryDiscovered) { - try { - String jsonPayload = null; - - jsonPayload = sse.getIndexDocumentJson(); - if (link != null && jsonPayload != null) { - - NetworkTransaction updateElasticTxn = new NetworkTransaction(); - updateElasticTxn.setLink(link); - updateElasticTxn.setEntityType(esGetTxn.getEntityType()); - updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); - updateElasticTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), - esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = "Suggestion search entity sync UPDATE PUT error - " - + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, sse); - } - }); - } - } catch (Exception exc) { - String message = - "Exception caught during suggestion search entity sync PUT operation. Message - " - + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); - } - } - } - - private void processStoreDocumentResult(NetworkTransaction esPutResult, - NetworkTransaction esGetResult, SuggestionSearchEntity sse) { - - OperationResult or = esPutResult.getOperationResult(); - - if (!or.wasSuccessful()) { - if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { - - if (shouldAllowRetry(sse.getId())) { - esWorkOnHand.incrementAndGet(); - - RetrySuggestionEntitySyncContainer rssec = - new RetrySuggestionEntitySyncContainer(esGetResult, sse); - retryQueue.push(rssec); - - String message = "Store document failed during suggestion search entity synchronization" - + " due to version conflict. 
Entity will be re-synced."; - LOG.warn(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); - } - } else { - String message = - "Store document failed during suggestion search entity synchronization with result code " - + or.getResultCode() + " and result message " + or.getResult(); - LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); - } - } - } - - /** - * Perform retry sync. - */ - private void performRetrySync() { - while (retryQueue.peek() != null) { - - RetrySuggestionEntitySyncContainer susc = retryQueue.poll(); - if (susc != null) { - - SuggestionSearchEntity sus = susc.getSuggestionSearchEntity(); - NetworkTransaction txn = susc.getNetworkTransaction(); - - String link = null; - try { - /* - * In this retry flow the se object has already derived its fields - */ - link = getElasticFullUrl("/" + sus.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction retryTransaction = new NetworkTransaction(); - retryTransaction.setLink(link); - retryTransaction.setEntityType(txn.getEntityType()); - retryTransaction.setDescriptor(txn.getDescriptor()); - retryTransaction.setOperationType(HttpMethod.GET); - - /* - * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already - * called incrementAndGet when queuing the failed PUT! - */ - - supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, sus); - } - }); - } - - } - } - } - - /** - * Should allow retry. 
- * - * @param id the id - * @return true, if successful - */ - private boolean shouldAllowRetry(String id) { - boolean isRetryAllowed = true; - if (retryLimitTracker.get(id) != null) { - Integer currentCount = retryLimitTracker.get(id); - if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { - isRetryAllowed = false; - String message = "Searchable entity re-sync limit reached for " + id - + ", re-sync will no longer be attempted for this entity"; - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } else { - Integer newCount = new Integer(currentCount.intValue() + 1); - retryLimitTracker.put(id, newCount); - } - } else { - Integer firstRetryCount = new Integer(1); - retryLimitTracker.put(id, firstRetryCount); - } - - return isRetryAllowed; - } - - - - @Override - public SynchronizerState getState() { - - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - - } - - /* - * (non-Javadoc) - * - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return getStatReport(syncDurationInMs, showFinalReport); - } - - /* - * (non-Javadoc) - * - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " - + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); - } - - if (totalWorkOnHand > 0 || !allWorkEnumerated) { - return false; - } - - this.syncInProgress = false; - - return true; - } - - /* - * (non-Javadoc) - * - * @see 
org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() - */ - @Override - public void clearCache() { - - if (syncInProgress) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, - "Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); - return; - } - - super.clearCache(); - this.resetCounters(); - if (entityCounters != null) { - entityCounters.clear(); - } - - allWorkEnumerated = false; - - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/CrossEntityReferenceSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/CrossEntityReferenceSynchronizer.java deleted file mode 100644 index 1e3e85c..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/CrossEntityReferenceSynchronizer.java +++ /dev/null @@ -1,887 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.openecomp.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Deque; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ExecutorService; -import java.util.function.Supplier; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.sparky.config.oxm.CrossEntityReference; -import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; -import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.openecomp.sparky.dal.rest.HttpMethod; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration; -import org.openecomp.sparky.synchronizer.entity.IndexableCrossEntityReference; -import org.openecomp.sparky.synchronizer.entity.MergableEntity; -import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchRetrieval; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchUpdate; -import org.openecomp.sparky.util.NodeUtils; -import org.slf4j.MDC; - -import org.openecomp.cl.mdc.MdcContext; -import 
com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.node.ArrayNode; - -/** - * The Class CrossEntityReferenceSynchronizer. - */ -public class CrossEntityReferenceSynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - /** - * The Class RetryCrossEntitySyncContainer. - */ - private class RetryCrossEntitySyncContainer { - NetworkTransaction txn; - IndexableCrossEntityReference icer; - - /** - * Instantiates a new retry cross entity sync container. - * - * @param txn the txn - * @param icer the icer - */ - public RetryCrossEntitySyncContainer(NetworkTransaction txn, - IndexableCrossEntityReference icer) { - this.txn = txn; - this.icer = icer; - } - - public NetworkTransaction getNetworkTransaction() { - return txn; - } - - public IndexableCrossEntityReference getIndexableCrossEntityReference() { - return icer; - } - } - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(CrossEntityReferenceSynchronizer.class); - - private static final String SERVICE_INSTANCE = "service-instance"; - private Deque<SelfLinkDescriptor> selflinks; - private Deque<RetryCrossEntitySyncContainer> retryQueue; - private Map<String, Integer> retryLimitTracker; - private boolean isAllWorkEnumerated; - protected ExecutorService esPutExecutor; - protected ActiveInventoryConfig aaiConfig; - - /** - * Instantiates a new cross entity reference synchronizer. 
- * - * @param indexName the index name - * @throws Exception the exception - */ - public CrossEntityReferenceSynchronizer(String indexName, ActiveInventoryConfig aaiConfig) throws Exception { - super(LOG, "CERS", 2, 5, 5, indexName); - this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); - this.retryQueue = new ConcurrentLinkedDeque<RetryCrossEntitySyncContainer>(); - this.retryLimitTracker = new ConcurrentHashMap<String, Integer>(); - this.synchronizerName = "Cross Reference Entity Synchronizer"; - this.isAllWorkEnumerated = false; - this.esPutExecutor = NodeUtils.createNamedExecutor("CERS-ES-PUT", 5, LOG); - this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors( - oxmModelLoader.getCrossReferenceEntityDescriptors()); - this.esEntityStats.initializeCountersFromOxmEntityDescriptors( - oxmModelLoader.getCrossReferenceEntityDescriptors()); - this.aaiConfig = aaiConfig; - this.syncDurationInMs = -1; - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - this.syncDurationInMs = -1; - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "CrossEntitySynchronizer", "", "Sync", ""); - - resetCounters(); - syncStartedTimeStampInMs = System.currentTimeMillis(); - launchSyncFlow(); - return OperationState.OK; - } - - @Override - public SynchronizerState getState() { - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return getStatReport(syncDurationInMs, showFinalReport); - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - 
this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (totalWorkOnHand > 0 || !isAllWorkEnumerated) { - return false; - } - - return true; - } - - /** - * Launch sync flow. - * - * @return the operation state - */ - private OperationState launchSyncFlow() { - final Map<String,String> contextMap = MDC.getCopyOfContextMap(); - Map<String, OxmEntityDescriptor> descriptorMap = - oxmModelLoader.getCrossReferenceEntityDescriptors(); - - if (descriptorMap.isEmpty()) { - LOG.error(AaiUiMsgs.ERROR_LOADING_OXM); - - return OperationState.ERROR; - } - - Collection<String> syncTypes = descriptorMap.keySet(); - - try { - - /* - * launch a parallel async thread to process the documents for each entity-type (to max the of - * the configured executor anyway) - */ - - aaiWorkOnHand.set(syncTypes.size()); - - for (String key : syncTypes) { - - supplyAsync(new Supplier<Void>() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key); - aaiWorkOnHand.decrementAndGet(); - processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - // TODO -> LOG, what should be logged here? 
- } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); - } - }); - } - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - isAllWorkEnumerated = true; - performSync(); - - while (!isSyncDone()) { - performRetrySync(); - Thread.sleep(1000); - } - - /* - * Make sure we don't hang on to retries that failed which could cause issues during future - * syncs - */ - retryLimitTracker.clear(); - - } catch (Exception exc) { - // TODO -> LOG, waht should be logged here? - } - - return OperationState.OK; - } - - /** - * Perform sync. - */ - private void performSync() { - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - if (descriptor.hasCrossEntityReferences()) { - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink() + linkDescriptor.getDepthModifier()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.SELF_LINK_GET, error.getLocalizedMessage()); - } else { - if (result == null) { - 
LOG.error(AaiUiMsgs.SELF_LINK_CROSS_REF_SYNC); - } else { - updateActiveInventoryCounters(result); - fetchDocumentForUpsert(result); - } - } - }); - } - } - } - } - - /** - * Process entity type self links. - * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - String message = "Could not deserialize JSON (representing operation result) as node tree. " - + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); - return; - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - OxmEntityDescriptor descriptor = null; - - if (resourceType != null && resourceLink != null) { - descriptor = oxmModelLoader.getEntityDescriptor(resourceType); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - if (descriptor.hasCrossEntityReferences()) { - selflinks.add(new SelfLinkDescriptor( - resourceLink,SynchronizerConfiguration.DEPTH_ALL_MODIFIER, resourceType)); - } - } - } - } - } - } - - - - /** - * By providing the entity type and a json node for the entity, determine the - * primary key name(s) + primary key value(s) sufficient to build an entity 
query string - * of the following format: - * - * <entityType>.<primaryKeyNames>:<primaryKeyValues> - * - * @return - a composite string in the above format or null - */ - private String determineEntityQueryString(String entityType, JsonNode entityJsonNode) { - - OxmEntityDescriptor entityDescriptor = - oxmModelLoader.getEntityDescriptor(entityType); - - String queryString = null; - - if ( entityDescriptor != null ) { - - final List<String> primaryKeyNames = entityDescriptor.getPrimaryKeyAttributeName(); - final List<String> keyValues = new ArrayList<String>(); - NodeUtils.extractFieldValuesFromObject(entityJsonNode, primaryKeyNames, keyValues); - - queryString = entityType + "." + NodeUtils.concatArray(primaryKeyNames,"/") + ":" + NodeUtils.concatArray(keyValues); - - } - - return queryString; - - - } - - /** - * Fetch document for upsert. - * - * @param txn the txn - */ - private void fetchDocumentForUpsert(NetworkTransaction txn) { - - if (!txn.getOperationResult().wasSuccessful()) { - LOG.error(AaiUiMsgs.SELF_LINK_GET, txn.getOperationResult().getResult()); - return; - } - - if (txn.getDescriptor().hasCrossEntityReferences()) { - - final String jsonResult = txn.getOperationResult().getResult(); - - if (jsonResult != null && jsonResult.length() > 0) { - - /** - * Here's what we are going to do: - * - * <li>Extract primary key name and value from the parent type. - * <li>Extract the primary key and value from the nested child instance. - * <li>Build a generic query to discover the self-link for the nested-child-instance using - * parent and child. - * <li>Set the self-link on the child. - * <li>Generate the id that will allow the elastic-search upsert to work. - * <li>Rinse and repeat. 
- */ - - OxmEntityDescriptor parentEntityDescriptor = - oxmModelLoader.getEntityDescriptor(txn.getEntityType()); - - if ( parentEntityDescriptor != null ) { - - CrossEntityReference cerDefinition = parentEntityDescriptor.getCrossEntityReference(); - - if (cerDefinition != null) { - JsonNode convertedNode = null; - try { - convertedNode = NodeUtils.convertJsonStrToJsonNode(txn.getOperationResult().getResult()); - - final String parentEntityQueryString = determineEntityQueryString(txn.getEntityType(), convertedNode); - - List<String> extractedParentEntityAttributeValues = new ArrayList<String>(); - - NodeUtils.extractFieldValuesFromObject(convertedNode, - cerDefinition.getReferenceAttributes(), - extractedParentEntityAttributeValues); - - List<JsonNode> nestedTargetEntityInstances = new ArrayList<JsonNode>(); - NodeUtils.extractObjectsByKey(convertedNode, cerDefinition.getTargetEntityType(), - nestedTargetEntityInstances); - - for (JsonNode targetEntityInstance : nestedTargetEntityInstances) { - - OxmEntityDescriptor cerDescriptor = - oxmModelLoader.getSearchableEntityDescriptor(cerDefinition.getTargetEntityType()); - - if (cerDescriptor != null) { - - String childEntityType = cerDefinition.getTargetEntityType(); - - List<String> childPrimaryKeyNames = cerDescriptor.getPrimaryKeyAttributeName(); - - List<String> childKeyValues = new ArrayList<String>(); - NodeUtils.extractFieldValuesFromObject(targetEntityInstance, childPrimaryKeyNames, childKeyValues); - - String childEntityQueryKeyString = childEntityType + "." 
+ NodeUtils.concatArray(childPrimaryKeyNames,"/") + ":" + NodeUtils.concatArray(childKeyValues); - - /** - * Build generic-query to query child instance self-link from AAI - */ - List<String> orderedQueryKeyParams = new ArrayList<String>(); - if (SERVICE_INSTANCE.equals(childEntityType)) { - orderedQueryKeyParams.clear(); - orderedQueryKeyParams.add(childEntityQueryKeyString); - } else { - orderedQueryKeyParams.add(parentEntityQueryString); - orderedQueryKeyParams.add(childEntityQueryKeyString); - } - String genericQueryStr = null; - try { - genericQueryStr = aaiDataProvider.getGenericQueryForSelfLink(childEntityType, orderedQueryKeyParams); - - if (genericQueryStr != null) { - aaiWorkOnHand.incrementAndGet(); - OperationResult aaiQueryResult = aaiDataProvider.queryActiveInventoryWithRetries( - genericQueryStr, "application/json", - aaiConfig.getAaiRestConfig().getNumRequestRetries()); - aaiWorkOnHand.decrementAndGet(); - if (aaiQueryResult!= null && aaiQueryResult.wasSuccessful()) { - - Collection<JsonNode> entityLinks = new ArrayList<JsonNode>(); - JsonNode genericQueryResult = null; - try { - genericQueryResult = NodeUtils.convertJsonStrToJsonNode(aaiQueryResult.getResult()); - - if ( genericQueryResult != null ) { - - NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link", entityLinks); - - String selfLink = null; - - if (entityLinks.size() != 1) { - /** - * an ambiguity exists where we can't reliably determine the self - * link, this should be a permanent error - */ - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY, String.valueOf(entityLinks.size())); - } else { - selfLink = ((JsonNode) entityLinks.toArray()[0]).asText(); - - if (!cerDescriptor.getSearchableAttributes().isEmpty()) { - - IndexableCrossEntityReference icer = - getPopulatedDocument(targetEntityInstance, cerDescriptor); - - for (String parentCrossEntityReferenceAttributeValue : extractedParentEntityAttributeValues) { - icer.addCrossEntityReferenceValue( - 
parentCrossEntityReferenceAttributeValue); - } - - icer.setLink(ActiveInventoryConfig.extractResourcePath(selfLink)); - - icer.deriveFields(); - - String link = null; - try { - link = getElasticFullUrl("/" + icer.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.GET); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, icer); - } - }); - } - } - } - } else { - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION); - } - - } catch (Exception exc) { - LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), exc.getLocalizedMessage()); - } - - } else { - String message = "Entity sync failed because AAI query failed with error "; - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); - } - - } else { - String message = "Entity Sync failed because generic query str could not be determined."; - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); - } - } catch (Exception exc) { - String message = "Failed to sync entity because generation of generic query failed with error = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); - } - - } - } - - } catch (IOException ioe) { - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, ioe.getMessage()); - } - } - - } else { - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND, txn.getEntityType()); - } - } - } - } - - /** - * Perform document 
upsert. - * - * @param esGetResult the es get result - * @param icer the icer - */ - protected void performDocumentUpsert(NetworkTransaction esGetResult, - IndexableCrossEntityReference icer) { - /** - * <p> - * <ul> - * As part of the response processing we need to do the following: - * <li>1. Extract the version (if present), it will be the ETAG when we use the - * Search-Abstraction-Service - * <li>2. Spawn next task which is to do the PUT operation into elastic with or with the version - * tag - * <li>a) if version is null or RC=404, then standard put, no _update with version tag - * <li>b) if version != null, do PUT with _update?version= (versionNumber) in the URI to elastic - * </ul> - * </p> - */ - String link = null; - try { - link = getElasticFullUrl("/" + icer.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - return; - } - - boolean wasEntryDiscovered = false; - String versionNumber = null; - if (esGetResult.getOperationResult().getResultCode() == 404) { - LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, icer.getEntityPrimaryKeyValue()); - } else if (esGetResult.getOperationResult().getResultCode() == 200) { - wasEntryDiscovered = true; - try { - versionNumber = NodeUtils.extractFieldValueFromObject( - NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), - "_version"); - } catch (IOException exc) { - LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "version Number", - icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); - return; - } - } else { - /* - * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we - * return. 
- */ - LOG.info(AaiUiMsgs.ES_OPERATION_RETURN_CODE, - String.valueOf(esGetResult.getOperationResult().getResultCode())); - return; - } - - try { - String jsonPayload = null; - if (wasEntryDiscovered) { - try { - ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>(); - NodeUtils.extractObjectsByKey( - NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), - "_source", sourceObject); - - if (!sourceObject.isEmpty()) { - String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); - MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); - ObjectReader updater = mapper.readerForUpdating(me); - MergableEntity merged = updater.readValue(icer.getIndexDocumentJson()); - jsonPayload = mapper.writeValueAsString(merged); - } - } catch (IOException exc) { - LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "source value", - icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); - return; - } - } else { - jsonPayload = icer.getIndexDocumentJson(); - } - - if (wasEntryDiscovered) { - if (versionNumber != null && jsonPayload != null) { - - String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), - ElasticSearchConfig.getConfig().getType(), icer.getId(), versionNumber, jsonPayload); - - NetworkTransaction transactionTracker = new NetworkTransaction(); - transactionTracker.setEntityType(esGetResult.getEntityType()); - transactionTracker.setDescriptor(esGetResult.getDescriptor()); - transactionTracker.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), - requestPayload, esDataProvider, transactionTracker), esPutExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - 
processStoreDocumentResult(result, esGetResult, icer); - } - }); - } - - } else { - if (link != null && jsonPayload != null) { - - NetworkTransaction updateElasticTxn = new NetworkTransaction(); - updateElasticTxn.setLink(link); - updateElasticTxn.setEntityType(esGetResult.getEntityType()); - updateElasticTxn.setDescriptor(esGetResult.getDescriptor()); - updateElasticTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), - esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetResult, icer); - } - }); - } - } - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, exc.getLocalizedMessage()); - } - } - - /** - * Process store document result. - * - * @param esPutResult the es put result - * @param esGetResult the es get result - * @param icer the icer - */ - private void processStoreDocumentResult(NetworkTransaction esPutResult, - NetworkTransaction esGetResult, IndexableCrossEntityReference icer) { - - OperationResult or = esPutResult.getOperationResult(); - - if (!or.wasSuccessful()) { - if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { - - if (shouldAllowRetry(icer.getId())) { - - esWorkOnHand.incrementAndGet(); - - RetryCrossEntitySyncContainer rsc = new RetryCrossEntitySyncContainer(esGetResult, icer); - retryQueue.push(rsc); - - LOG.warn(AaiUiMsgs.ES_CROSS_REF_SYNC_VERSION_CONFLICT); - } - } else { - LOG.error(AaiUiMsgs.ES_CROSS_REF_SYNC_FAILURE, String.valueOf(or.getResultCode()), - or.getResult()); - } - } - } - - /** - * Perform retry sync. 
- */ - private void performRetrySync() { - while (retryQueue.peek() != null) { - - RetryCrossEntitySyncContainer rsc = retryQueue.poll(); - if (rsc != null) { - - IndexableCrossEntityReference icer = rsc.getIndexableCrossEntityReference(); - NetworkTransaction txn = rsc.getNetworkTransaction(); - - String link = null; - try { - // In this retry flow the icer object has already - // derived its fields - link = getElasticFullUrl("/" + icer.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction retryTransaction = new NetworkTransaction(); - retryTransaction.setLink(link); - retryTransaction.setEntityType(txn.getEntityType()); - retryTransaction.setDescriptor(txn.getDescriptor()); - retryTransaction.setOperationType(HttpMethod.GET); - - /* - * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow and we did - * that for this request already when queuing the failed PUT! - */ - - supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, icer); - } - }); - } - - } - } - } - - /** - * Should allow retry. 
- * - * @param id the id - * @return true, if successful - */ - private boolean shouldAllowRetry(String id) { - boolean isRetryAllowed = true; - if (retryLimitTracker.get(id) != null) { - Integer currentCount = retryLimitTracker.get(id); - if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { - isRetryAllowed = false; - LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_RESYNC_LIMIT, id); - } else { - Integer newCount = new Integer(currentCount.intValue() + 1); - retryLimitTracker.put(id, newCount); - } - - } else { - Integer firstRetryCount = new Integer(1); - retryLimitTracker.put(id, firstRetryCount); - } - - return isRetryAllowed; - } - - /** - * Gets the populated document. - * - * @param entityNode the entity node - * @param resultDescriptor the result descriptor - * @return the populated document - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. - */ - protected IndexableCrossEntityReference getPopulatedDocument(JsonNode entityNode, - OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { - - IndexableCrossEntityReference icer = new IndexableCrossEntityReference(oxmModelLoader); - - icer.setEntityType(resultDescriptor.getEntityName()); - - List<String> primaryKeyValues = new ArrayList<String>(); - String pkeyValue = null; - - for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { - pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); - if (pkeyValue != null) { - primaryKeyValues.add(pkeyValue); - } else { - LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); - } - } - - final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); - icer.setEntityPrimaryKeyValue(primaryCompositeKeyValue); - - return icer; - - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/ElasticSearchIndexCleaner.java 
b/src/main/java/org/openecomp/sparky/synchronizer/ElasticSearchIndexCleaner.java deleted file mode 100644 index 9ef4fe6..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/ElasticSearchIndexCleaner.java +++ /dev/null @@ -1,642 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.openecomp.sparky.synchronizer; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; - -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.dal.rest.RestDataProvider; -import org.openecomp.sparky.synchronizer.entity.ObjectIdCollection; -import org.openecomp.sparky.synchronizer.entity.SearchableEntity; -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.sparky.logging.AaiUiMsgs; - -/** - * The Class ElasticSearchIndexCleaner. - */ -public class ElasticSearchIndexCleaner implements IndexCleaner { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(ElasticSearchIndexCleaner.class); - - private static final String BULK_OP_LINE_TEMPLATE = "%s\n"; - private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; - - private ObjectIdCollection before; - private ObjectIdCollection after; - - private String host; - private String port; - - private String indexName; - private String indexType; - private int scrollContextTimeToLiveInMinutes; - private int numItemsToGetBulkRequest; - - private RestDataProvider restDataProvider; - private ObjectMapper mapper; - - /** - * Instantiates a new elastic search index cleaner. 
- * - * @param restDataProvider the rest data provider - * @param indexName the index name - * @param indexType the index type - * @param host the host - * @param port the port - * @param scrollContextTimeToLiveInMinutes the scroll context time to live in minutes - * @param numItemsToGetBulkRequest the num items to get bulk request - */ - protected ElasticSearchIndexCleaner(RestDataProvider restDataProvider, String indexName, - String indexType, String host, String port, int scrollContextTimeToLiveInMinutes, - int numItemsToGetBulkRequest) { - this.restDataProvider = restDataProvider; - this.before = null; - this.after = null; - this.indexName = indexName; - this.indexType = indexType; - this.mapper = new ObjectMapper(); - this.host = host; - this.port = port; - this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes; - this.numItemsToGetBulkRequest = numItemsToGetBulkRequest; - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePreOperationCollection() - */ - @Override - public OperationState populatePreOperationCollection() { - - try { - before = retrieveAllDocumentIdentifiers(); - return OperationState.OK; - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, indexName, exc.getMessage()); - return OperationState.ERROR; - } - - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePostOperationCollection() - */ - @Override - public OperationState populatePostOperationCollection() { - try { - after = retrieveAllDocumentIdentifiers(); - return OperationState.OK; - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, indexName, exc.getMessage()); - return OperationState.ERROR; - } - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexCleaner#performCleanup() - */ - @Override - public OperationState performCleanup() { - // TODO Auto-generated method stub - LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP, indexName); - - int sizeBefore = 
before.getSize(); - int sizeAfter = after.getSize(); - - LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP_SIZE, String.valueOf(sizeBefore), - String.valueOf(sizeAfter)); - - /* - * If the processedImportIds size <= 0, then something has failed in the sync operation and we - * shouldn't do the selective delete right now. - */ - - if (sizeAfter > 0) { - - Collection<String> presyncIds = before.getImportedObjectIds(); - presyncIds.removeAll(after.getImportedObjectIds()); - - try { - LOG.info(AaiUiMsgs.ES_SYNC_SELECTIVE_DELETE, indexName, indexType, - String.valueOf(presyncIds.size())); - - ObjectIdCollection bulkIds = new ObjectIdCollection(); - - Iterator<String> it = presyncIds.iterator(); - int numItemsInBulkRequest = 0; - int numItemsRemainingToBeDeleted = presyncIds.size(); - - while (it.hasNext()) { - - bulkIds.addObjectId(it.next()); - numItemsInBulkRequest++; - - if (numItemsInBulkRequest >= this.numItemsToGetBulkRequest) { - LOG.info(AaiUiMsgs.ES_BULK_DELETE, indexName, String.valueOf(bulkIds.getSize())); - OperationResult bulkDeleteResult = bulkDelete(bulkIds.getImportedObjectIds()); - // pegCountersForElasticBulkDelete(bulkDeleteResult); - numItemsRemainingToBeDeleted -= numItemsInBulkRequest; - numItemsInBulkRequest = 0; - bulkIds.clear(); - } - } - - if (numItemsRemainingToBeDeleted > 0) { - LOG.info(AaiUiMsgs.ES_BULK_DELETE, indexName, String.valueOf(bulkIds.getSize())); - OperationResult bulkDeleteResult = bulkDelete(bulkIds.getImportedObjectIds()); - // pegCountersForElasticBulkDelete(bulkDeleteResult); - } - - - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_BULK_DELETE_ERROR, indexName, exc.getLocalizedMessage()); - - } - } - - return OperationState.OK; - } - - @Override - public String getIndexName() { - return indexName; - } - - public void setIndexName(String indexName) { - this.indexName = indexName; - } - - /** - * Builds the initial scroll request payload. 
- * - * @param numItemsToGetPerRequest the num items to get per request - * @param fieldList the field list - * @return the string - * @throws JsonProcessingException the json processing exception - */ - protected String buildInitialScrollRequestPayload(int numItemsToGetPerRequest, - List<String> fieldList) throws JsonProcessingException { - - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("size", numItemsToGetPerRequest); - - ArrayNode fields = mapper.createArrayNode(); - - for (String f : fieldList) { - fields.add(f); - } - - rootNode.set("fields", fields); - - ObjectNode queryNode = mapper.createObjectNode(); - queryNode.set("match_all", mapper.createObjectNode()); - - rootNode.set("query", queryNode); - - return mapper.writeValueAsString(rootNode); - - } - - /** - * Builds the subsequent scroll context request payload. - * - * @param scrollId the scroll id - * @param contextTimeToLiveInMinutes the context time to live in minutes - * @return the string - * @throws JsonProcessingException the json processing exception - */ - protected String buildSubsequentScrollContextRequestPayload(String scrollId, - int contextTimeToLiveInMinutes) throws JsonProcessingException { - - ObjectNode rootNode = mapper.createObjectNode(); - - rootNode.put("scroll", contextTimeToLiveInMinutes + "m"); - rootNode.put("scroll_id", scrollId); - - return mapper.writeValueAsString(rootNode); - - } - - /** - * Parses the elastic search result. - * - * @param jsonResult the json result - * @return the json node - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. - */ - protected JsonNode parseElasticSearchResult(String jsonResult) - throws JsonProcessingException, IOException { - ObjectMapper mapper = new ObjectMapper(); - return mapper.readTree(jsonResult); - } - - /** - * Lookup index doc. 
- * - * @param ids the ids - * @param docs the docs - * @return the array list - */ - protected ArrayList<SearchableEntity> lookupIndexDoc(ArrayList<String> ids, - List<SearchableEntity> docs) { - ArrayList<SearchableEntity> objs = new ArrayList<SearchableEntity>(); - - if (ids != null && docs != null) { - for (SearchableEntity d : docs) { - if (ids.contains(d.getId())) { - objs.add(d); - } - } - } - - return objs; - } - - /** - * Builds the delete data object. - * - * @param index the index - * @param type the type - * @param id the id - * @return the object node - */ - protected ObjectNode buildDeleteDataObject(String index, String type, String id) { - - ObjectNode indexDocProperties = mapper.createObjectNode(); - - indexDocProperties.put("_index", index); - indexDocProperties.put("_type", type); - indexDocProperties.put("_id", id); - - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.set("delete", indexDocProperties); - - return rootNode; - } - - /** - * This method might appear to be a little strange, and is simply an optimization to take an - * elipsed JsonNode key path and retrieve the node at the end of the path, if it exists. - * - * @param startNode the start node - * @param fieldPath the field path - * @return the node path - */ - protected JsonNode getNodePath(JsonNode startNode, String... fieldPath) { - - JsonNode jsonNode = null; - - for (String field : fieldPath) { - if (jsonNode == null) { - jsonNode = startNode.get(field); - } else { - jsonNode = jsonNode.get(field); - } - - /* - * This is our safety net in case any intermediate path returns a null - */ - - if (jsonNode == null) { - return null; - } - - } - - return jsonNode; - } - - /** - * Gets the full url. - * - * @param resourceUrl the resource url - * @return the full url - */ - private String getFullUrl(String resourceUrl) { - return String.format("http://%s:%s%s", host, port, resourceUrl); - } - - /** - * Retrieve all document identifiers. 
- * - * @return the object id collection - * @throws IOException Signals that an I/O exception has occurred. - */ - public ObjectIdCollection retrieveAllDocumentIdentifiers() throws IOException { - - ObjectIdCollection currentDocumentIds = new ObjectIdCollection(); - - long opStartTimeInMs = System.currentTimeMillis(); - - List<String> fields = new ArrayList<String>(); - fields.add("_id"); - // fields.add("entityType"); - - String scrollRequestPayload = - buildInitialScrollRequestPayload(this.numItemsToGetBulkRequest, fields); - - final String fullUrlStr = getFullUrl("/" + indexName + "/" + indexType + "/_search?scroll=" - + this.scrollContextTimeToLiveInMinutes + "m"); - - OperationResult result = - restDataProvider.doPost(fullUrlStr, scrollRequestPayload, "application/json"); - - if (result.wasSuccessful()) { - - JsonNode rootNode = parseElasticSearchResult(result.getResult()); - - /* - * Check the result for success / failure, and enumerate all the index ids that resulted in - * success, and ignore the ones that failed or log them so we have a record of the failure. - */ - int totalRecordsAvailable = 0; - String scrollId = null; - int numRecordsFetched = 0; - - if (rootNode != null) { - - scrollId = getFieldValue(rootNode, "_scroll_id"); - final String tookStr = getFieldValue(rootNode, "took"); - int tookInMs = (tookStr == null) ? 
0 : Integer.parseInt(tookStr); - boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); - - if (timedOut) { - LOG.error(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "all document Identifiers", - String.valueOf(tookInMs)); - } else { - LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "all document Identifiers", - String.valueOf(tookInMs)); - } - - JsonNode hitsNode = rootNode.get("hits"); - totalRecordsAvailable = Integer.parseInt(hitsNode.get("total").asText()); - - LOG.info(AaiUiMsgs.COLLECT_TOTAL, "all document Identifiers", - String.valueOf(totalRecordsAvailable)); - - /* - * Collect all object ids - */ - - ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); - - Iterator<JsonNode> nodeIterator = hitsArray.iterator(); - - String key = null; - String value = null; - JsonNode jsonNode = null; - - while (nodeIterator.hasNext()) { - - jsonNode = nodeIterator.next(); - - key = getFieldValue(jsonNode, "_id"); - - if (key != null) { - currentDocumentIds.addObjectId(key); - } - - /* - * if (key != null) { - * - * JsonNode fieldsNode = jNode.get("fields"); - * - * if (fieldsNode != null) { - * - * JsonNode entityTypeNode = fieldsNode.get("entityType"); - * - * if (entityTypeNode != null) { ArrayNode aNode = (ArrayNode) entityTypeNode; - * - * if (aNode.size() > 0) { value = aNode.get(0).asText(); objAndtTypesMap.put(key, value); - * numRecordsFetched++; } } } } - */ - - } - - int totalRecordsRemainingToFetch = (totalRecordsAvailable - numRecordsFetched); - - int numRequiredAdditionalFetches = - (totalRecordsRemainingToFetch / this.numItemsToGetBulkRequest); - - /* - * Do an additional fetch for the remaining items (if needed) - */ - - if (totalRecordsRemainingToFetch % numItemsToGetBulkRequest != 0) { - numRequiredAdditionalFetches += 1; - } - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.SYNC_NUMBER_REQ_FETCHES, - String.valueOf(numRequiredAdditionalFetches)); - } - - - for (int x = 0; x < numRequiredAdditionalFetches; x++) { - - if 
(collectItemsFromScrollContext(scrollId, currentDocumentIds) != OperationState.OK) { - // abort the whole thing because now we can't reliably cleanup the orphans. - throw new IOException( - "Failed to collect pre-sync doc collection from index. Aborting operation"); - } - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.SYNC_NUMBER_TOTAL_FETCHES, - String.valueOf(currentDocumentIds.getSize()), - String.valueOf(totalRecordsAvailable)); - } - - } - - } - - } else { - // scroll context get failed, nothing else to do - LOG.error(AaiUiMsgs.ERROR_GENERIC, result.toString()); - } - - LOG.info(AaiUiMsgs.COLLECT_TOTAL_TIME, "all document Identifiers", - String.valueOf((System.currentTimeMillis() - opStartTimeInMs))); - - return currentDocumentIds; - - } - - /** - * Collect items from scroll context. - * - * @param scrollId the scroll id - * @param objectIds the object ids - * @return the operation state - * @throws IOException Signals that an I/O exception has occurred. - */ - private OperationState collectItemsFromScrollContext(String scrollId, - ObjectIdCollection objectIds) throws IOException { - - // ObjectIdCollection documentIdCollection = new ObjectIdCollection(); - - String requestPayload = - buildSubsequentScrollContextRequestPayload(scrollId, scrollContextTimeToLiveInMinutes); - - final String fullUrlStr = getFullUrl("/_search/scroll"); - - OperationResult opResult = - restDataProvider.doPost(fullUrlStr, requestPayload, "application/json"); - - if (opResult.getResultCode() >= 300) { - LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, opResult.getResult()); - return OperationState.ERROR; - } - - JsonNode rootNode = parseElasticSearchResult(opResult.getResult()); - - /* - * Check the result for success / failure, and enumerate all the index ids that resulted in - * success, and ignore the ones that failed or log them so we have a record of the failure. 
- */ - - if (rootNode != null) { - boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); - final String tookStr = getFieldValue(rootNode, "took"); - int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr); - - JsonNode hitsNode = rootNode.get("hits"); - - if (timedOut) { - LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "Scroll Context", String.valueOf(tookInMs)); - } else { - LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "Scroll Context", String.valueOf(tookInMs)); - } - - /* - * Collect all object ids - */ - - ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); - String key = null; - String value = null; - JsonNode jsonNode = null; - - Iterator<JsonNode> nodeIterator = hitsArray.iterator(); - - while (nodeIterator.hasNext()) { - - jsonNode = nodeIterator.next(); - - key = getFieldValue(jsonNode, "_id"); - - if (key != null) { - objectIds.addObjectId(key); - - /* - * JsonNode fieldsNode = jNode.get("fields"); - * - * if (fieldsNode != null) { - * - * JsonNode entityTypeNode = fieldsNode.get("entityType"); - * - * if (entityTypeNode != null) { ArrayNode aNode = (ArrayNode) entityTypeNode; - * - * if (aNode.size() > 0) { value = aNode.get(0).asText(); objectIdsAndTypes.put(key, - * value); } } } } - */ - - } - - } - } else { - // scroll context get failed, nothing else to do - LOG.error(AaiUiMsgs.ERROR_GENERIC, opResult.toString()); - } - - return OperationState.OK; - } - - /** - * Gets the field value. - * - * @param node the node - * @param fieldName the field name - * @return the field value - */ - protected String getFieldValue(JsonNode node, String fieldName) { - - JsonNode field = node.get(fieldName); - - if (field != null) { - return field.asText(); - } - - return null; - - } - - /** - * Bulk delete. - * - * @param docIds the doc ids - * @return the operation result - * @throws IOException Signals that an I/O exception has occurred. 
- */ - public OperationResult bulkDelete(Collection<String> docIds) throws IOException { - - if (docIds == null || docIds.size() == 0) { - LOG.info(AaiUiMsgs.ES_BULK_DELETE_SKIP); - return new OperationResult(500, - "Skipping bulkDelete(); operation because docs to delete list is empty"); - } - - LOG.info(AaiUiMsgs.ES_BULK_DELETE_START, String.valueOf(docIds.size())); - - StringBuilder sb = new StringBuilder(128); - - for (String id : docIds) { - sb.append( - String.format(BULK_OP_LINE_TEMPLATE, buildDeleteDataObject(indexName, indexType, id))); - } - - sb.append("\n"); - - final String fullUrlStr = getFullUrl("/_bulk"); - - return restDataProvider.doPost(fullUrlStr, sb.toString(), "application/x-www-form-urlencoded"); - - } - - /* - - */ - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/GeoSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/GeoSynchronizer.java deleted file mode 100644 index 493f3c9..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/GeoSynchronizer.java +++ /dev/null @@ -1,466 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Deque; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.function.Supplier; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.cl.mdc.MdcContext; -import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.openecomp.sparky.dal.rest.HttpMethod; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.inventory.entity.GeoIndexDocument; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.openecomp.sparky.synchronizer.task.StoreDocumentTask; -import org.openecomp.sparky.util.NodeUtils; -import org.slf4j.MDC; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; - - -/** - * The Class GeoSynchronizer. 
- */ -public class GeoSynchronizer extends AbstractEntitySynchronizer implements IndexSynchronizer { - - private static final Logger LOG = LoggerFactory.getInstance().getLogger(GeoSynchronizer.class); - - private boolean allWorkEnumerated; - private Deque<SelfLinkDescriptor> selflinks; - - private ElasticSearchConfig elasticConfig = null; - private Map<String, OxmEntityDescriptor> geoDescriptorMap = null; - - /** - * Instantiates a new geo synchronizer. - * - * @param indexName the index name - * @throws Exception the exception - */ - public GeoSynchronizer(String indexName) throws Exception { - - super(LOG, "GEO", 2, 5, 5, indexName); - this.allWorkEnumerated = false; - this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); - this.synchronizerName = "Geo Synchronizer"; - this.geoDescriptorMap = oxmModelLoader.getGeoEntityDescriptors(); - this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors(geoDescriptorMap); - this.esEntityStats.initializeCountersFromOxmEntityDescriptors(geoDescriptorMap); - this.syncDurationInMs = -1; - } - - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - resetCounters(); - allWorkEnumerated = false; - syncStartedTimeStampInMs = System.currentTimeMillis(); - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "GeoSynchronizer", "", "Sync", ""); - - collectAllTheWork(); - return OperationState.OK; - } - - - /** - * Collect all the work. 
- * - * @return the operation state - */ - public OperationState collectAllTheWork() { - final Map<String,String> contextMap = MDC.getCopyOfContextMap(); - if (elasticConfig == null) { - try { - elasticConfig = ElasticSearchConfig.getConfig(); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, "Search"); - } - } - - if (geoDescriptorMap.isEmpty()) { - setShouldSkipSync(true); - LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "geo entities"); - return OperationState.ERROR; - } - - Collection<String> syncTypes = geoDescriptorMap.keySet(); - - try { - - /* - * launch a parallel async thread to process the documents for each entity-type (to max the of - * the configured executor anyway) - */ - - aaiWorkOnHand.set(syncTypes.size()); - - for (String key : syncTypes) { - - supplyAsync(new Supplier<Void>() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key); - aaiWorkOnHand.decrementAndGet(); - processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc); - } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); - } - }); - - } - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - allWorkEnumerated = true; - syncEntityTypes(); - - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc); - } - return OperationState.OK; - } - - /** - * Sync entity types. 
- */ - private void syncEntityTypes() { - - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); - } else { - if (result == null) { - LOG.error(AaiUiMsgs.SELF_LINK_GET_NO_RESPONSE, linkDescriptor.getSelfLink()); - } else { - processEntityTypeSelfLinkResult(result); - } - } - }); - } - } - } - - /** - * Process entity type self links. 
- * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, exc); - return; - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - if (resourceType != null && resourceLink != null) { - - if (geoDescriptorMap.containsKey(resourceType)) { - selflinks.add(new SelfLinkDescriptor(resourceLink + "?nodes-only", resourceType)); - } else { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - - } - } - } - } - - } - - /** - * Process entity type self link result. 
- * - * @param txn the txn - */ - private void processEntityTypeSelfLinkResult(NetworkTransaction txn) { - - updateActiveInventoryCounters(txn); - - if (!txn.getOperationResult().wasSuccessful()) { - return; - } - - try { - if (!(txn.getDescriptor().getGeoLatName().isEmpty() - && txn.getDescriptor().getGeoLongName().isEmpty())) { - - GeoIndexDocument geoDoc = new GeoIndexDocument(oxmModelLoader); - - final String jsonResult = txn.getOperationResult().getResult(); - - if (jsonResult != null && jsonResult.length() > 0) { - - populateGeoDocument(geoDoc, jsonResult, txn.getDescriptor(), txn.getLink()); - - if (!geoDoc.isValidGeoDocument()) { - - LOG.info(AaiUiMsgs.GEO_SYNC_IGNORING_ENTITY, geoDoc.getEntityType(), geoDoc.toString()); - - } else { - - String link = null; - try { - link = getElasticFullUrl("/" + geoDoc.getId(), getIndexName(), "default"); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc); - } - - if (link != null) { - - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new StoreDocumentTask(geoDoc, n2, esDataProvider), esExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_STORE_FAILURE, error.getMessage()); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result); - } - }); - } - } - } - } - } catch (JsonProcessingException exc) { - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc); - } catch (IOException exc) { - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc); - } - - return; - } - - - /** - * Process store document result. 
- * - * @param txn the txn - */ - private void processStoreDocumentResult(NetworkTransaction txn) { - - OperationResult or = txn.getOperationResult(); - - if (!or.wasSuccessful()) { - LOG.error(AaiUiMsgs.ES_STORE_FAILURE, or.toString()); - /* - * if(or.getResultCode() != 404 || (or.getResultCode() == 404 && - * !synchronizerConfig.isResourceNotFoundErrorsSupressed())) { logger.error( - * "Skipping failed resource = " + "link" + " RC=[" + or.getResultCode() + "]. Message: " + - * or.getResult()); } - */ - - } - - } - - - @Override - public SynchronizerState getState() { - - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return this.getStatReport(syncDurationInMs, showFinalReport); - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - this.shutdownExecutors(); - } - - /** - * Populate geo document. - * - * @param doc the doc - * @param result the result - * @param resultDescriptor the result descriptor - * @param entityLink the entity link - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. 
- */ - protected void populateGeoDocument(GeoIndexDocument doc, String result, - OxmEntityDescriptor resultDescriptor, String entityLink) - throws JsonProcessingException, IOException { - - doc.setSelfLink(entityLink); - doc.setEntityType(resultDescriptor.getEntityName()); - - JsonNode entityNode = mapper.readTree(result); - - List<String> primaryKeyValues = new ArrayList<String>(); - String pkeyValue = null; - - for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { - pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); - if (pkeyValue != null) { - primaryKeyValues.add(pkeyValue); - } else { - LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); - } - } - - final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); - doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); - String geoLatKey = resultDescriptor.getGeoLatName(); - String geoLongKey = resultDescriptor.getGeoLongName(); - - doc.setLatitude(NodeUtils.getNodeFieldAsText(entityNode, geoLatKey)); - doc.setLongitude(NodeUtils.getNodeFieldAsText(entityNode, geoLongKey)); - doc.deriveFields(); - - } - - @Override - protected boolean isSyncDone() { - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (totalWorkOnHand > 0 || !allWorkEnumerated) { - return false; - } - - return true; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/HistoricalEntitySummarizer.java b/src/main/java/org/openecomp/sparky/synchronizer/HistoricalEntitySummarizer.java deleted file mode 100644 index 5c97ef9..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/HistoricalEntitySummarizer.java +++ /dev/null @@ -1,368 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.sql.Timestamp; -import java.text.SimpleDateFormat; -import java.util.Collection; -import java.util.EnumSet; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; - -import javax.json.Json; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.cl.mdc.MdcContext; -import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; -import org.openecomp.sparky.dal.rest.HttpMethod; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.util.NodeUtils; -import org.slf4j.MDC; - -import com.fasterxml.jackson.databind.JsonNode; -import 
com.fasterxml.jackson.databind.node.ArrayNode; - -/** - * The Class HistoricalEntitySummarizer. - */ -public class HistoricalEntitySummarizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - private static final Logger LOG = LoggerFactory.getInstance().getLogger(HistoricalEntitySummarizer.class); - private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; - - private boolean allWorkEnumerated; - private ConcurrentHashMap<String, AtomicInteger> entityCounters; - private boolean syncInProgress; - private Map<String, String> contextMap; - - /** - * Instantiates a new historical entity summarizer. - * - * @param indexName the index name - * @throws Exception the exception - */ - public HistoricalEntitySummarizer(String indexName) throws Exception { - super(LOG, "HES", 2, 5, 5, indexName); - - this.allWorkEnumerated = false; - this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>(); - this.synchronizerName = "Historical Entity Summarizer"; - this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); - this.syncInProgress = false; - this.contextMap = MDC.getCopyOfContextMap(); - this.syncDurationInMs = -1; - } - - /** - * Collect all the work. 
- * - * @return the operation state - */ - private OperationState collectAllTheWork() { - - Map<String, OxmEntityDescriptor> descriptorMap = - oxmModelLoader.getSearchableEntityDescriptors(); - - if (descriptorMap.isEmpty()) { - LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "historical entities"); - - return OperationState.ERROR; - } - - Collection<String> entityTypes = descriptorMap.keySet(); - - AtomicInteger asyncWoH = new AtomicInteger(0); - - asyncWoH.set(entityTypes.size()); - - try { - for (String entityType : entityTypes) { - - supplyAsync(new Supplier<Void>() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - try { - OperationResult typeLinksResult = - aaiDataProvider.getSelfLinksByEntityType(entityType); - updateActiveInventoryCounters(HttpMethod.GET, entityType, typeLinksResult); - processEntityTypeSelfLinks(entityType, typeLinksResult); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc.getMessage()); - - } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - - asyncWoH.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, error.getMessage()); - } - - }); - - } - - - while (asyncWoH.get() > 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + " summarizer waiting for all the links to be processed."); - } - - Thread.sleep(250); - } - - esWorkOnHand.set(entityCounters.size()); - - // start doing the real work - allWorkEnumerated = true; - - insertEntityTypeCounters(); - - if (LOG.isDebugEnabled()) { - - StringBuilder sb = new StringBuilder(128); - - sb.append("\n\nHistorical Entity Counters:"); - - for (Entry<String, AtomicInteger> entry : entityCounters.entrySet()) { - sb.append("\n").append(entry.getKey()).append(" = ").append(entry.getValue().get()); - } - - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString()); - - } - - } catch (Exception exc) { - LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, exc.getMessage()); 
- - - esWorkOnHand.set(0); - allWorkEnumerated = true; - - return OperationState.ERROR; - } - - return OperationState.OK; - - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "HistoricalEntitySynchronizer", "", "Sync", ""); - - if (syncInProgress) { - LOG.info(AaiUiMsgs.HISTORICAL_SYNC_PENDING); - return OperationState.PENDING; - } - - clearCache(); - - syncInProgress = true; - this.syncStartedTimeStampInMs = System.currentTimeMillis(); - allWorkEnumerated = false; - - return collectAllTheWork(); - } - - /** - * Process entity type self links. - * - * @param entityType the entity type - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(String entityType, OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getMessage()); - return; - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData != null && resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - entityCounters.put(entityType, new AtomicInteger(resultDataArrayNode.size())); - } - } - - } - - /** - * Insert entity type counters. 
- */ - private void insertEntityTypeCounters() { - - if (esWorkOnHand.get() <= 0) { - return; - } - - SimpleDateFormat dateFormat = new SimpleDateFormat(INSERTION_DATE_TIME_FORMAT); - Timestamp timestamp = new Timestamp(System.currentTimeMillis()); - String currentFormattedTimeStamp = dateFormat.format(timestamp); - - Set<Entry<String, AtomicInteger>> entityCounterEntries = entityCounters.entrySet(); - - for (Entry<String, AtomicInteger> entityCounterEntry : entityCounterEntries) { - - supplyAsync(new Supplier<Void>() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - String jsonString = Json.createObjectBuilder().add( - "count", entityCounterEntry.getValue().get()) - .add("entityType", entityCounterEntry.getKey()) - .add("timestamp", currentFormattedTimeStamp).build().toString(); - - String link = null; - try { - link = getElasticFullUrl("", indexName); - OperationResult or = esDataProvider.doPost(link, jsonString, "application/json"); - updateElasticSearchCounters(HttpMethod.POST, entityCounterEntry.getKey(), or); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_STORE_FAILURE, exc.getMessage() ); - } - - return null; - } - - }, esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - }); - - } - - while (esWorkOnHand.get() > 0) { - - try { - Thread.sleep(500); - } catch (InterruptedException exc) { - LOG.error(AaiUiMsgs.INTERRUPTED, "historical Entities", exc.getMessage()); - } - } - - } - - @Override - public SynchronizerState getState() { - - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return this.getStatReport(syncDurationInMs, showFinalReport); - } - - /* (non-Javadoc) - * @see 
org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC,indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand - + " all work enumerated = " + allWorkEnumerated); - } - - if (totalWorkOnHand > 0 || !allWorkEnumerated) { - return false; - } - - this.syncInProgress = false; - - return true; - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() - */ - @Override - public void clearCache() { - - if (syncInProgress) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "Historical Entity Summarizer in progress, request to clear cache ignored"); - return; - } - - super.clearCache(); - this.resetCounters(); - if (entityCounters != null) { - entityCounters.clear(); - } - - allWorkEnumerated = false; - - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/IndexCleaner.java b/src/main/java/org/openecomp/sparky/synchronizer/IndexCleaner.java deleted file mode 100644 index f0fcbb3..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/IndexCleaner.java +++ /dev/null @@ -1,55 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import org.openecomp.sparky.synchronizer.enumeration.OperationState; - -/** - * The Interface IndexCleaner. - */ -public interface IndexCleaner { - - /** - * Populate pre operation collection. - * - * @return the operation state - */ - public OperationState populatePreOperationCollection(); - - /** - * Populate post operation collection. - * - * @return the operation state - */ - public OperationState populatePostOperationCollection(); - - /** - * Perform cleanup. - * - * @return the operation state - */ - public OperationState performCleanup(); - - public String getIndexName(); - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/IndexIntegrityValidator.java b/src/main/java/org/openecomp/sparky/synchronizer/IndexIntegrityValidator.java deleted file mode 100644 index a3c8c83..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/IndexIntegrityValidator.java +++ /dev/null @@ -1,162 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.dal.rest.RestDataProvider; -import org.openecomp.sparky.logging.AaiUiMsgs; - -/** - * The Class IndexIntegrityValidator. - * - * @author davea. - */ -public class IndexIntegrityValidator implements IndexValidator { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(IndexIntegrityValidator.class); - - private String host; - private String port; - private String indexName; - private String indexType; - private String tableConfigJson; - - private final RestDataProvider restDataProvider; - - /** - * Instantiates a new index integrity validator. 
- * - * @param restDataProvider the rest data provider - * @param indexName the index name - * @param indexType the index type - * @param host the host - * @param port the port - * @param tableConfigJson the table config json - */ - public IndexIntegrityValidator(RestDataProvider restDataProvider, String indexName, - String indexType, String host, String port, String tableConfigJson) { - this.restDataProvider = restDataProvider; - this.host = host; - this.port = port; - this.indexName = indexName; - this.indexType = indexType; - this.tableConfigJson = tableConfigJson; - } - - @Override - public String getIndexName() { - return indexName; - } - - public void setIndexName(String indexName) { - this.indexName = indexName; - } - - public String getIndexType() { - return indexType; - } - - public void setIndexType(String indexType) { - this.indexType = indexType; - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexValidator#exists() - */ - @Override - public boolean exists() { - final String fullUrlStr = getFullUrl("/" + indexName + "/"); - OperationResult existsResult = restDataProvider.doHead(fullUrlStr, "application/json"); - - int rc = existsResult.getResultCode(); - - if (rc >= 200 && rc < 300) { - LOG.info(AaiUiMsgs.INDEX_EXISTS, indexName); - return true; - } else { - LOG.info(AaiUiMsgs.INDEX_NOT_EXIST, indexName); - return false; - } - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexValidator#integrityValid() - */ - @Override - public boolean integrityValid() { - // TODO Auto-generated method stub - // logger.info("; - // System.out.println("IndexIntegrityValidator.integrityValid() for - // indexName = " + indexName); - return true; - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexValidator#createOrRepair() - */ - @Override - public void createOrRepair() { - // TODO Auto-generated method stub - String message = "IndexIntegrityValidator.createOrRepair() for indexName = " + indexName; - 
LOG.info(AaiUiMsgs.INFO_GENERIC, message); - - final String fullUrlStr = getFullUrl("/" + indexName + "/"); - OperationResult createResult = - restDataProvider.doPut(fullUrlStr, tableConfigJson, "application/json"); - - int rc = createResult.getResultCode(); - - if (rc >= 200 && rc < 300) { - LOG.info(AaiUiMsgs.INDEX_RECREATED, indexName); - } else if (rc == 400) { - LOG.info(AaiUiMsgs.INDEX_ALREADY_EXISTS, indexName); - } else { - LOG.warn(AaiUiMsgs.INDEX_INTEGRITY_CHECK_FAILED, indexName, createResult.getResult()); - } - - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexValidator#destroyIndex() - */ - @Override - public void destroyIndex() { - // TODO Auto-generated method stub - // we don't do this for now - - } - - /** - * Gets the full url. - * - * @param resourceUrl the resource url - * @return the full url - */ - private String getFullUrl(String resourceUrl) { - return String.format("http://%s:%s%s", host, port, resourceUrl); - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/IndexSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/IndexSynchronizer.java deleted file mode 100644 index 2ebfc3c..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/IndexSynchronizer.java +++ /dev/null @@ -1,65 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; - -/** - * The Interface IndexSynchronizer. - * - * @author davea. - */ -public interface IndexSynchronizer { - - /** - * Do sync. - * - * @return the operation state - */ - public OperationState doSync(); - - public SynchronizerState getState(); - - /** - * Gets the stat report. - * - * @param finalReport the final report - * @return the stat report - */ - public String getStatReport(boolean finalReport); - - /** - * Shutdown. - */ - public void shutdown(); - - public String getIndexName(); - - /** - * Clear cache. - */ - public void clearCache(); - - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/IndexValidator.java b/src/main/java/org/openecomp/sparky/synchronizer/IndexValidator.java deleted file mode 100644 index c0dbfb0..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/IndexValidator.java +++ /dev/null @@ -1,56 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -/** - * The Interface IndexValidator. - */ -public interface IndexValidator { - - /** - * Exists. - * - * @return true, if successful - */ - public boolean exists(); - - /** - * Integrity valid. - * - * @return true, if successful - */ - public boolean integrityValid(); - - /** - * Creates the or repair. - */ - public void createOrRepair(); - - /** - * Destroy index. - */ - public void destroyIndex(); - - public String getIndexName(); - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/MyErrorHandler.java b/src/main/java/org/openecomp/sparky/synchronizer/MyErrorHandler.java deleted file mode 100644 index 8f59651..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/MyErrorHandler.java +++ /dev/null @@ -1,91 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import java.io.PrintWriter; - -import org.xml.sax.ErrorHandler; -import org.xml.sax.SAXException; -import org.xml.sax.SAXParseException; - -/** - * The Class MyErrorHandler. - */ -public class MyErrorHandler implements ErrorHandler { - - /** Error handler output goes here. */ - private PrintWriter out; - - /** - * Instantiates a new my error handler. - * - * @param out the out - */ - public MyErrorHandler(PrintWriter out) { - this.out = out; - } - - /** - * Returns a string describing parse exception details. - * - * @param spe the spe - * @return the parses the exception info - */ - private String getParseExceptionInfo(SAXParseException spe) { - String systemId = spe.getSystemId(); - if (systemId == null) { - systemId = "null"; - } - String info = "URI=" + systemId + " Line=" + spe.getLineNumber() + ": " + spe.getMessage(); - return info; - } - - // The following methods are standard SAX ErrorHandler methods. - // See SAX documentation for more info. 
- - /* (non-Javadoc) - * @see org.xml.sax.ErrorHandler#warning(org.xml.sax.SAXParseException) - */ - @Override - public void warning(SAXParseException spe) throws SAXException { - out.println("Warning: " + getParseExceptionInfo(spe)); - } - - /* (non-Javadoc) - * @see org.xml.sax.ErrorHandler#error(org.xml.sax.SAXParseException) - */ - @Override - public void error(SAXParseException spe) throws SAXException { - String message = "Error: " + getParseExceptionInfo(spe); - throw new SAXException(message); - } - - /* (non-Javadoc) - * @see org.xml.sax.ErrorHandler#fatalError(org.xml.sax.SAXParseException) - */ - @Override - public void fatalError(SAXParseException spe) throws SAXException { - String message = "Fatal Error: " + getParseExceptionInfo(spe); - throw new SAXException(message); - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/SearchableEntitySynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/SearchableEntitySynchronizer.java deleted file mode 100644 index 0097786..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/SearchableEntitySynchronizer.java +++ /dev/null @@ -1,760 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import org.openecomp.cl.mdc.MdcContext; - -import org.openecomp.cl.mdc.MdcContext; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.node.ArrayNode; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Deque; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ExecutorService; -import java.util.function.Supplier; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; -import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.openecomp.sparky.dal.rest.HttpMethod; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration; -import org.openecomp.sparky.synchronizer.entity.MergableEntity; -import org.openecomp.sparky.synchronizer.entity.SearchableEntity; -import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.openecomp.sparky.synchronizer.enumeration.OperationState; -import 
org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchRetrieval; -import org.openecomp.sparky.synchronizer.task.PerformElasticSearchUpdate; -import org.openecomp.sparky.util.NodeUtils; -import org.slf4j.MDC; - -/** - * The Class SearchableEntitySynchronizer. - */ -public class SearchableEntitySynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - /** - * The Class RetrySearchableEntitySyncContainer. - */ - private class RetrySearchableEntitySyncContainer { - NetworkTransaction txn; - SearchableEntity se; - - /** - * Instantiates a new retry searchable entity sync container. - * - * @param txn the txn - * @param se the se - */ - public RetrySearchableEntitySyncContainer(NetworkTransaction txn, SearchableEntity se) { - this.txn = txn; - this.se = se; - } - - public NetworkTransaction getNetworkTransaction() { - return txn; - } - - public SearchableEntity getSearchableEntity() { - return se; - } - } - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(SearchableEntitySynchronizer.class); - - private boolean allWorkEnumerated; - private Deque<SelfLinkDescriptor> selflinks; - private Deque<RetrySearchableEntitySyncContainer> retryQueue; - private Map<String, Integer> retryLimitTracker; - protected ExecutorService esPutExecutor; - - /** - * Instantiates a new searchable entity synchronizer. 
- * - * @param indexName the index name - * @throws Exception the exception - */ - public SearchableEntitySynchronizer(String indexName) throws Exception { - super(LOG, "SES", 2, 5, 5, indexName); - this.allWorkEnumerated = false; - this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); - this.retryQueue = new ConcurrentLinkedDeque<RetrySearchableEntitySyncContainer>(); - this.retryLimitTracker = new ConcurrentHashMap<String, Integer>(); - this.synchronizerName = "Searchable Entity Synchronizer"; - this.esPutExecutor = NodeUtils.createNamedExecutor("SES-ES-PUT", 5, LOG); - this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors( - oxmModelLoader.getSearchableEntityDescriptors()); - this.esEntityStats.initializeCountersFromOxmEntityDescriptors( - oxmModelLoader.getSearchableEntityDescriptors()); - this.syncDurationInMs = -1; - } - - /** - * Collect all the work. - * - * @return the operation state - */ - private OperationState collectAllTheWork() { - final Map<String, String> contextMap = MDC.getCopyOfContextMap(); - Map<String, OxmEntityDescriptor> descriptorMap = - oxmModelLoader.getSearchableEntityDescriptors(); - - if (descriptorMap.isEmpty()) { - LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES); - LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES); - return OperationState.ERROR; - } - - Collection<String> syncTypes = descriptorMap.keySet(); - - /*Collection<String> syncTypes = new ArrayList<String>(); - syncTypes.add("service-instance");*/ - - try { - - /* - * launch a parallel async thread to process the documents for each entity-type (to max the - * of the configured executor anyway) - */ - - aaiWorkOnHand.set(syncTypes.size()); - - for (String key : syncTypes) { - - supplyAsync(new Supplier<Void>() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key); - aaiWorkOnHand.decrementAndGet(); - 
processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - // TODO -> LOG, what should be logged here? - } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "An error occurred getting data from AAI. Error = " + error.getMessage()); - } - }); - - } - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - allWorkEnumerated = true; - syncEntityTypes(); - - while (!isSyncDone()) { - performRetrySync(); - Thread.sleep(1000); - } - - /* - * Make sure we don't hang on to retries that failed which could cause issues during future - * syncs - */ - retryLimitTracker.clear(); - - } catch (Exception exc) { - // TODO -> LOG, waht should be logged here? - } - - return OperationState.OK; - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - this.syncDurationInMs = -1; - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "SearchableEntitySynchronizer", "", "Sync", ""); - - resetCounters(); - this.allWorkEnumerated = false; - syncStartedTimeStampInMs = System.currentTimeMillis(); - collectAllTheWork(); - - return OperationState.OK; - } - - /** - * Process entity type self links. - * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - String message = - "Could not deserialize JSON (representing operation result) as node tree. " + - "Operation result = " + jsonResult + ". 
" + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - OxmEntityDescriptor descriptor = null; - - if (resourceType != null && resourceLink != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(resourceType); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - - if (descriptor.hasSearchableAttributes()) { - selflinks.add(new SelfLinkDescriptor(resourceLink, SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType)); - } - - } - } - } - } - - } - - /** - * Sync entity types. 
- */ - private void syncEntityTypes() { - - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); - } else { - if (result == null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, - linkDescriptor.getSelfLink()); - } else { - updateActiveInventoryCounters(result); - fetchDocumentForUpsert(result); - } - } - }); - } - - } - - } - - /** - * Perform document upsert. - * - * @param esGetTxn the es get txn - * @param se the se - */ - protected void performDocumentUpsert(NetworkTransaction esGetTxn, SearchableEntity se) { - /** - * <p> - * <ul> - * As part of the response processing we need to do the following: - * <li>1. Extract the version (if present), it will be the ETAG when we use the - * Search-Abstraction-Service - * <li>2. 
Spawn next task which is to do the PUT operation into elastic with or with the version - * tag - * <li>a) if version is null or RC=404, then standard put, no _update with version tag - * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic - * </ul> - * </p> - */ - String link = null; - try { - link = getElasticFullUrl("/" + se.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - return; - } - - String versionNumber = null; - boolean wasEntryDiscovered = false; - if (esGetTxn.getOperationResult().getResultCode() == 404) { - LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, se.getEntityPrimaryKeyValue()); - } else if (esGetTxn.getOperationResult().getResultCode() == 200) { - wasEntryDiscovered = true; - try { - versionNumber = NodeUtils.extractFieldValueFromObject( - NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), - "_version"); - } catch (IOException exc) { - String message = - "Error extracting version number from response, aborting searchable entity sync of " - + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); - return; - } - } else { - /* - * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we - * return. 
- */ - LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, - String.valueOf(esGetTxn.getOperationResult().getResultCode())); - return; - } - - try { - String jsonPayload = null; - if (wasEntryDiscovered) { - try { - ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>(); - NodeUtils.extractObjectsByKey( - NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), - "_source", sourceObject); - - if (!sourceObject.isEmpty()) { - String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); - MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); - ObjectReader updater = mapper.readerForUpdating(me); - MergableEntity merged = updater.readValue(se.getIndexDocumentJson()); - jsonPayload = mapper.writeValueAsString(merged); - } - } catch (IOException exc) { - String message = - "Error extracting source value from response, aborting searchable entity sync of " - + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); - return; - } - } else { - jsonPayload = se.getIndexDocumentJson(); - } - - if (wasEntryDiscovered) { - if (versionNumber != null && jsonPayload != null) { - - String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), - ElasticSearchConfig.getConfig().getType(), se.getId(), versionNumber, jsonPayload); - - NetworkTransaction transactionTracker = new NetworkTransaction(); - transactionTracker.setEntityType(esGetTxn.getEntityType()); - transactionTracker.setDescriptor(esGetTxn.getDescriptor()); - transactionTracker.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), - requestPayload, esDataProvider, transactionTracker), esPutExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = "Searchable entity sync UPDATE 
PUT error - " - + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, se); - } - }); - } - - } else { - if (link != null && jsonPayload != null) { - - NetworkTransaction updateElasticTxn = new NetworkTransaction(); - updateElasticTxn.setLink(link); - updateElasticTxn.setEntityType(esGetTxn.getEntityType()); - updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); - updateElasticTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), - esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = - "Searchable entity sync UPDATE PUT error - " + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, se); - } - }); - } - } - } catch (Exception exc) { - String message = "Exception caught during searchable entity sync PUT operation. Message - " - + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } - } - - /** - * Populate searchable entity document. - * - * @param doc the doc - * @param result the result - * @param resultDescriptor the result descriptor - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. 
- */ - protected void populateSearchableEntityDocument(SearchableEntity doc, String result, - OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { - - doc.setEntityType(resultDescriptor.getEntityName()); - - JsonNode entityNode = mapper.readTree(result); - - List<String> primaryKeyValues = new ArrayList<String>(); - String pkeyValue = null; - - for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { - pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); - if (pkeyValue != null) { - primaryKeyValues.add(pkeyValue); - } else { - String message = "populateSearchableEntityDocument(), pKeyValue is null for entityType = " - + resultDescriptor.getEntityName(); - LOG.warn(AaiUiMsgs.WARN_GENERIC, message); - } - } - - final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); - doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); - - final List<String> searchTagFields = resultDescriptor.getSearchableAttributes(); - - /* - * Based on configuration, use the configured field names for this entity-Type to build a - * multi-value collection of search tags for elastic search entity search criteria. - */ - for (String searchTagField : searchTagFields) { - String searchTagValue = NodeUtils.getNodeFieldAsText(entityNode, searchTagField); - if (searchTagValue != null && !searchTagValue.isEmpty()) { - doc.addSearchTagWithKey(searchTagValue, searchTagField); - } - } - } - - /** - * Fetch document for upsert. - * - * @param txn the txn - */ - private void fetchDocumentForUpsert(NetworkTransaction txn) { - if (!txn.getOperationResult().wasSuccessful()) { - String message = "Self link failure. 
Result - " + txn.getOperationResult().getResult(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - return; - } - - try { - if (txn.getDescriptor().hasSearchableAttributes()) { - - final String jsonResult = txn.getOperationResult().getResult(); - if (jsonResult != null && jsonResult.length() > 0) { - - SearchableEntity se = new SearchableEntity(oxmModelLoader); - se.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); - populateSearchableEntityDocument(se, jsonResult, txn.getDescriptor()); - se.deriveFields(); - - String link = null; - try { - link = getElasticFullUrl("/" + se.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.GET); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, se); - } - }); - } - } - - } - } catch (JsonProcessingException exc) { - // TODO -> LOG, waht should be logged here? - } catch (IOException exc) { - // TODO -> LOG, waht should be logged here? - } - } - - /** - * Process store document result. 
- * - * @param esPutResult the es put result - * @param esGetResult the es get result - * @param se the se - */ - private void processStoreDocumentResult(NetworkTransaction esPutResult, - NetworkTransaction esGetResult, SearchableEntity se) { - - OperationResult or = esPutResult.getOperationResult(); - - if (!or.wasSuccessful()) { - if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { - - if (shouldAllowRetry(se.getId())) { - esWorkOnHand.incrementAndGet(); - - RetrySearchableEntitySyncContainer rsc = - new RetrySearchableEntitySyncContainer(esGetResult, se); - retryQueue.push(rsc); - - String message = "Store document failed during searchable entity synchronization" - + " due to version conflict. Entity will be re-synced."; - LOG.warn(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } - } else { - String message = - "Store document failed during searchable entity synchronization with result code " - + or.getResultCode() + " and result message " + or.getResult(); - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } - } - } - - /** - * Perform retry sync. 
- */ - private void performRetrySync() { - while (retryQueue.peek() != null) { - - RetrySearchableEntitySyncContainer rsc = retryQueue.poll(); - if (rsc != null) { - - SearchableEntity se = rsc.getSearchableEntity(); - NetworkTransaction txn = rsc.getNetworkTransaction(); - - String link = null; - try { - /* - * In this retry flow the se object has already derived its fields - */ - link = getElasticFullUrl("/" + se.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction retryTransaction = new NetworkTransaction(); - retryTransaction.setLink(link); - retryTransaction.setEntityType(txn.getEntityType()); - retryTransaction.setDescriptor(txn.getDescriptor()); - retryTransaction.setOperationType(HttpMethod.GET); - - /* - * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already - * called incrementAndGet when queuing the failed PUT! - */ - - supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, se); - } - }); - } - - } - } - } - - /** - * Should allow retry. 
- * - * @param id the id - * @return true, if successful - */ - private boolean shouldAllowRetry(String id) { - boolean isRetryAllowed = true; - if (retryLimitTracker.get(id) != null) { - Integer currentCount = retryLimitTracker.get(id); - if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { - isRetryAllowed = false; - String message = "Searchable entity re-sync limit reached for " + id - + ", re-sync will no longer be attempted for this entity"; - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } else { - Integer newCount = new Integer(currentCount.intValue() + 1); - retryLimitTracker.put(id, newCount); - } - } else { - Integer firstRetryCount = new Integer(1); - retryLimitTracker.put(id, firstRetryCount); - } - - return isRetryAllowed; - } - - @Override - public SynchronizerState getState() { - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return this.getStatReport(syncDurationInMs, showFinalReport); - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (totalWorkOnHand > 0 || !allWorkEnumerated) { - return false; - } - - return true; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/SyncController.java b/src/main/java/org/openecomp/sparky/synchronizer/SyncController.java deleted file mode 100644 index 4ed7136..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/SyncController.java +++ /dev/null @@ -1,478 +0,0 @@ -/** - * 
============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.util.Collection; -import java.util.LinkedHashSet; -import java.util.concurrent.ExecutorService; -import java.util.function.Supplier; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.util.NodeUtils; - -/** - * The Class SyncController. - * - * @author davea. - */ -public class SyncController { - private static final Logger LOG = LoggerFactory.getInstance().getLogger(SyncController.class); - - /** - * The Enum InternalState. 
- */ - private enum InternalState { - IDLE, PRE_SYNC, SYNC_OPERATION, SELECTIVE_DELETE, ABORTING_SYNC, REPAIRING_INDEX, POST_SYNC, - TEST_INDEX_INTEGRITY, GENERATE_FINAL_REPORT - } - - /** - * The Enum SyncActions. - */ - public enum SyncActions { - SYNCHRONIZE, REPAIR_INDEX, INDEX_INTEGRITY_VALIDATION_COMPLETE, PRE_SYNC_COMPLETE, - SYNC_COMPLETE, SYNC_ABORTED, SYNC_FAILURE, POST_SYNC_COMPLETE, PURGE_COMPLETE, REPORT_COMPLETE - } - - private Collection<IndexSynchronizer> registeredSynchronizers; - private Collection<IndexValidator> registeredIndexValidators; - private Collection<IndexCleaner> registeredIndexCleaners; - private InternalState currentInternalState; - private ExecutorService syncControllerExecutor; - private ExecutorService statReporterExecutor; - private final String controllerName; - - /** - * Instantiates a new sync controller. - * - * @param name the name - * @throws Exception the exception - */ - public SyncController(String name) throws Exception { - - this.controllerName = name; - /* - * Does LHS result in a non-duplicated object collection?? What happens if you double-add an - * object? - */ - - registeredSynchronizers = new LinkedHashSet<IndexSynchronizer>(); - registeredIndexValidators = new LinkedHashSet<IndexValidator>(); - registeredIndexCleaners = new LinkedHashSet<IndexCleaner>(); - - this.syncControllerExecutor = NodeUtils.createNamedExecutor("SyncController", 5, LOG); - this.statReporterExecutor = NodeUtils.createNamedExecutor("StatReporter", 1, LOG); - - this.currentInternalState = InternalState.IDLE; - } - - /** - * Change internal state. 
- * - * @param newState the new state - * @param causedByAction the caused by action - */ - private void changeInternalState(InternalState newState, SyncActions causedByAction) { - LOG.info(AaiUiMsgs.SYNC_INTERNAL_STATE_CHANGED, controllerName, - currentInternalState.toString(), newState.toString(), causedByAction.toString()); - - this.currentInternalState = newState; - - performStateAction(); - } - - public String getControllerName() { - return controllerName; - } - - /** - * Perform action. - * - * @param requestedAction the requested action - */ - public void performAction(SyncActions requestedAction) { - - if (currentInternalState == InternalState.IDLE) { - - try { - switch (requestedAction) { - case SYNCHRONIZE: - changeInternalState(InternalState.TEST_INDEX_INTEGRITY, requestedAction); - break; - - default: - break; - } - - } catch (Exception exc) { - String message = "An error occurred while performing action = " + requestedAction - + ". Error = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } else { - LOG.error(AaiUiMsgs.SYNC_NOT_VALID_STATE_DURING_REQUEST, currentInternalState.toString()); - } - } - - /** - * Perform state action. 
- */ - private void performStateAction() { - - try { - switch (currentInternalState) { - - case TEST_INDEX_INTEGRITY: - performIndexIntegrityValidation(); - break; - - case PRE_SYNC: - performPreSyncCleanupCollection(); - break; - - case SYNC_OPERATION: - performSynchronization(); - break; - - case POST_SYNC: - performIndexSyncPostCollection(); - changeInternalState(InternalState.SELECTIVE_DELETE, SyncActions.POST_SYNC_COMPLETE); - break; - - case SELECTIVE_DELETE: - performIndexCleanup(); - changeInternalState(InternalState.GENERATE_FINAL_REPORT, SyncActions.PURGE_COMPLETE); - break; - - case GENERATE_FINAL_REPORT: - - dumpStatReport(true); - clearCaches(); - changeInternalState(InternalState.IDLE, SyncActions.REPORT_COMPLETE); - break; - - case ABORTING_SYNC: - performSyncAbort(); - break; - - default: - break; - } - } catch (Exception exc) { - String message = "Caught an error which performing action. Error = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - /** - * Register entity synchronizer. - * - * @param entitySynchronizer the entity synchronizer - */ - public void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer) { - - String indexName = entitySynchronizer.getIndexName(); - - if (indexName != null) { - registeredSynchronizers.add(entitySynchronizer); - } else { - String message = "Failed to register entity synchronizer because index name is null"; - LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); - } - - } - - /** - * Register index validator. - * - * @param indexValidator the index validator - */ - public void registerIndexValidator(IndexValidator indexValidator) { - - String indexName = indexValidator.getIndexName(); - - if (indexName != null) { - registeredIndexValidators.add(indexValidator); - } else { - String message = "Failed to register index validator because index name is null"; - LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); - } - - } - - /** - * Register index cleaner. 
- * - * @param indexCleaner the index cleaner - */ - public void registerIndexCleaner(IndexCleaner indexCleaner) { - - String indexName = indexCleaner.getIndexName(); - - if (indexName != null) { - registeredIndexCleaners.add(indexCleaner); - } else { - String message = "Failed to register index cleaner because index name is null"; - LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); - } - } - - /* - * State machine should drive our flow dosync just dispatches an action and the state machine - * determines what is in play and what is next - */ - - /** - * Dump stat report. - * - * @param showFinalReport the show final report - */ - private void dumpStatReport(boolean showFinalReport) { - - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - - String statReport = synchronizer.getStatReport(showFinalReport); - - if (statReport != null) { - LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); - } - } - } - - /** - * Clear caches. - */ - private void clearCaches() { - - /* - * Any entity caches that were built as part of the sync operation should be cleared to save - * memory. The original intent of the caching was to provide a short-lived cache to satisfy - * entity requests from multiple synchronizers yet minimizing interactions with the AAI. - */ - - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - synchronizer.clearCache(); - } - } - - /** - * Perform pre sync cleanup collection. - */ - private void performPreSyncCleanupCollection() { - - /* - * ask the index cleaners to collect the their pre-sync object id collections - */ - - for (IndexCleaner cleaner : registeredIndexCleaners) { - cleaner.populatePreOperationCollection(); - } - - changeInternalState(InternalState.SYNC_OPERATION, SyncActions.PRE_SYNC_COMPLETE); - - } - - /** - * Perform index sync post collection. 
- */ - private void performIndexSyncPostCollection() { - - /* - * ask the entity purgers to collect the their pre-sync object id collections - */ - - for (IndexCleaner cleaner : registeredIndexCleaners) { - cleaner.populatePostOperationCollection(); - } - - } - - /** - * Perform index cleanup. - */ - private void performIndexCleanup() { - - /* - * ask the entity purgers to collect the their pre-sync object id collections - */ - - for (IndexCleaner cleaner : registeredIndexCleaners) { - cleaner.performCleanup(); - } - - } - - /** - * Perform sync abort. - */ - private void performSyncAbort() { - changeInternalState(InternalState.IDLE, SyncActions.SYNC_ABORTED); - } - - /** - * Perform index integrity validation. - */ - private void performIndexIntegrityValidation() { - - /* - * loop through registered index validators and test and fix, if needed - */ - - for (IndexValidator validator : registeredIndexValidators) { - try { - if (!validator.exists()) { - validator.createOrRepair(); - } - } catch (Exception exc) { - String message = "Index validator caused an error = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - changeInternalState(InternalState.PRE_SYNC, SyncActions.INDEX_INTEGRITY_VALIDATION_COMPLETE); - - } - - /** - * Shutdown. - */ - public void shutdown() { - - this.syncControllerExecutor.shutdown(); - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - - try { - synchronizer.shutdown(); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "Synchronizer shutdown caused an error = " + exc.getMessage()); - } - - } - this.statReporterExecutor.shutdown(); - } - - /* - * Need some kind of task running that responds to a transient boolean to kill it or we just stop - * the executor that it is in? - */ - - - - /** - * Perform synchronization. 
- */ - private void performSynchronization() { - - /* - * Get all the synchronizers running in parallel - */ - - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - supplyAsync(new Supplier<Void>() { - - @Override - public Void get() { - - synchronizer.doSync(); - return null; - } - - }, this.syncControllerExecutor).whenComplete((result, error) -> { - - /* - * We don't bother checking the result, because it will always be null as the doSync() is - * non-blocking. - */ - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "doSync operation failed with an error = " + error.getMessage()); - } - }); - } - - boolean allDone = false; - long nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; - boolean dumpPeriodicStatReport = false; - - while (!allDone) { - - int totalFinished = 0; - - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - if (dumpPeriodicStatReport) { - if (synchronizer.getState() != SynchronizerState.IDLE) { - String statReport = synchronizer.getStatReport(false); - if (statReport != null) { - LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); - } - } - if (synchronizer.getState() == SynchronizerState.IDLE) { - totalFinished++; - } - } - } - if ( System.currentTimeMillis() > nextReportTimeStampInMs) { - dumpPeriodicStatReport = true; - nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; - } else { - dumpPeriodicStatReport = false; - } - allDone = (totalFinished == registeredSynchronizers.size()); - - try { - Thread.sleep(250); - } catch (InterruptedException exc) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "An error occurred while waiting for sync to complete. Error = " + exc.getMessage()); - } - - } - - changeInternalState(InternalState.POST_SYNC, SyncActions.SYNC_COMPLETE); - - } - - public SynchronizerState getState() { - - switch (currentInternalState) { - - case IDLE: { - return SynchronizerState.IDLE; - } - - default: { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - - } - } - - } - -}
\ No newline at end of file diff --git a/src/main/java/org/openecomp/sparky/synchronizer/SyncHelper.java b/src/main/java/org/openecomp/sparky/synchronizer/SyncHelper.java deleted file mode 100644 index 002bc58..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/SyncHelper.java +++ /dev/null @@ -1,702 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.openecomp.sparky.synchronizer; - -import com.google.common.util.concurrent.ThreadFactoryBuilder; - -import java.lang.Thread.UncaughtExceptionHandler; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; -import org.openecomp.sparky.config.oxm.OxmModelLoader; -import org.openecomp.sparky.dal.aai.ActiveInventoryAdapter; -import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; -import org.openecomp.sparky.dal.aai.config.ActiveInventoryRestConfig; -import org.openecomp.sparky.dal.cache.EntityCache; -import org.openecomp.sparky.dal.cache.InMemoryEntityCache; -import org.openecomp.sparky.dal.cache.PersistentEntityCache; -import org.openecomp.sparky.dal.elasticsearch.ElasticSearchAdapter; -import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.openecomp.sparky.dal.rest.RestClientBuilder; -import org.openecomp.sparky.dal.rest.RestfulDataAccessor; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.SyncController.SyncActions; -import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration; -import org.openecomp.sparky.synchronizer.config.SynchronizerConstants; -import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; -import org.openecomp.sparky.util.ErrorUtil; -import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; -import org.slf4j.MDC; - -/** - * The Class SyncHelper. - * - * @author davea. 
- */ -public class SyncHelper { - - private final Logger LOG = LoggerFactory.getInstance().getLogger(SyncHelper.class); - private SyncController syncController = null; - private SyncController entityCounterHistorySummarizer = null; - - private ScheduledExecutorService oneShotExecutor = Executors.newSingleThreadScheduledExecutor(); - private ScheduledExecutorService periodicExecutor = null; - private ScheduledExecutorService historicalExecutor = - Executors.newSingleThreadScheduledExecutor(); - - private SynchronizerConfiguration syncConfig; - private ElasticSearchConfig esConfig; - private OxmModelLoader oxmModelLoader; - - private Boolean initialSyncRunning = false; - private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z"); - private AtomicLong timeNextSync = new AtomicLong(); - Map<String, String> contextMap; - - /** - * The Class SyncTask. - */ - private class SyncTask implements Runnable { - - private boolean isInitialSync; - - /** - * Instantiates a new sync task. - * - * @param initialSync the initial sync - */ - public SyncTask(boolean initialSync) { - this.isInitialSync = initialSync; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Runnable#run() - */ - @Override - public void run() { - long opStartTime = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - - LOG.info(AaiUiMsgs.SEARCH_ENGINE_SYNC_STARTED, sdf.format(opStartTime) - .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD)); - - try { - - if (syncController == null) { - LOG.error(AaiUiMsgs.SYNC_SKIPPED_SYNCCONTROLLER_NOT_INITIALIZED); - return; - } - - int taskFrequencyInDays = SynchronizerConfiguration.getConfig().getSyncTaskFrequencyInDay(); - - /* - * Do nothing if the initial start-up sync hasn't finished yet, but the regular sync - * scheduler fired up a regular sync. 
- */ - if (!initialSyncRunning) { - if (isInitialSync) { - initialSyncRunning = true; - } else { - // update 'timeNextSync' for periodic sync - timeNextSync.getAndAdd(taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); - - } - - LOG.info(AaiUiMsgs.INFO_GENERIC, "SyncTask, starting syncrhonization"); - - syncController.performAction(SyncActions.SYNCHRONIZE); - - while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { - Thread.sleep(1000); - } - - } else { - LOG.info(AaiUiMsgs.SKIP_PERIODIC_SYNC_AS_SYNC_DIDNT_FINISH, sdf.format(opStartTime) - .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD)); - - return; - } - - long opEndTime = System.currentTimeMillis(); - - if (isInitialSync) { - /* - * Handle corner case when start-up sync operation overlapped with a scheduled - * sync-start-time. Note that the scheduled sync does nothing if 'initialSyncRunning' is - * TRUE. So the actual next-sync is one more sync-cycle away - */ - long knownNextSyncTime = timeNextSync.get(); - if (knownNextSyncTime != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS - && opEndTime > knownNextSyncTime) { - timeNextSync.compareAndSet(knownNextSyncTime, - knownNextSyncTime + taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); - initialSyncRunning = false; - } - } - - String durationMessage = - String.format(syncController.getControllerName() + " synchronization took '%d' ms.", - (opEndTime - opStartTime)); - - LOG.info(AaiUiMsgs.SYNC_DURATION, durationMessage); - - // Provide log about the time for next synchronization - if (syncConfig.isConfigOkForPeriodicSync() - && timeNextSync.get() != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) { - TimeZone tz = TimeZone.getTimeZone(syncConfig.getSyncTaskStartTimeTimeZone()); - sdf.setTimeZone(tz); - if (opEndTime - opStartTime > taskFrequencyInDays - * SynchronizerConstants.MILLISEC_IN_A_DAY) { - String durationWasLongerMessage = String.format( - 
syncController.getControllerName() - + " synchronization took '%d' ms which is larger than" - + " synchronization interval of '%d' ms.", - (opEndTime - opStartTime), - taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); - - LOG.info(AaiUiMsgs.SYNC_DURATION, durationWasLongerMessage); - } - - LOG.info(AaiUiMsgs.SYNC_TO_BEGIN, syncController.getControllerName(), - sdf.format(timeNextSync).replaceAll(SynchronizerConstants.TIME_STD, - SynchronizerConstants.TIME_CONFIG_STD)); - } - - } catch (Exception exc) { - String message = "Caught an exception while attempt to synchronize elastic search " - + "with an error cause = " + ErrorUtil.extractStackTraceElements(5, exc); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - - } - - } - - /** - * The Class HistoricalEntityCountSummaryTask. - */ - private class HistoricalEntityCountSummaryTask implements Runnable { - - /** - * Instantiates a new historical entity count summary task. - */ - public HistoricalEntityCountSummaryTask() {} - - /* - * (non-Javadoc) - * - * @see java.lang.Runnable#run() - */ - @Override - public void run() { - - long opStartTime = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - LOG.info(AaiUiMsgs.HISTORICAL_ENTITY_COUNT_SUMMARIZER_STARTING, sdf.format(opStartTime) - .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD)); - - try { - if (entityCounterHistorySummarizer == null) { - LOG.error(AaiUiMsgs.HISTORICAL_ENTITY_COUNT_SUMMARIZER_NOT_STARTED); - return; - } - - LOG.info(AaiUiMsgs.INFO_GENERIC, - "EntityCounterHistorySummarizer, starting syncrhonization"); - - entityCounterHistorySummarizer.performAction(SyncActions.SYNCHRONIZE); - - while (entityCounterHistorySummarizer - .getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { - Thread.sleep(1000); - } - - long opEndTime = System.currentTimeMillis(); - - LOG.info(AaiUiMsgs.HISTORICAL_SYNC_DURATION, - entityCounterHistorySummarizer.getControllerName(), - String.valueOf(opEndTime - 
opStartTime)); - - long taskFrequencyInMs = - syncConfig.getHistoricalEntitySummarizedFrequencyInMinutes() * 60 * 1000; - - if (syncConfig.isHistoricalEntitySummarizerEnabled()) { - String time = sdf.format(System.currentTimeMillis() + taskFrequencyInMs) - .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD); - - LOG.info(AaiUiMsgs.HISTORICAL_SYNC_TO_BEGIN, time); - } - - - } catch (Exception exc) { - String message = "Caught an exception while attempting to populate entity country " - + "history elasticsearch table with an error cause = " - + ErrorUtil.extractStackTraceElements(5, exc); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - - } - - } - - /** - * Gets the first sync time. - * - * @param calendar the calendar - * @param timeNow the time now - * @param taskFreqInDay the task freq in day - * @return the first sync time - */ - public long getFirstSyncTime(Calendar calendar, long timeNow, int taskFreqInDay) { - if (taskFreqInDay == SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) { - return SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS; - } else if (timeNow > calendar.getTimeInMillis()) { - calendar.add(Calendar.DAY_OF_MONTH, taskFreqInDay); - } - return calendar.getTimeInMillis(); - } - - /** - * Boot strap and configure the moving pieces of the Sync Controller. - */ - - private void initializeSyncController() { - - try { - - /* - * TODO: it would be nice to have XML IoC / dependency injection kind of thing for these - * pieces maybe Spring? 
- */ - - /* - * Sync Controller itself - */ - - syncController = new SyncController("entitySyncController"); - - /* - * Create common elements - */ - - ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new RestClientBuilder()); - ActiveInventoryRestConfig aaiRestConfig = - ActiveInventoryConfig.getConfig().getAaiRestConfig(); - - - EntityCache cache = null; - - if (aaiRestConfig.isCacheEnabled()) { - cache = new PersistentEntityCache(aaiRestConfig.getStorageFolderOverride(), - aaiRestConfig.getNumCacheWorkers()); - } else { - cache = new InMemoryEntityCache(); - } - - RestClientBuilder clientBuilder = new RestClientBuilder(); - - aaiAdapter.setCacheEnabled(true); - aaiAdapter.setEntityCache(cache); - - clientBuilder.setUseHttps(false); - - RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); - - ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); - ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider, esConfig); - - /* - * Register Index Validators - */ - - IndexIntegrityValidator entitySearchIndexValidator = - new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getIndexName(), - esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), - esConfig.buildElasticSearchTableConfig()); - - syncController.registerIndexValidator(entitySearchIndexValidator); - - // TODO: Insert IndexValidator for TopographicalEntityIndex - // we should have one, but one isn't 100% required as none of the fields are analyzed - - /* - * Register Synchronizers - */ - - SearchableEntitySynchronizer ses = new SearchableEntitySynchronizer(esConfig.getIndexName()); - ses.setAaiDataProvider(aaiAdapter); - ses.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(ses); - - CrossEntityReferenceSynchronizer cers = new CrossEntityReferenceSynchronizer( - esConfig.getIndexName(), ActiveInventoryConfig.getConfig()); - cers.setAaiDataProvider(aaiAdapter); - 
cers.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(cers); - - GeoSynchronizer geo = new GeoSynchronizer(esConfig.getTopographicalSearchIndex()); - geo.setAaiDataProvider(aaiAdapter); - geo.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(geo); - - if (syncConfig.isAutosuggestSynchronizationEnabled()) { - initAutoSuggestionSynchronizer(esConfig, aaiAdapter, esAdapter, nonCachingRestProvider); - initAggregationSynchronizer(esConfig, aaiAdapter, esAdapter, nonCachingRestProvider); - } - - /* - * Register Cleaners - */ - - IndexCleaner searchableIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, - esConfig.getIndexName(), esConfig.getType(), esConfig.getIpAddress(), - esConfig.getHttpPort(), syncConfig.getScrollContextTimeToLiveInMinutes(), - syncConfig.getNumScrollContextItemsToRetrievePerRequest()); - - syncController.registerIndexCleaner(searchableIndexCleaner); - - IndexCleaner geoIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, - esConfig.getTopographicalSearchIndex(), esConfig.getType(), esConfig.getIpAddress(), - esConfig.getHttpPort(), syncConfig.getScrollContextTimeToLiveInMinutes(), - syncConfig.getNumScrollContextItemsToRetrievePerRequest()); - - syncController.registerIndexCleaner(geoIndexCleaner); - - - } catch (Exception exc) { - String message = "Error: failed to sync with message = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - - } - - /** - * Inits the entity counter history summarizer. 
- */ - private void initEntityCounterHistorySummarizer() { - - LOG.info(AaiUiMsgs.INFO_GENERIC, "initEntityCounterHistorySummarizer"); - - try { - entityCounterHistorySummarizer = new SyncController("entityCounterHistorySummarizer"); - - ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new RestClientBuilder()); - aaiAdapter.setCacheEnabled(false); - - RestClientBuilder clientBuilder = new RestClientBuilder(); - clientBuilder.setUseHttps(false); - - RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); - ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); - ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider, esConfig); - - IndexIntegrityValidator entityCounterHistoryValidator = - new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getEntityCountHistoryIndex(), - esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), - esConfig.buildElasticSearchEntityCountHistoryTableConfig()); - - entityCounterHistorySummarizer.registerIndexValidator(entityCounterHistoryValidator); - - HistoricalEntitySummarizer historicalSummarizer = - new HistoricalEntitySummarizer(esConfig.getEntityCountHistoryIndex()); - historicalSummarizer.setAaiDataProvider(aaiAdapter); - historicalSummarizer.setEsDataProvider(esAdapter); - entityCounterHistorySummarizer.registerEntitySynchronizer(historicalSummarizer); - - } catch (Exception exc) { - String message = "Error: failed to sync with message = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - private List<String> getAutosuggestableEntitiesFromOXM() { - Map<String, OxmEntityDescriptor> map = oxmModelLoader.getSuggestionSearchEntityDescriptors(); - List<String> suggestableEntities = new ArrayList<String>(); - - for (String entity: map.keySet()){ - suggestableEntities.add(entity); - } - return suggestableEntities; - } - - /** - * Initialize the AutosuggestionSynchronizer and - * 
AggregationSuggestionSynchronizer - * - * @param esConfig - * @param aaiAdapter - * @param esAdapter - * @param nonCachingRestProvider - */ - private void initAutoSuggestionSynchronizer(ElasticSearchConfig esConfig, - ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, - RestfulDataAccessor nonCachingRestProvider) { - LOG.info(AaiUiMsgs.INFO_GENERIC, "initAutoSuggestionSynchronizer"); - - // Initialize for entityautosuggestindex - try { - IndexIntegrityValidator autoSuggestionIndexValidator = - new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getAutosuggestIndexname(), - esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), - esConfig.buildAutosuggestionTableConfig()); - - syncController.registerIndexValidator(autoSuggestionIndexValidator); - - AutosuggestionSynchronizer suggestionSynchronizer = - new AutosuggestionSynchronizer(esConfig.getAutosuggestIndexname()); - suggestionSynchronizer.setAaiDataProvider(aaiAdapter); - suggestionSynchronizer.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(suggestionSynchronizer); - - AggregationSuggestionSynchronizer aggregationSuggestionSynchronizer = - new AggregationSuggestionSynchronizer(esConfig.getAutosuggestIndexname()); - aggregationSuggestionSynchronizer.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(aggregationSuggestionSynchronizer); - - IndexCleaner autosuggestIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, - esConfig.getAutosuggestIndexname(), esConfig.getType(), esConfig.getIpAddress(), - esConfig.getHttpPort(), syncConfig.getScrollContextTimeToLiveInMinutes(), - syncConfig.getNumScrollContextItemsToRetrievePerRequest()); - - syncController.registerIndexCleaner(autosuggestIndexCleaner); - } catch (Exception exc) { - String message = "Error: failed to sync with message = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - /** - * Initialize the AggregationSynchronizer - * - 
* @param esConfig - * @param aaiAdapter - * @param esAdapter - * @param nonCachingRestProvider - */ - private void initAggregationSynchronizer(ElasticSearchConfig esConfig, - ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, - RestfulDataAccessor nonCachingRestProvider) { - LOG.info(AaiUiMsgs.INFO_GENERIC, "initAggregationSynchronizer"); - - List<String> aggregationEntities = getAutosuggestableEntitiesFromOXM(); - - // For each index: create an IndexValidator, a Synchronizer, and an IndexCleaner - for (String entity : aggregationEntities) { - try { - String indexName = TierSupportUiConstants.getAggregationIndexName(entity); - - IndexIntegrityValidator aggregationIndexValidator = new IndexIntegrityValidator( - nonCachingRestProvider, indexName, esConfig.getType(), esConfig.getIpAddress(), - esConfig.getHttpPort(), esConfig.buildAggregationTableConfig()); - - syncController.registerIndexValidator(aggregationIndexValidator); - - /* - * TODO: This per-entity-synchronizer approach will eventually result in AAI / ES overload - * because of the existing dedicated thread pools for ES + AAI operations within the - * synchronizer. If we had 50 types to sync then the thread pools within each Synchronizer - * would cause some heartburn as there would be hundreds of threads trying to talk to AAI. - * Given that we our running out of time, let's make sure we can get it functional and then - * we'll re-visit. 
- */ - AggregationSynchronizer aggSynchronizer = new AggregationSynchronizer(entity, indexName); - aggSynchronizer.setAaiDataProvider(aaiAdapter); - aggSynchronizer.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(aggSynchronizer); - - IndexCleaner entityDataIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, - indexName, esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), - syncConfig.getScrollContextTimeToLiveInMinutes(), - syncConfig.getNumScrollContextItemsToRetrievePerRequest()); - - syncController.registerIndexCleaner(entityDataIndexCleaner); - - } catch (Exception exc) { - String message = "Error: failed to sync with message = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - } - - /** - * Instantiates a new sync helper. - * - * @param loader the loader - */ - public SyncHelper(OxmModelLoader loader) { - try { - this.contextMap = MDC.getCopyOfContextMap(); - this.syncConfig = SynchronizerConfiguration.getConfig(); - this.esConfig = ElasticSearchConfig.getConfig(); - this.oxmModelLoader = loader; - - UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() { - - @Override - public void uncaughtException(Thread thread, Throwable exc) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc); - } - }; - - ThreadFactory namedThreadFactory = new ThreadFactoryBuilder().setNameFormat("SyncHelper-%d") - .setUncaughtExceptionHandler(uncaughtExceptionHandler).build(); - - periodicExecutor = Executors.newScheduledThreadPool(3, namedThreadFactory); - - /* - * We only want to initialize the synchronizer if sync has been configured to start - */ - if (syncConfig.isConfigOkForStartupSync() || syncConfig.isConfigOkForPeriodicSync()) { - initializeSyncController(); - } - - if (syncConfig.isHistoricalEntitySummarizerEnabled()) { - initEntityCounterHistorySummarizer(); - } else { - LOG.info(AaiUiMsgs.INFO_GENERIC, "history summarizer disabled"); 
- } - - - // schedule startup synchronization - if (syncConfig.isConfigOkForStartupSync()) { - - long taskInitialDelayInMs = syncConfig.getSyncTaskInitialDelayInMs(); - if (taskInitialDelayInMs != SynchronizerConstants.DELAY_NO_STARTUP_SYNC_IN_MS) { - oneShotExecutor.schedule(new SyncTask(true), taskInitialDelayInMs, TimeUnit.MILLISECONDS); - LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine startup synchronization is enabled."); - } else { - LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine startup synchronization is disabled."); - } - } - - // schedule periodic synchronization - if (syncConfig.isConfigOkForPeriodicSync()) { - - TimeZone tz = TimeZone.getTimeZone(syncConfig.getSyncTaskStartTimeTimeZone()); - Calendar calendar = Calendar.getInstance(tz); - sdf.setTimeZone(tz); - - calendar.set(Calendar.HOUR_OF_DAY, syncConfig.getSyncTaskStartTimeHr()); - calendar.set(Calendar.MINUTE, syncConfig.getSyncTaskStartTimeMin()); - calendar.set(Calendar.SECOND, syncConfig.getSyncTaskStartTimeSec()); - - long timeCurrent = calendar.getTimeInMillis(); - int taskFrequencyInDay = syncConfig.getSyncTaskFrequencyInDay(); - timeNextSync.getAndSet(getFirstSyncTime(calendar, timeCurrent, taskFrequencyInDay)); - - long delayUntilFirstRegSyncInMs = 0; - delayUntilFirstRegSyncInMs = timeNextSync.get() - timeCurrent; - - // Do all calculation in milliseconds - long taskFreqencyInMs = taskFrequencyInDay * SynchronizerConstants.MILLISEC_IN_A_DAY; - - if (taskFreqencyInMs != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) { - periodicExecutor.scheduleAtFixedRate(new SyncTask(false), delayUntilFirstRegSyncInMs, - taskFreqencyInMs, TimeUnit.MILLISECONDS); - LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine periodic synchronization is enabled."); - // case: when - startup sync is misconfigured or is disabled - // - give a clue to user when is the next periodic sync - if (!syncConfig.isConfigOkForStartupSync() - || syncConfig.isConfigDisabledForInitialSync()) { - 
LOG.info(AaiUiMsgs.SYNC_TO_BEGIN, syncController.getControllerName(), - sdf.format(timeNextSync).replaceAll(SynchronizerConstants.TIME_STD, - SynchronizerConstants.TIME_CONFIG_STD)); - } - } else { - LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine periodic synchronization is disabled."); - } - } - - // schedule periodic synchronization - if (syncConfig.isHistoricalEntitySummarizerEnabled()) { - scheduleHistoricalCounterSyncTask(); - } - - } catch (Exception exc) { - String message = "Caught an exception while starting up the SyncHelper. Error cause = \n" - + ErrorUtil.extractStackTraceElements(5, exc); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - /** - * Schedule historical counter sync task. - */ - private void scheduleHistoricalCounterSyncTask() { - long taskFrequencyInMs = - syncConfig.getHistoricalEntitySummarizedFrequencyInMinutes() * 60 * 1000; - historicalExecutor.scheduleWithFixedDelay(new HistoricalEntityCountSummaryTask(), 0, - taskFrequencyInMs, TimeUnit.MILLISECONDS); - LOG.info(AaiUiMsgs.INFO_GENERIC, - "Historical Entity Count Summarizer synchronization is enabled."); - } - - /** - * Shutdown. 
- */ - public void shutdown() { - - if (oneShotExecutor != null) { - oneShotExecutor.shutdown(); - } - - if (periodicExecutor != null) { - periodicExecutor.shutdown(); - } - - if (historicalExecutor != null) { - historicalExecutor.shutdown(); - } - - if (syncController != null) { - syncController.shutdown(); - } - - if (entityCounterHistorySummarizer != null) { - entityCounterHistorySummarizer.shutdown(); - } - - } - - public OxmModelLoader getOxmModelLoader() { - return oxmModelLoader; - } - - public void setOxmModelLoader(OxmModelLoader oxmModelLoader) { - this.oxmModelLoader = oxmModelLoader; - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/TaskProcessingStats.java b/src/main/java/org/openecomp/sparky/synchronizer/TaskProcessingStats.java deleted file mode 100644 index e54395c..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/TaskProcessingStats.java +++ /dev/null @@ -1,133 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import org.openecomp.sparky.analytics.AbstractStatistics; -import org.openecomp.sparky.synchronizer.config.TaskProcessorConfig; - -/** - * The Class TaskProcessingStats. - */ -public class TaskProcessingStats extends AbstractStatistics { - - private static String TASK_AGE_STATS = "taskAgeStats"; - private static String TASK_RESPONSE_STATS = "taskResponseStats"; - private static String RESPONSE_SIZE_IN_BYTES = "taskResponseSizeInBytes"; - // private static String QUEUE_ITEM_LENGTH = "queueItemLength"; - private static String TPS = "transactionsPerSecond"; - - /** - * Instantiates a new task processing stats. - * - * @param config the config - */ - public TaskProcessingStats(TaskProcessorConfig config) { - - addHistogram(TASK_AGE_STATS, config.getTaskAgeHistogramLabel(), - config.getTaskAgeHistogramMaxYAxis(), config.getTaskAgeHistogramNumBins(), - config.getTaskAgeHistogramNumDecimalPoints()); - - addHistogram(TASK_RESPONSE_STATS, config.getResponseTimeHistogramLabel(), - config.getResponseTimeHistogramMaxYAxis(), config.getResponseTimeHistogramNumBins(), - config.getResponseTimeHistogramNumDecimalPoints()); - - addHistogram(RESPONSE_SIZE_IN_BYTES, config.getBytesHistogramLabel(), - config.getBytesHistogramMaxYAxis(), config.getBytesHistogramNumBins(), - config.getBytesHistogramNumDecimalPoints()); - - /* - * addHistogram(QUEUE_ITEM_LENGTH, config.getQueueLengthHistogramLabel(), - * config.getQueueLengthHistogramMaxYAxis(), config.getQueueLengthHistogramNumBins(), - * config.getQueueLengthHistogramNumDecimalPoints()); - */ - - addHistogram(TPS, config.getTpsHistogramLabel(), config.getTpsHistogramMaxYAxis(), - config.getTpsHistogramNumBins(), config.getTpsHistogramNumDecimalPoints()); - - } - - /* - * public void updateQueueItemLengthHistogram(long value) 
{ updateHistogram(QUEUE_ITEM_LENGTH, - * value); } - */ - - /** - * Update task age stats histogram. - * - * @param value the value - */ - public void updateTaskAgeStatsHistogram(long value) { - updateHistogram(TASK_AGE_STATS, value); - } - - /** - * Update task response stats histogram. - * - * @param value the value - */ - public void updateTaskResponseStatsHistogram(long value) { - updateHistogram(TASK_RESPONSE_STATS, value); - } - - /** - * Update response size in bytes histogram. - * - * @param value the value - */ - public void updateResponseSizeInBytesHistogram(long value) { - updateHistogram(RESPONSE_SIZE_IN_BYTES, value); - } - - /** - * Update transactions per second histogram. - * - * @param value the value - */ - public void updateTransactionsPerSecondHistogram(long value) { - updateHistogram(TPS, value); - } - - /** - * Gets the statistics report. - * - * @param verboseEnabled the verbose enabled - * @param indentPadding the indent padding - * @return the statistics report - */ - public String getStatisticsReport(boolean verboseEnabled, String indentPadding) { - - StringBuilder sb = new StringBuilder(); - - sb.append("\n").append(getHistogramStats(TASK_AGE_STATS, verboseEnabled, indentPadding)); - // sb.append("\n").append(getHistogramStats(QUEUE_ITEM_LENGTH, verboseEnabled, indentPadding)); - sb.append("\n").append(getHistogramStats(TASK_RESPONSE_STATS, verboseEnabled, indentPadding)); - sb.append("\n") - .append(getHistogramStats(RESPONSE_SIZE_IN_BYTES, verboseEnabled, indentPadding)); - sb.append("\n").append(getHistogramStats(TPS, verboseEnabled, indentPadding)); - - return sb.toString(); - - } - - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/TransactionRateController.java b/src/main/java/org/openecomp/sparky/synchronizer/TransactionRateController.java deleted file mode 100644 index 14ae67f..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/TransactionRateController.java +++ /dev/null @@ -1,110 +0,0 @@ -/** - * 
============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer; - -import java.util.concurrent.atomic.AtomicInteger; - -import org.openecomp.sparky.analytics.AveragingRingBuffer; -import org.openecomp.sparky.synchronizer.config.TaskProcessorConfig; - -/** - * TODO: Fill in description. - * - * @author davea. - */ -public class TransactionRateController { - - private AveragingRingBuffer responseTimeTracker; - private double msPerTransaction; - private int numThreads; - private TaskProcessorConfig config; - private long startTimeInMs; - private AtomicInteger numTransactions; - - /** - * Instantiates a new transaction rate controller. 
- * - * @param config the config - */ - public TransactionRateController(TaskProcessorConfig config) { - - this.config = config; - this.responseTimeTracker = new AveragingRingBuffer( - config.getNumSamplesPerThreadForRunningAverage() * config.getMaxConcurrentWorkers()); - this.msPerTransaction = 1000 / config.getTargetTps(); - this.numThreads = config.getMaxConcurrentWorkers(); - this.startTimeInMs = System.currentTimeMillis(); - this.numTransactions = new AtomicInteger(0); - } - - /** - * Track response time. - * - * @param responseTimeInMs the response time in ms - */ - public void trackResponseTime(long responseTimeInMs) { - this.numTransactions.incrementAndGet(); - responseTimeTracker.addSample(responseTimeInMs); - } - - public long getFixedDelayInMs() { - - /* - * The math here is pretty simple: - * - * 1. Target TPS is 10. Then the msPerTxn = 1000/10 = 100ms - * - * 2. If the calculated avgResponseTime = 40 ms, then the proposed delay is 60ms per thread. - * - * 3. If the calculated avgResponseTime = 200ms, then the proposed delay is -100 ms, which is - * not possible, we can't speed it up, so we don't propose any further delay. 
- */ - - double proposedDelay = 0; - - if (config.isTransactionRateControllerEnabled()) { - proposedDelay = ((msPerTransaction - responseTimeTracker.getAvg()) * this.numThreads); - - if (proposedDelay > 0) { - return (long) (proposedDelay); - } - } - - return (long) proposedDelay; - } - - public long getAvg() { - return responseTimeTracker.getAvg(); - } - - public double getCurrentTps() { - if (numTransactions.get() > 0) { - double timeDelta = System.currentTimeMillis() - startTimeInMs; - double numTxns = numTransactions.get(); - return (numTxns / timeDelta) * 1000.0; - } - - return 0.0; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConfiguration.java b/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConfiguration.java deleted file mode 100644 index 51095d8..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConfiguration.java +++ /dev/null @@ -1,441 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
package org.openecomp.sparky.synchronizer.config;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.openecomp.cl.api.Logger;
import org.openecomp.cl.eelf.LoggerFactory;
import org.openecomp.sparky.logging.AaiUiMsgs;
import org.openecomp.sparky.util.ConfigHelper;
import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants;

/**
 * Configuration holder for the synchronizer subsystem, loaded from
 * {@code synchronizer.properties}. Parses startup-sync delay, periodic-sync
 * frequency/start-time, Elasticsearch scroll-context settings and feature
 * flags. Invalid values flip the corresponding configOkFor* flag instead of
 * failing hard, so the rest of the application can still start.
 */
public class SynchronizerConfiguration {

  private static final Logger LOG =
      LoggerFactory.getInstance().getLogger(SynchronizerConfiguration.class);

  public static final String CONFIG_FILE =
      TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "synchronizer.properties";

  private static SynchronizerConfiguration instance;

  public static final String DEPTH_MODIFIER = "?depth=0";
  public static final String DEPTH_ALL_MODIFIER = "?depth=all";
  public static final String DEPTH_AND_NODES_ONLY_MODIFIER = "?depth=0&nodes-only";
  public static final String NODES_ONLY_MODIFIER = "?nodes-only";

  /**
   * Returns the lazily-initialized singleton configuration.
   *
   * <p>Synchronized to close the check-then-act race of the previous
   * implementation, which could construct and initialize two instances when
   * called concurrently during startup.
   *
   * @return the shared configuration instance
   * @throws Exception if loading/parsing the properties file fails
   */
  public static synchronized SynchronizerConfiguration getConfig() throws Exception {

    if (instance == null) {
      instance = new SynchronizerConfiguration();
      instance.initialize();
    }

    return instance;
  }

  /**
   * Instantiates a new synchronizer configuration. Fields are only populated
   * by {@link #initialize()}; the bare constructor exists for tests.
   */
  public SynchronizerConfiguration() {
    // test method
  }

  /**
   * Loads and validates all synchronizer properties.
   *
   * <p>Each independently-parseable group is wrapped in its own try/catch so
   * one bad value disables only the affected sync mode (startup or periodic)
   * rather than aborting the whole configuration.
   *
   * @throws Exception the exception
   */
  protected void initialize() throws Exception {

    Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE);

    // parse config for startup sync
    try {
      syncTaskInitialDelayInMs =
          Integer.parseInt(props.getProperty("synchronizer.syncTask.initialDelayInMs",
              SynchronizerConstants.DEFAULT_INITIAL_DELAY_IN_MS));
      if (syncTaskInitialDelayInMs < 0) {
        throw new Exception();
      }
    } catch (Exception exc) {
      this.setConfigOkForStartupSync(false);
      syncTaskInitialDelayInMs = SynchronizerConstants.DEFAULT_CONFIG_ERROR_INT_VALUE;
      String message = "Invalid configuration for synchronizer parameter:"
          + " 'synchronizer.syncTask.initialDelayInMs'";
      LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message);
    }

    // parse config for periodic sync
    try {
      syncTaskFrequencyInDay =
          Integer.parseInt(props.getProperty("synchronizer.syncTask.taskFrequencyInDay",
              SynchronizerConstants.DEFAULT_TASK_FREQUENCY_IN_DAY));
      if (syncTaskFrequencyInDay < 0) {
        throw new Exception();
      }
    } catch (Exception exc) {
      this.setConfigOkForPeriodicSync(false);
      syncTaskFrequencyInDay = SynchronizerConstants.DEFAULT_CONFIG_ERROR_INT_VALUE;
      String message = "Invalid configuration for synchronizer parameter:"
          + " 'synchronizer.syncTask.taskFrequencyInDay'";
      LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message);
    }

    try {
      syncTaskStartTime = props.getProperty("synchronizer.syncTask.startTimestamp",
          SynchronizerConstants.DEFAULT_START_TIMESTAMP); // Default 05:00:00 UTC
      Pattern pattern = Pattern.compile(SynchronizerConstants.TIMESTAMP24HOURS_PATTERN);
      Matcher matcher = pattern.matcher(syncTaskStartTime);
      if (!matcher.matches()) {
        throw new Exception();
      }

      List<String> timestampVal = Arrays.asList(syncTaskStartTime.split(" "));

      if (timestampVal.size() == SynchronizerConstants.COMPONENTS_IN_TIMESTAMP) {
        // Need both time and timezone offset; Calendar/TimeZone APIs expect
        // "GMT" prefixes, the config file uses "UTC"
        syncTaskStartTimeTimeZone = timestampVal
            .get(SynchronizerConstants.IDX_TIMEZONE_IN_TIMESTAMP).replaceAll("UTC", "GMT");

        String time = timestampVal.get(SynchronizerConstants.IDX_TIME_IN_TIMESTAMP);
        DateFormat format = new SimpleDateFormat("HH:mm:ss");
        Date date = format.parse(time);
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);

        syncTaskStartTimeHr = calendar.get(Calendar.HOUR_OF_DAY);
        syncTaskStartTimeMin = calendar.get(Calendar.MINUTE);
        syncTaskStartTimeSec = calendar.get(Calendar.SECOND);
      } else {
        LOG.info(AaiUiMsgs.SYNC_START_TIME);
      }
    } catch (Exception exc) {
      this.setConfigOkForPeriodicSync(false);
      String message = "Invalid configuration for synchronizer parameter:"
          + " 'synchronizer.syncTask.startTimestamp'";
      LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message);
    }

    // Defaults come from SynchronizerConstants so they stay in one place
    // (previously the literals "5"/"5000" were duplicated here).
    scrollContextTimeToLiveInMinutes =
        Integer.parseInt(props.getProperty("synchronizer.scrollContextTimeToLiveInMinutes",
            SynchronizerConstants.DEFAULT_SCROLL_CTX_TIME_TO_LIVE_IN_MIN));
    numScrollContextItemsToRetrievePerRequest = Integer.parseInt(
        props.getProperty("synchronizer.numScrollContextItemsToRetrievePerRequest",
            SynchronizerConstants.DEFAULT_NUM_SCROLL_CTX_ITEMS_TO_RETRIEVE_PER_REQ));

    resolverProgressLogFrequencyInMs = Long
        .parseLong(props.getProperty("synchronizer.resolver.progressLogFrequencyInMs", "60000"));
    resolverQueueMonitorFrequencyInMs = Long
        .parseLong(props.getProperty("synchronizer.resolver.queueMonitorFrequencyInMs", "1000"));

    indexIntegrityValidatorEnabled = Boolean
        .parseBoolean(props.getProperty("synchronizer.indexIntegrityValidator.enabled", "false"));
    indexIntegrityValidatorFrequencyInMs = Long.parseLong(
        props.getProperty("synchronizer.indexIntegrityValidatorFrequencyInMs", "300000"));

    // parseBoolean(null) yields false, so flags without a default are off
    // unless explicitly enabled in the properties file
    displayVerboseQueueManagerStats = Boolean
        .parseBoolean(props.getProperty("synchronizer.resolver.displayVerboseQueueManagerStats"));

    resourceNotFoundErrorsSupressed =
        Boolean.parseBoolean(props.getProperty("synchronizer.suppressResourceNotFoundErrors"));

    nodesOnlyModifierEnabled =
        Boolean.parseBoolean(props.getProperty("synchronizer.applyNodesOnlyModifier"));

    historicalEntitySummarizerEnabled = Boolean
        .parseBoolean(props.getProperty("synchronizer.historicalEntitySummarizerEnabled", "true"));
    historicalEntitySummarizedFrequencyInMinutes = Long.parseLong(
        props.getProperty("synchronizer.historicalEntitySummarizedFrequencyInMinutes", "60"));

    autosuggestSynchronizationEnabled = Boolean
        .parseBoolean(props.getProperty("synchronizer.autosuggestSynchronizationEnabled", "true"));

    if (LOG.isDebugEnabled()) {
      LOG.debug(AaiUiMsgs.DEBUG_GENERIC, this.toString());
    }
  }

  public boolean isNodesOnlyModifierEnabled() {
    return nodesOnlyModifierEnabled;
  }

  public void setNodesOnlyModifierEnabled(boolean nodesOnlyModifierEnabled) {
    this.nodesOnlyModifierEnabled = nodesOnlyModifierEnabled;
  }

  public int getSyncTaskInitialDelayInMs() {
    return syncTaskInitialDelayInMs;
  }

  public void setSyncTaskInitialDelayInMs(int syncTaskInitialDelayInMs) {
    this.syncTaskInitialDelayInMs = syncTaskInitialDelayInMs;
  }

  public boolean isDisplayVerboseQueueManagerStats() {
    return displayVerboseQueueManagerStats;
  }

  public void setDisplayVerboseQueueManagerStats(boolean displayVerboseQueueManagerStats) {
    this.displayVerboseQueueManagerStats = displayVerboseQueueManagerStats;
  }

  public boolean isHistoricalEntitySummarizerEnabled() {
    return historicalEntitySummarizerEnabled;
  }

  public void setHistoricalEntitySummarizerEnabled(boolean historicalEntitySummarizerEnabled) {
    this.historicalEntitySummarizerEnabled = historicalEntitySummarizerEnabled;
  }

  public long getHistoricalEntitySummarizedFrequencyInMinutes() {
    return historicalEntitySummarizedFrequencyInMinutes;
  }

  public void setHistoricalEntitySummarizedFrequencyInMinutes(
      long historicalEntitySummarizedFrequencyInMinutes) {
    this.historicalEntitySummarizedFrequencyInMinutes =
        historicalEntitySummarizedFrequencyInMinutes;
  }

  private int syncTaskInitialDelayInMs;

  private int syncTaskFrequencyInMs;

  private int scrollContextTimeToLiveInMinutes;

  private int numScrollContextItemsToRetrievePerRequest;

  private long resolverProgressLogFrequencyInMs;

  private long resolverQueueMonitorFrequencyInMs;

  private boolean indexIntegrityValidatorEnabled;

  private long indexIntegrityValidatorFrequencyInMs;

  private int syncTaskFrequencyInDay;

  private String syncTaskStartTime;

  private int syncTaskStartTimeHr = 5; // for default sync start time

  private int syncTaskStartTimeMin;

  private int syncTaskStartTimeSec;

  private String syncTaskStartTimeTimeZone;

  private boolean displayVerboseQueueManagerStats;

  private boolean resourceNotFoundErrorsSupressed;

  private boolean nodesOnlyModifierEnabled;

  private boolean historicalEntitySummarizerEnabled;

  private boolean autosuggestSynchronizationEnabled;

  private long historicalEntitySummarizedFrequencyInMinutes;

  private boolean configOkForStartupSync = true;

  private boolean configOkForPeriodicSync = true;

  public boolean isResourceNotFoundErrorsSupressed() {
    return resourceNotFoundErrorsSupressed;
  }

  public void setResourceNotFoundErrorsSupressed(boolean resourceNotFoundErrorsSupressed) {
    this.resourceNotFoundErrorsSupressed = resourceNotFoundErrorsSupressed;
  }

  public int getScrollContextTimeToLiveInMinutes() {
    return scrollContextTimeToLiveInMinutes;
  }

  public void setScrollContextTimeToLiveInMinutes(int scrollContextTimeToLiveInMinutes) {
    this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes;
  }

  public int getNumScrollContextItemsToRetrievePerRequest() {
    return numScrollContextItemsToRetrievePerRequest;
  }

  public void setNumScrollContextItemsToRetrievePerRequest(
      int numScrollContextItemsToRetrievePerRequest) {
    this.numScrollContextItemsToRetrievePerRequest = numScrollContextItemsToRetrievePerRequest;
  }

  public int getSyncTaskFrequencyInDay() {
    return syncTaskFrequencyInDay;
  }

  public void setSyncTaskFrequencyInDay(int syncTaskFrequencyInDay) {
    this.syncTaskFrequencyInDay = syncTaskFrequencyInDay;
  }

  public String getSyncTaskStartTime() {
    return syncTaskStartTime;
  }

  public void setSyncTaskStartTime(String syncTaskStartTime) {
    this.syncTaskStartTime = syncTaskStartTime;
  }

  public int getSyncTaskStartTimeHr() {
    return syncTaskStartTimeHr;
  }

  public void setSyncTaskStartTimeHr(int syncTaskStartTimeHr) {
    this.syncTaskStartTimeHr = syncTaskStartTimeHr;
  }

  public int getSyncTaskStartTimeMin() {
    return syncTaskStartTimeMin;
  }

  public void setSyncTaskStartTimeMin(int syncTaskStartTimeMin) {
    this.syncTaskStartTimeMin = syncTaskStartTimeMin;
  }

  public int getSyncTaskStartTimeSec() {
    return syncTaskStartTimeSec;
  }

  public void setSyncTaskStartTimeSec(int syncTaskStartTimeSec) {
    this.syncTaskStartTimeSec = syncTaskStartTimeSec;
  }

  public String getSyncTaskStartTimeTimeZone() {
    return syncTaskStartTimeTimeZone;
  }

  public void setSyncTaskStartTimeTimeZone(String syncTaskStartTimeTimeZone) {
    this.syncTaskStartTimeTimeZone = syncTaskStartTimeTimeZone;
  }

  public int getSyncTaskFrequencyInMs() {
    return syncTaskFrequencyInMs;
  }

  public void setSyncTaskFrequencyInMs(int syncTaskFrequencyInMs) {
    this.syncTaskFrequencyInMs = syncTaskFrequencyInMs;
  }

  public long getResolverProgressLogFrequencyInMs() {
    return resolverProgressLogFrequencyInMs;
  }

  public void setResolverProgressLogFrequencyInMs(long resolverProgressLogFrequencyInMs) {
    this.resolverProgressLogFrequencyInMs = resolverProgressLogFrequencyInMs;
  }

  public long getResolverQueueMonitorFrequencyInMs() {
    return resolverQueueMonitorFrequencyInMs;
  }

  public void setResolverQueueMonitorFrequencyInMs(long resolverQueueMonitorFrequencyInMs) {
    this.resolverQueueMonitorFrequencyInMs = resolverQueueMonitorFrequencyInMs;
  }

  public boolean isIndexIntegrityValidatorEnabled() {
    return indexIntegrityValidatorEnabled;
  }

  public void setIndexIntegrityValidatorEnabled(boolean indexIntegrityValidatorEnabled) {
    this.indexIntegrityValidatorEnabled = indexIntegrityValidatorEnabled;
  }

  public long getIndexIntegrityValidatorFrequencyInMs() {
    return indexIntegrityValidatorFrequencyInMs;
  }

  public void setIndexIntegrityValidatorFrequencyInMs(long indexIntegrityValidatorFrequencyInMs) {
    this.indexIntegrityValidatorFrequencyInMs = indexIntegrityValidatorFrequencyInMs;
  }

  public boolean isConfigOkForStartupSync() {
    return configOkForStartupSync;
  }

  public void setConfigOkForStartupSync(boolean configOkForStartupSync) {
    this.configOkForStartupSync = configOkForStartupSync;
  }

  public boolean isConfigOkForPeriodicSync() {
    return configOkForPeriodicSync;
  }

  public void setConfigOkForPeriodicSync(boolean configOkForPeriodicSync) {
    this.configOkForPeriodicSync = configOkForPeriodicSync;
  }

  /**
   * Returns true when the configured initial delay is the sentinel that
   * disables the startup sync entirely.
   */
  public boolean isConfigDisabledForInitialSync() {
    return syncTaskInitialDelayInMs == SynchronizerConstants.DELAY_NO_STARTUP_SYNC_IN_MS;
  }

  public boolean isAutosuggestSynchronizationEnabled() {
    return autosuggestSynchronizationEnabled;
  }

  public void setAutosuggestSynchronizationEnabled(boolean autosuggestSynchronizationEnabled) {
    this.autosuggestSynchronizationEnabled = autosuggestSynchronizationEnabled;
  }

  /* (non-Javadoc)
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    // "ssyncTaskFrequencyInDay" typo in the label fixed
    return "SynchronizerConfiguration [syncTaskInitialDelayInMs=" + syncTaskInitialDelayInMs
        + ", syncTaskFrequencyInMs=" + syncTaskFrequencyInMs + ", scrollContextTimeToLiveInMinutes="
        + scrollContextTimeToLiveInMinutes + ", numScrollContextItemsToRetrievePerRequest="
        + numScrollContextItemsToRetrievePerRequest + ", resolverProgressLogFrequencyInMs="
        + resolverProgressLogFrequencyInMs + ", resolverQueueMonitorFrequencyInMs="
        + resolverQueueMonitorFrequencyInMs + ", indexIntegrityValidatorEnabled="
        + indexIntegrityValidatorEnabled + ", indexIntegrityValidatorFrequencyInMs="
        + indexIntegrityValidatorFrequencyInMs + ", syncTaskFrequencyInDay="
        + syncTaskFrequencyInDay + ", syncTaskStartTime=" + syncTaskStartTime
        + ", syncTaskStartTimeHr=" + syncTaskStartTimeHr + ", syncTaskStartTimeMin="
        + syncTaskStartTimeMin + ", syncTaskStartTimeSec=" + syncTaskStartTimeSec
        + ", syncTaskStartTimeTimeZone=" + syncTaskStartTimeTimeZone
        + ", displayVerboseQueueManagerStats=" + displayVerboseQueueManagerStats
        + ", resourceNotFoundErrorsSupressed=" + resourceNotFoundErrorsSupressed
        + ", nodesOnlyModifierEnabled=" + nodesOnlyModifierEnabled + ", configOKForStartupSync="
        + configOkForStartupSync + ", configOKForPeriodicSync=" + configOkForPeriodicSync
        + ", autosuggestSynchronizationEnabled=" + autosuggestSynchronizationEnabled + "]";
  }

}
import java.util.Date;

/**
 * Shared constant values for the synchronizer subsystem: sentinel values used
 * when configuration parsing fails, scheduling defaults, and the regex used to
 * validate the periodic-sync start timestamp.
 */
public final class SynchronizerConstants {

  /** Non-instantiable: constants only. */
  private SynchronizerConstants() {}

  // Sentinels stored when user-supplied configuration is invalid.
  public static final int DEFAULT_CONFIG_ERROR_INT_VALUE = Integer.MAX_VALUE;
  public static final Date DEFAULT_CONFIG_ERROR_DATE_VALUE = new Date(Long.MAX_VALUE);

  // Scheduling defaults and timestamp layout ("HH:mm:ss UTC+hh:mm").
  public static final int COMPONENTS_IN_TIMESTAMP = 2;
  public static final String DEFAULT_INITIAL_DELAY_IN_MS = "0";
  public static final String DEFAULT_TASK_FREQUENCY_IN_DAY = "0";
  public static final String DEFAULT_START_TIMESTAMP = "05:00:00 UTC";
  public static final long DELAY_NO_STARTUP_SYNC_IN_MS = 0;
  public static final long DELAY_NO_PERIODIC_SYNC_IN_MS = 0;
  public static final int IDX_TIME_IN_TIMESTAMP = 0;
  public static final int IDX_TIMEZONE_IN_TIMESTAMP = 1;
  public static final long MILLISEC_IN_A_MIN = 60000;
  public static final long MILLISEC_IN_A_DAY = 24 * 60 * 60 * 1000;
  public static final String TIME_STD = "GMT";
  public static final String TIME_CONFIG_STD = "UTC";

  // Accepts 24h time plus a UTC offset, e.g. "05:00:00 UTC+00:00".
  public static final String TIMESTAMP24HOURS_PATTERN =
      "([01]?[0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9] UTC[+|-][0-5][0-9]:[0-5][0-9]";

  // Elasticsearch scroll-context defaults (kept as strings: they feed
  // Properties.getProperty default arguments).
  public static final String DEFAULT_SCROLL_CTX_TIME_TO_LIVE_IN_MIN = "5";
  public static final String DEFAULT_NUM_SCROLL_CTX_ITEMS_TO_RETRIEVE_PER_REQ = "5000";
}
import java.util.Properties;

/**
 * Configuration bean for the synchronizer task processor: worker-pool sizing,
 * an optional transaction-rate controller, and axis/bin settings for five
 * diagnostic histograms (bytes, queue length, task age, response time, TPS).
 */
public class TaskProcessorConfig {

  /**
   * Populates every field from the supplied properties.
   *
   * <p>No-op when {@code props} is null. As in the original implementation, a
   * missing numeric key fails fast with a {@link NumberFormatException}
   * (parse of a null string); missing boolean keys silently become false.
   *
   * @param props the props
   */
  public void initializeFromProperties(Properties props) {

    if (props == null) {
      return;
    }

    maxConcurrentWorkers = intProp(props, "maxConcurrentWorkers");
    transactionRateControllerEnabled = boolProp(props, "transactionRateControllerEnabled");
    numSamplesPerThreadForRunningAverage =
        intProp(props, "numSamplesPerThreadForRunningAverage");
    targetTps = Double.parseDouble(props.getProperty("targetTPS"));

    bytesHistogramLabel = props.getProperty("bytesHistogramLabel");
    bytesHistogramMaxYAxis = longProp(props, "bytesHistogramMaxYAxis");
    bytesHistogramNumBins = intProp(props, "bytesHistogramNumBins");
    bytesHistogramNumDecimalPoints = intProp(props, "bytesHistogramNumDecimalPoints");

    queueLengthHistogramLabel = props.getProperty("queueLengthHistogramLabel");
    queueLengthHistogramMaxYAxis = longProp(props, "queueLengthHistogramMaxYAxis");
    queueLengthHistogramNumBins = intProp(props, "queueLengthHistogramNumBins");
    queueLengthHistogramNumDecimalPoints =
        intProp(props, "queueLengthHistogramNumDecimalPoints");

    taskAgeHistogramLabel = props.getProperty("taskAgeHistogramLabel");
    taskAgeHistogramMaxYAxis = longProp(props, "taskAgeHistogramMaxYAxis");
    taskAgeHistogramNumBins = intProp(props, "taskAgeHistogramNumBins");
    taskAgeHistogramNumDecimalPoints = intProp(props, "taskAgeHistogramNumDecimalPoints");

    responseTimeHistogramLabel = props.getProperty("responseTimeHistogramLabel");
    responseTimeHistogramMaxYAxis = longProp(props, "responseTimeHistogramMaxYAxis");
    responseTimeHistogramNumBins = intProp(props, "responseTimeHistogramNumBins");
    responseTimeHistogramNumDecimalPoints =
        intProp(props, "responseTimeHistogramNumDecimalPoints");

    tpsHistogramLabel = props.getProperty("tpsHistogramLabel");
    tpsHistogramMaxYAxis = longProp(props, "tpsHistogramMaxYAxis");
    tpsHistogramNumBins = intProp(props, "tpsHistogramNumBins");
    tpsHistogramNumDecimalPoints = intProp(props, "tpsHistogramNumDecimalPoints");
  }

  /** Parses the named property as int (fails fast when the key is missing). */
  private static int intProp(Properties props, String key) {
    return Integer.parseInt(props.getProperty(key));
  }

  /** Parses the named property as long (fails fast when the key is missing). */
  private static long longProp(Properties props, String key) {
    return Long.parseLong(props.getProperty(key));
  }

  /** Parses the named property as boolean (missing key yields false). */
  private static boolean boolProp(Properties props, String key) {
    return Boolean.parseBoolean(props.getProperty(key));
  }

  private int maxConcurrentWorkers;

  private boolean transactionRateControllerEnabled;

  private int numSamplesPerThreadForRunningAverage;

  private double targetTps;

  private String bytesHistogramLabel;

  private long bytesHistogramMaxYAxis;

  private int bytesHistogramNumBins;

  private int bytesHistogramNumDecimalPoints;

  private String queueLengthHistogramLabel;

  private long queueLengthHistogramMaxYAxis;

  private int queueLengthHistogramNumBins;

  private int queueLengthHistogramNumDecimalPoints;

  private String taskAgeHistogramLabel;

  private long taskAgeHistogramMaxYAxis;

  private int taskAgeHistogramNumBins;

  private int taskAgeHistogramNumDecimalPoints;

  private String responseTimeHistogramLabel;

  private long responseTimeHistogramMaxYAxis;

  private int responseTimeHistogramNumBins;

  private int responseTimeHistogramNumDecimalPoints;

  private String tpsHistogramLabel;

  private long tpsHistogramMaxYAxis;

  private int tpsHistogramNumBins;

  private int tpsHistogramNumDecimalPoints;

  public String getBytesHistogramLabel() {
    return bytesHistogramLabel;
  }

  public void setBytesHistogramLabel(String bytesHistogramLabel) {
    this.bytesHistogramLabel = bytesHistogramLabel;
  }

  public long getBytesHistogramMaxYAxis() {
    return bytesHistogramMaxYAxis;
  }

  public void setBytesHistogramMaxYAxis(long bytesHistogramMaxYAxis) {
    this.bytesHistogramMaxYAxis = bytesHistogramMaxYAxis;
  }

  public int getBytesHistogramNumBins() {
    return bytesHistogramNumBins;
  }

  public void setBytesHistogramNumBins(int bytesHistogramNumBins) {
    this.bytesHistogramNumBins = bytesHistogramNumBins;
  }

  public int getBytesHistogramNumDecimalPoints() {
    return bytesHistogramNumDecimalPoints;
  }

  public void setBytesHistogramNumDecimalPoints(int bytesHistogramNumDecimalPoints) {
    this.bytesHistogramNumDecimalPoints = bytesHistogramNumDecimalPoints;
  }

  public String getQueueLengthHistogramLabel() {
    return queueLengthHistogramLabel;
  }

  public void setQueueLengthHistogramLabel(String queueLengthHistogramLabel) {
    this.queueLengthHistogramLabel = queueLengthHistogramLabel;
  }

  public long getQueueLengthHistogramMaxYAxis() {
    return queueLengthHistogramMaxYAxis;
  }

  public void setQueueLengthHistogramMaxYAxis(long queueLengthHistogramMaxYAxis) {
    this.queueLengthHistogramMaxYAxis = queueLengthHistogramMaxYAxis;
  }

  public int getQueueLengthHistogramNumBins() {
    return queueLengthHistogramNumBins;
  }

  public void setQueueLengthHistogramNumBins(int queueLengthHistogramNumBins) {
    this.queueLengthHistogramNumBins = queueLengthHistogramNumBins;
  }

  public int getQueueLengthHistogramNumDecimalPoints() {
    return queueLengthHistogramNumDecimalPoints;
  }

  public void setQueueLengthHistogramNumDecimalPoints(int queueLengthHistogramNumDecimalPoints) {
    this.queueLengthHistogramNumDecimalPoints = queueLengthHistogramNumDecimalPoints;
  }

  public boolean isTransactionRateControllerEnabled() {
    return transactionRateControllerEnabled;
  }

  public void setTransactionRateControllerEnabled(boolean transactionRateControllerEnabled) {
    this.transactionRateControllerEnabled = transactionRateControllerEnabled;
  }

  public int getNumSamplesPerThreadForRunningAverage() {
    return numSamplesPerThreadForRunningAverage;
  }

  public void setNumSamplesPerThreadForRunningAverage(int numSamplesPerThreadForRunningAverage) {
    this.numSamplesPerThreadForRunningAverage = numSamplesPerThreadForRunningAverage;
  }

  public double getTargetTps() {
    return targetTps;
  }

  public void setTargetTps(double targetTps) {
    this.targetTps = targetTps;
  }

  public int getMaxConcurrentWorkers() {
    return maxConcurrentWorkers;
  }

  public void setMaxConcurrentWorkers(int maxConcurrentWorkers) {
    this.maxConcurrentWorkers = maxConcurrentWorkers;
  }

  public String getTaskAgeHistogramLabel() {
    return taskAgeHistogramLabel;
  }

  public void setTaskAgeHistogramLabel(String taskAgeHistogramLabel) {
    this.taskAgeHistogramLabel = taskAgeHistogramLabel;
  }

  public long getTaskAgeHistogramMaxYAxis() {
    return taskAgeHistogramMaxYAxis;
  }

  public void setTaskAgeHistogramMaxYAxis(long taskAgeHistogramMaxYAxis) {
    this.taskAgeHistogramMaxYAxis = taskAgeHistogramMaxYAxis;
  }

  public int getTaskAgeHistogramNumBins() {
    return taskAgeHistogramNumBins;
  }

  public void setTaskAgeHistogramNumBins(int taskAgeHistogramNumBins) {
    this.taskAgeHistogramNumBins = taskAgeHistogramNumBins;
  }

  public int getTaskAgeHistogramNumDecimalPoints() {
    return taskAgeHistogramNumDecimalPoints;
  }

  public void setTaskAgeHistogramNumDecimalPoints(int taskAgeHistogramNumDecimalPoints) {
    this.taskAgeHistogramNumDecimalPoints = taskAgeHistogramNumDecimalPoints;
  }

  public String getResponseTimeHistogramLabel() {
    return responseTimeHistogramLabel;
  }

  public void setResponseTimeHistogramLabel(String responseTimeHistogramLabel) {
    this.responseTimeHistogramLabel = responseTimeHistogramLabel;
  }

  public long getResponseTimeHistogramMaxYAxis() {
    return responseTimeHistogramMaxYAxis;
  }

  public void setResponseTimeHistogramMaxYAxis(long responseTimeHistogramMaxYAxis) {
    this.responseTimeHistogramMaxYAxis = responseTimeHistogramMaxYAxis;
  }

  public int getResponseTimeHistogramNumBins() {
    return responseTimeHistogramNumBins;
  }

  public void setResponseTimeHistogramNumBins(int responseTimeHistogramNumBins) {
    this.responseTimeHistogramNumBins = responseTimeHistogramNumBins;
  }

  public int getResponseTimeHistogramNumDecimalPoints() {
    return responseTimeHistogramNumDecimalPoints;
  }

  public void setResponseTimeHistogramNumDecimalPoints(int responseTimeHistogramNumDecimalPoints) {
    this.responseTimeHistogramNumDecimalPoints = responseTimeHistogramNumDecimalPoints;
  }

  public String getTpsHistogramLabel() {
    return tpsHistogramLabel;
  }

  public void setTpsHistogramLabel(String tpsHistogramLabel) {
    this.tpsHistogramLabel = tpsHistogramLabel;
  }

  public long getTpsHistogramMaxYAxis() {
    return tpsHistogramMaxYAxis;
  }

  public void setTpsHistogramMaxYAxis(long tpsHistogramMaxYAxis) {
    this.tpsHistogramMaxYAxis = tpsHistogramMaxYAxis;
  }

  public int getTpsHistogramNumBins() {
    return tpsHistogramNumBins;
  }

  public void setTpsHistogramNumBins(int tpsHistogramNumBins) {
    this.tpsHistogramNumBins = tpsHistogramNumBins;
  }

  public int getTpsHistogramNumDecimalPoints() {
    return tpsHistogramNumDecimalPoints;
  }

  public void setTpsHistogramNumDecimalPoints(int tpsHistogramNumDecimalPoints) {
    this.tpsHistogramNumDecimalPoints = tpsHistogramNumDecimalPoints;
  }

}
package org.openecomp.sparky.synchronizer.entity;

import java.util.HashMap;
import java.util.Map;

import org.openecomp.sparky.config.oxm.OxmModelLoader;
import org.openecomp.sparky.util.NodeUtils;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * Index document for the aggregation index: a flat key/value copy of an AAI
 * entity's attributes (minus relationship data) plus link and timestamp,
 * serialized to JSON for Elasticsearch.
 */
public class AggregationEntity extends IndexableEntity implements IndexDocument {
  private Map<String, String> attributes = new HashMap<String, String>();
  protected ObjectMapper mapper = new ObjectMapper();

  /**
   * Instantiates a new aggregation entity.
   */
  public AggregationEntity() {
    super();
  }

  /**
   * Instantiates a new aggregation entity.
   *
   * @param loader the loader
   */
  public AggregationEntity(OxmModelLoader loader) {
    super(loader);
  }

  /* (non-Javadoc)
   * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields()
   */
  @Override
  public void deriveFields() {

    /*
     * We'll try and create a unique identity key that we can use for differencing the previously
     * imported record sets as we won't have granular control of what is created/removed and when.
     * The best we can hope for is identification of resources by generated Id until the
     * Identity-Service UUID is tagged against all resources, then we can use that instead.
     */
    this.id = NodeUtils.generateUniqueShaDigest(link);
  }

  /**
   * Copies every attribute except "relationship-list" into this entity.
   *
   * <p>Uses entrySet iteration (single lookup per entry) instead of the
   * previous keySet-then-get pattern, and String.valueOf so a null attribute
   * value becomes "null" instead of throwing a NullPointerException.
   *
   * @param map source attribute map (values may be arbitrary objects)
   */
  public void copyAttributeKeyValuePair(Map<String, Object> map) {
    for (Map.Entry<String, Object> entry : map.entrySet()) {
      String key = entry.getKey();
      // ignore relationship data which is not required in aggregation
      if (!key.equalsIgnoreCase("relationship-list")) {
        // not sure if entity attribute can contain an object as value
        this.attributes.put(key, String.valueOf(entry.getValue()));
      }
    }
  }

  /**
   * Adds (or overwrites) a single attribute.
   *
   * @param key attribute name
   * @param value attribute value
   */
  public void addAttributeKeyValuePair(String key, String value) {
    this.attributes.put(key, value);
  }

  /**
   * Serializes link, lastmodTimestamp and all copied attributes into a flat
   * JSON object for indexing.
   */
  @Override
  public String getIndexDocumentJson() {
    ObjectNode rootNode = mapper.createObjectNode();
    rootNode.put("link", this.getLink());
    rootNode.put("lastmodTimestamp", this.getEntityTimeStamp());
    for (Map.Entry<String, String> entry : this.attributes.entrySet()) {
      rootNode.put(entry.getKey(), entry.getValue());
    }
    return rootNode.toString();
  }

  @Override
  public ObjectNode getBulkImportEntity() {
    // TODO Auto-generated method stub
    return null;
  }

  /* (non-Javadoc)
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "")
        + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", "
            : "")
        + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "")
        + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + "]";
  }
}
- */ -package org.openecomp.sparky.synchronizer.entity; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.util.ArrayList; -import java.util.List; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.openecomp.sparky.util.NodeUtils; - -public class AggregationSuggestionEntity extends IndexableEntity implements IndexDocument { - - private List<String> inputs = new ArrayList<String>(); - private final String outputString = "VNFs"; - protected ObjectMapper mapper = new ObjectMapper(); - - public AggregationSuggestionEntity() { - super(); - inputs.add("VNFs"); - inputs.add("generic-vnfs"); - } - - @Override - public void deriveFields() { - this.id = NodeUtils.generateUniqueShaDigest(this.outputString); - } - - @Override - public String getIndexDocumentJson() { - - JSONArray inputArray = new JSONArray(); - for (String input: inputs) { - input = input.replace(",","" ); - input = input.replace("[","" ); - input = input.replace("]","" ); - inputArray.put(input); - } - - JSONObject entitySuggest = new JSONObject(); - entitySuggest.put("input", inputArray); - entitySuggest.put("output", this.outputString); - entitySuggest.put("weight", 100); - - JSONObject payloadNode = new JSONObject(); - entitySuggest.put("payload", payloadNode); - - JSONObject rootNode = new JSONObject(); - rootNode.put("entity_suggest", entitySuggest); - - return rootNode.toString(); - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexDocument.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexDocument.java deleted file mode 100644 index f1de89d..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexDocument.java +++ /dev/null @@ -1,42 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * 
org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.entity; - -import com.fasterxml.jackson.databind.node.ObjectNode; - -/** - * The Interface IndexDocument. - */ -public interface IndexDocument { - - /** - * Derive fields. - */ - public void deriveFields(); - - public String getIndexDocumentJson(); - - public String getId(); - - public ObjectNode getBulkImportEntity(); -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableCrossEntityReference.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableCrossEntityReference.java deleted file mode 100644 index 76ef5c0..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableCrossEntityReference.java +++ /dev/null @@ -1,116 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. 
All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.entity; - -import java.util.ArrayList; - -import org.openecomp.sparky.config.oxm.OxmModelLoader; -import org.openecomp.sparky.util.NodeUtils; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - - -/** - * The Class IndexableCrossEntityReference. - */ - -public class IndexableCrossEntityReference extends IndexableEntity implements IndexDocument { - - protected String crossReferenceEntityValues; - protected ArrayList<String> crossEntityReferenceCollection = new ArrayList<String>(); - protected ObjectMapper mapper = new ObjectMapper(); - - /** - * Instantiates a new indexable cross entity reference. - */ - public IndexableCrossEntityReference() { - super(); - } - - /** - * Instantiates a new indexable cross entity reference. - * - * @param loader the loader - */ - public IndexableCrossEntityReference(OxmModelLoader loader) { - super(loader); - } - - /** - * Adds the cross entity reference value. 
- * - * @param crossEntityReferenceValue the cross entity reference value - */ - public void addCrossEntityReferenceValue(String crossEntityReferenceValue) { - if (!crossEntityReferenceCollection.contains(crossEntityReferenceValue)) { - crossEntityReferenceCollection.add(crossEntityReferenceValue); - } - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() - */ - @Override - public void deriveFields() { - this.id = NodeUtils.generateUniqueShaDigest(link); - this.crossReferenceEntityValues = NodeUtils.concatArray(crossEntityReferenceCollection, ";"); - } - - @Override - public String getIndexDocumentJson() { - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("entityType", this.getEntityType()); - rootNode.put("entityPrimaryKeyValue", this.getEntityPrimaryKeyValue()); - rootNode.put("crossEntityReferenceValues", crossReferenceEntityValues); - rootNode.put("link", link); - rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); - return rootNode.toString(); - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "IndexableCrossEntityReference [" - + (crossReferenceEntityValues != null - ? "crossReferenceEntityValues=" + crossReferenceEntityValues + ", " : "") - + (crossEntityReferenceCollection != null - ? "crossEntityReferenceCollection=" + crossEntityReferenceCollection + ", " : "") - + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") - + (entityType != null ? "entityType=" + entityType + ", " : "") - + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " - : "") - + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") - + (link != null ? "link=" + link + ", " : "") + (loader != null ? 
"loader=" + loader : "") - + "]"; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableEntity.java deleted file mode 100644 index 4999b3e..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableEntity.java +++ /dev/null @@ -1,103 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.entity; - -import java.sql.Timestamp; -import java.text.SimpleDateFormat; - -import org.openecomp.sparky.config.oxm.OxmModelLoader; - -/** - * The Class IndexableEntity. 
- */ -public abstract class IndexableEntity { - protected String id; // generated, SHA-256 digest - protected String entityType; - protected String entityPrimaryKeyValue; - protected String lastmodTimestamp; - protected String link; - protected OxmModelLoader loader; - - private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; - - /** - * Instantiates a new indexable entity. - */ - public IndexableEntity() { - SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); - Timestamp timestamp = new Timestamp(System.currentTimeMillis()); - String currentFormattedTimeStamp = dateFormat.format(timestamp); - this.setEntityTimeStamp(currentFormattedTimeStamp); - } - - /** - * Instantiates a new indexable entity. - * - * @param loader the loader - */ - public IndexableEntity(OxmModelLoader loader) { - this(); - this.loader = loader; - } - - public String getId() { - return id; - } - - public String getEntityType() { - return entityType; - } - - public String getEntityPrimaryKeyValue() { - return entityPrimaryKeyValue; - } - - public String getEntityTimeStamp() { - return lastmodTimestamp; - } - - public void setId(String id) { - this.id = id; - } - - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - public void setEntityPrimaryKeyValue(String fieldValue) { - this.entityPrimaryKeyValue = fieldValue; - } - - public void setEntityTimeStamp(String lastmodTimestamp) { - this.lastmodTimestamp = lastmodTimestamp; - } - - public String getLink() { - return link; - } - - public void setLink(String link) { - this.link = link; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/MergableEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/MergableEntity.java deleted file mode 100644 index e40383a..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/MergableEntity.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * 
============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.entity; - -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; - -import java.util.HashMap; -import java.util.Map; - -/** - * The Class MergableEntity. - */ -public class MergableEntity { - private Map<String, String> other = new HashMap<String, String>(); - - /** - * Any. - * - * @return the map - */ - @JsonAnyGetter - public Map<String, String> any() { - return other; - } - - /** - * Sets the. 
- * - * @param name the name - * @param value the value - */ - @JsonAnySetter - public void set(String name, String value) { - other.put(name, value); - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/ObjectIdCollection.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/ObjectIdCollection.java deleted file mode 100644 index ee79eb8..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/ObjectIdCollection.java +++ /dev/null @@ -1,76 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.entity; - -import java.util.Collection; -import java.util.List; -import java.util.concurrent.ConcurrentHashMap; - -/** - * The Class ObjectIdCollection. 
- */ -public class ObjectIdCollection { - - protected ConcurrentHashMap<String, String> importedObjectIds = - new ConcurrentHashMap<String, String>(); - - public Collection<String> getImportedObjectIds() { - return importedObjectIds.values(); - } - - /** - * Adds the object id. - * - * @param id the id - */ - public void addObjectId(String id) { - importedObjectIds.putIfAbsent(id, id); - } - - public int getSize() { - return importedObjectIds.values().size(); - } - - /** - * Adds the all. - * - * @param items the items - */ - public void addAll(List<String> items) { - if (items == null) { - return; - } - - items.stream().forEach((item) -> { - importedObjectIds.putIfAbsent(item, item); - }); - - } - - /** - * Clear. - */ - public void clear() { - importedObjectIds.clear(); - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/SearchableEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/SearchableEntity.java deleted file mode 100644 index d80ced2..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/SearchableEntity.java +++ /dev/null @@ -1,149 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.entity; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.util.ArrayList; -import java.util.List; - -import org.openecomp.sparky.config.oxm.OxmModelLoader; -import org.openecomp.sparky.util.NodeUtils; - -/** - * The Class SearchableEntity. - */ -public class SearchableEntity extends IndexableEntity implements IndexDocument { - protected List<String> searchTagCollection = new ArrayList<String>(); - protected List<String> searchTagIdCollection = new ArrayList<String>(); - protected ObjectMapper mapper = new ObjectMapper(); - - /** - * Instantiates a new searchable entity. - */ - public SearchableEntity() { - super(); - } - - /** - * Instantiates a new searchable entity. - * - * @param loader the loader - */ - public SearchableEntity(OxmModelLoader loader) { - super(loader); - } - - /* - * Generated fields, leave the settings for junit overrides - */ - protected String searchTags; // generated based on searchTagCollection values - protected String searchTagIDs; - - /** - * Generates the sha based id. - */ - public void generateId() { - this.id = NodeUtils.generateUniqueShaDigest(link); - } - - /* (non-Javadoc) - * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() - */ - @Override - public void deriveFields() { - - /* - * We'll try and create a unique identity key that we can use for differencing the previously - * imported record sets as we won't have granular control of what is created/removed and when. 
- * The best we can hope for is identification of resources by generated Id until the - * Identity-Service UUID is tagged against all resources, then we can use that instead. - */ - generateId(); - this.searchTags = NodeUtils.concatArray(searchTagCollection, ";"); - this.searchTagIDs = NodeUtils.concatArray(this.searchTagIdCollection, ";"); - } - - /** - * Adds the search tag with key. - * - * @param searchTag the search tag - * @param searchTagKey the key associated with the search tag (key:value) - */ - public void addSearchTagWithKey(String searchTag, String searchTagKey) { - searchTagIdCollection.add(searchTagKey); - searchTagCollection.add(searchTag); - } - - public List<String> getSearchTagCollection() { - return searchTagCollection; - } - - public String getSearchTags() { - return searchTags; - } - - public String getSearchTagIDs() { - return searchTagIDs; - } - - public List<String> getSearchTagIdCollection() { - return searchTagIdCollection; - } - - @Override - public String getIndexDocumentJson() { - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("entityType", this.getEntityType()); - rootNode.put("entityPrimaryKeyValue", this.getEntityPrimaryKeyValue()); - rootNode.put("searchTagIDs", this.getSearchTagIDs()); - rootNode.put("searchTags", this.getSearchTags()); - rootNode.put("link", this.getLink()); - rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); - return rootNode.toString(); - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "") - + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " - : "") - + (searchTagCollection != null ? "searchTagCollection=" + searchTagCollection + ", " : "") - + (searchTagIdCollection != null ? 
"searchTagIDCollection=" + searchTagIdCollection + ", " - : "") - + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") - + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") - + (searchTags != null ? "searchTags=" + searchTags + ", " : "") - + (searchTagIDs != null ? "searchTagIDs=" + searchTagIDs : "") + "]"; - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/SelfLinkDescriptor.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/SelfLinkDescriptor.java deleted file mode 100644 index a2a2ba3..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/SelfLinkDescriptor.java +++ /dev/null @@ -1,88 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.entity; - -/** - * The Class SelfLinkDescriptor. 
- */ -public class SelfLinkDescriptor { - private String selfLink; - private String entityType; - private String depthModifier; - - public String getDepthModifier() { - return depthModifier; - } - - public void setDepthModifier(String depthModifier) { - this.depthModifier = depthModifier; - } - - public String getSelfLink() { - return selfLink; - } - - public void setSelfLink(String selfLink) { - this.selfLink = selfLink; - } - - public String getEntityType() { - return entityType; - } - - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - public SelfLinkDescriptor(String selfLink) { - this(selfLink, null, null); - } - - /** - * Instantiates a new self link descriptor. - * - * @param selfLink the self link - * @param entityType the entity type - */ - public SelfLinkDescriptor(String selfLink, String entityType) { - this(selfLink, null, entityType); - } - - public SelfLinkDescriptor(String selfLink, String depthModifier, String entityType) { - this.selfLink = selfLink; - this.entityType = entityType; - this.depthModifier = depthModifier; - } - - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "SelfLinkDescriptor [" + (selfLink != null ? "selfLink=" + selfLink + ", " : "") - + (entityType != null ? "entityType=" + entityType + ", " : "") - + (depthModifier != null ? 
"depthModifier=" + depthModifier : "") + "]"; - } - -} - diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/SuggestionSearchEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/SuggestionSearchEntity.java deleted file mode 100644 index 0f06322..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/SuggestionSearchEntity.java +++ /dev/null @@ -1,277 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.openecomp.sparky.synchronizer.entity; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.openecomp.sparky.config.oxm.OxmModelLoader; -import org.openecomp.sparky.util.NodeUtils; - -public class SuggestionSearchEntity extends IndexableEntity implements IndexDocument { - - private String entityType; - private List<String> suggestionConnectorWords = new ArrayList<String>(); - private List<String> suggestionAttributeTypes = new ArrayList<String>(); - private List<String> suggestionAttributeValues = new ArrayList<String>(); - private List<String> suggestionTypeAliases = new ArrayList<String>(); - private List<String> suggestionInputPermutations = new ArrayList<String>(); - private List<String> suggestableAttr = new ArrayList<String>(); - private Map<String, String> payload = new HashMap<String, String>(); - private JSONObject payloadJsonNode = new JSONObject(); - private StringBuffer outputString = new StringBuffer(); - private String aliasToUse; - - public Map<String, String> getPayload() { - return payload; - } - - public void setPayload(Map<String, String> payload) { - this.payload = payload; - } - - - public JSONObject getPayloadJsonNode() { - return payloadJsonNode; - } - - public void setPayloadJsonNode(JSONObject payloadJsonNode) { - this.payloadJsonNode = payloadJsonNode; - } - - - protected ObjectMapper mapper = new ObjectMapper(); - - public SuggestionSearchEntity() { - super(); - } - - public void setSuggestableAttr(ArrayList<String> attributes) { - for (String attribute : attributes) { - this.suggestableAttr.add(attribute); - } - } - - public void setPayloadFromResponse(JsonNode node) { - Map<String, String> nodePayload = new 
HashMap<String, String>(); - if (suggestableAttr != null) { - for (String attribute : suggestableAttr) { - if (node.get(attribute) != null) { - nodePayload.put(attribute, node.get(attribute).asText()); - } - } - this.setPayload(nodePayload); - } - } - - - public SuggestionSearchEntity(OxmModelLoader loader) { - super(loader); - } - - @Override - public String getEntityType() { - return entityType; - } - - @Override - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - public List<String> getSuggestionConnectorWords() { - return suggestionConnectorWords; - } - - public void setSuggestionConnectorWords(List<String> suggestionConnectorWords) { - this.suggestionConnectorWords = suggestionConnectorWords; - } - - public List<String> getSuggestionPropertyTypes() { - return this.suggestionAttributeTypes; - } - - public void setSuggestionPropertyTypes(List<String> suggestionPropertyTypes) { - this.suggestionAttributeTypes = suggestionPropertyTypes; - } - - public List<String> getSuggestionAttributeValues() { - return this.suggestionAttributeValues; - } - - public void setSuggestionAttributeValues(List<String> suggestionAttributeValues) { - this.suggestionAttributeValues = suggestionAttributeValues; - } - - public List<String> getSuggestionAliases() { - return this.suggestionTypeAliases; - } - - public void setSuggestionAliases(List<String> suggestionAliases) { - this.suggestionTypeAliases = suggestionAliases; - } - - public List<String> getSuggestionInputPermutations() { - return this.suggestionInputPermutations; - } - - public void setSuggestionInputPermutations(List<String> permutations) { - this.suggestionInputPermutations = permutations; - } - - public void generateSuggestionInputPermutations() { - - - List<String> entityNames = new ArrayList<>(); - entityNames.add(entityType); - HashMap<String, String> desc = loader.getOxmModel().get(this.entityType); - String attr = desc.get("suggestionAliases"); - String[] suggestionAliasesArray = 
attr.split(","); - suggestionTypeAliases = Arrays.asList(suggestionAliasesArray); - this.setAliasToUse(suggestionAliasesArray[suggestionAliasesArray.length - 1]); - for (String alias : suggestionTypeAliases) { - entityNames.add(alias); - } - ArrayList<String> listOfSearchSuggestionPermutations = new ArrayList<>(); - - ArrayList<String> listToPermutate = new ArrayList<>(payload.values()); - - for (String entityName : entityNames) { - listToPermutate.add(entityName); - permutateList(listToPermutate, new ArrayList<String>(), listToPermutate.size(), - listOfSearchSuggestionPermutations); - listToPermutate.remove(entityName); - } - suggestionInputPermutations = listOfSearchSuggestionPermutations; - } - - /** - * Generate all permutations of a list of Strings - * - * @param list - * @param permutation - * @param size - */ - private void permutateList(List<String> list, List<String> permutation, int size, - List<String> listOfSearchSuggestionPermutationList) { - if (permutation.size() == size) { - StringBuilder newPermutation = new StringBuilder(); - - for (int i = 0; i < permutation.size(); i++) { - newPermutation.append(permutation.get(i)).append(" "); - } - - listOfSearchSuggestionPermutationList.add(newPermutation.toString().trim()); - - return; - } - - String[] availableItems = list.toArray(new String[0]); - - for (String i : availableItems) { - permutation.add(i); - list.remove(i); - permutateList(list, permutation, size, listOfSearchSuggestionPermutationList); - list.add(i); - permutation.remove(i); - } - } - - public boolean isSuggestableDoc() { - return this.getPayload().size() != 0; - } - - - @Override - public void deriveFields() { - - int payloadEntryCounter = 1; - for (Map.Entry<String, String> payload : getPayload().entrySet()) { - // Add the payload(status) only if a valid value is present - if (payload.getValue() != null &&payload.getValue().length() > 0) { - this.getPayloadJsonNode().put(payload.getKey(), payload.getValue()); - 
this.outputString.append(payload.getValue()); - if (payloadEntryCounter < getPayload().entrySet().size()) { - this.outputString.append(" and "); - } else{ - this.outputString.append(" "); - } - } - payloadEntryCounter++; - } - - this.outputString.append(this.getAliasToUse()); - this.id = NodeUtils.generateUniqueShaDigest(outputString.toString()); - } - - @Override - public String getIndexDocumentJson() { - // TODO Auto-generated method stub - JSONObject rootNode = new JSONObject(); - - JSONArray suggestionsArray = new JSONArray(); - for (String suggestion : suggestionInputPermutations) { - suggestionsArray.put(suggestion); - } - - JSONObject entitySuggest = new JSONObject(); - - entitySuggest.put("input", suggestionsArray); - entitySuggest.put("output", this.outputString); - entitySuggest.put("payload", this.payloadJsonNode); - rootNode.put("entity_suggest", entitySuggest); - - return rootNode.toString(); - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - - public String getAliasToUse() { - return aliasToUse; - } - - public void setAliasToUse(String aliasToUse) { - this.aliasToUse = aliasToUse; - } - - @Override - public String toString() { - return "SuggestionSearchEntity [entityType=" + entityType + ", suggestionConnectorWords=" - + suggestionConnectorWords + ", suggestionAttributeTypes=" + suggestionAttributeTypes - + ", suggestionAttributeValues=" + suggestionAttributeValues + ", suggestionTypeAliases=" - + suggestionTypeAliases + ", mapper=" + mapper + "]"; - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/TransactionStorageType.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/TransactionStorageType.java deleted file mode 100644 index b7557f3..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/entity/TransactionStorageType.java +++ /dev/null @@ -1,54 +0,0 @@ -/** - * 
/**
 * Enumerates the kinds of offline transaction artifacts and the on-disk
 * folder each kind is persisted to.
 */
public enum TransactionStorageType {
  EDGE_TAG_QUERY(0, "aaiOffline/edge-tag-query"),
  ACTIVE_INVENTORY_QUERY(1, "aaiOffline/active-inventory-query");

  /** Stable numeric identifier for this storage type. */
  private final Integer index;
  /** Relative folder where artifacts of this type are written. */
  private final String outputFolder;

  /**
   * Creates a storage-type constant.
   *
   * @param index the stable numeric identifier
   * @param outputFolder the relative output folder for this type
   */
  TransactionStorageType(Integer index, String outputFolder) {
    this.index = index;
    this.outputFolder = outputFolder;
  }

  public Integer getIndex() {
    return index;
  }

  public String getOutputFolder() {
    return outputFolder;
  }
}
- */ -public enum OperationState { - INIT, OK, ERROR, ABORT, PENDING -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/enumeration/SynchronizerState.java b/src/main/java/org/openecomp/sparky/synchronizer/enumeration/SynchronizerState.java deleted file mode 100644 index 1791893..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/enumeration/SynchronizerState.java +++ /dev/null @@ -1,30 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.enumeration; - -/** - * The Enum SynchronizerState. 
- */ -public enum SynchronizerState { - IDLE, PERFORMING_SYNCHRONIZATION -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java b/src/main/java/org/openecomp/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java deleted file mode 100644 index 1214097..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java +++ /dev/null @@ -1,108 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.openecomp.sparky.synchronizer.filter; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; - -import org.openecomp.cl.api.Logger; -import org.openecomp.cl.eelf.LoggerFactory; -import org.openecomp.sparky.config.oxm.OxmModelLoader; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.openecomp.sparky.synchronizer.SyncHelper; -import org.openecomp.sparky.util.NodeUtils; - -import org.openecomp.cl.mdc.MdcContext; - -/* - * This is a wire-frame for an experiment to get the jetty filter-lifecyle initialization method to - * setup a scheduled thread executor with an ElasticSearchSynchronization task, which (I'm hoping) - * will allow us to do periodic ES <=> AAI synchronization. - * - * Alternatively, if the embedded java approach doesn't work we could try instead to do a - * System.exec( "perl refreshElasticSearchInstance.pl"). We have two options, I'm hoping the - * embedded options will work for us. - */ - -/** - * The Class ElasticSearchSynchronizerFilter. 
- */ -public class ElasticSearchSynchronizerFilter implements Filter { - - private static final Logger LOG = LoggerFactory.getInstance().getLogger(ElasticSearchSynchronizerFilter.class); - - private SyncHelper syncHelper; - - /* (non-Javadoc) - * @see javax.servlet.Filter#destroy() - */ - @Override - public void destroy() { - - if (syncHelper != null) { - syncHelper.shutdown(); - } - } - - /* (non-Javadoc) - * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain) - */ - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) - throws IOException, ServletException { - - /* - * However, we will setup the filtermap with a url that should never get it, so we shouldn't - * ever be in here. - */ - - chain.doFilter(request, response); - } - - /* (non-Javadoc) - * @see javax.servlet.Filter#init(javax.servlet.FilterConfig) - */ - @Override - public void init(FilterConfig filterConfig) throws ServletException { - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "ElasticSearchSynchronizerFilter", "", "Init", ""); - - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "init()"); - - try { - new SyncHelper(OxmModelLoader.getInstance()); - } catch (Exception exc) { - throw new ServletException("Caught an exception while initializing filter", exc); - } - - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntitySelfLinkTask.java b/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntitySelfLinkTask.java deleted file mode 100644 index e2273cc..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntitySelfLinkTask.java +++ /dev/null @@ -1,74 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. 
All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.task; - -import java.util.function.Supplier; - -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; -import org.openecomp.sparky.dal.rest.OperationResult; - -/** - * The Class CollectEntitySelfLinkTask. - */ -public class CollectEntitySelfLinkTask implements Supplier<NetworkTransaction> { - - private NetworkTransaction txn; - - private ActiveInventoryDataProvider provider; - - /** - * Instantiates a new collect entity self link task. 
- * - * @param txn the txn - * @param provider the provider - */ - public CollectEntitySelfLinkTask(NetworkTransaction txn, ActiveInventoryDataProvider provider) { - this.txn = txn; - this.provider = provider; - } - - /* (non-Javadoc) - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - - txn.setTaskAgeInMs(); - - long startTimeInMs = System.currentTimeMillis(); - OperationResult result = null; - try { - result = provider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5); - } catch (Exception exc) { - result = new OperationResult(500, - "Caught an exception while trying to resolve link = " + exc.getMessage()); - } finally { - result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - txn.setOperationResult(result); - } - - return txn; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java b/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java deleted file mode 100644 index 98f4425..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java +++ /dev/null @@ -1,75 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.task; - -import java.util.function.Supplier; - -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; -import org.openecomp.sparky.dal.rest.OperationResult; - -/** - * The Class CollectEntityTypeSelfLinksTask. - */ -public class CollectEntityTypeSelfLinksTask implements Supplier<NetworkTransaction> { - - private ActiveInventoryDataProvider aaiProvider; - - private NetworkTransaction txn; - - /** - * Instantiates a new collect entity type self links task. 
- * - * @param txn the txn - * @param provider the provider - */ - public CollectEntityTypeSelfLinksTask(NetworkTransaction txn, - ActiveInventoryDataProvider provider) { - this.aaiProvider = provider; - this.txn = txn; - } - - /* (non-Javadoc) - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - - txn.setTaskAgeInMs(); - - long startTimeInMs = System.currentTimeMillis(); - OperationResult result = null; - try { - result = aaiProvider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5); - } catch (Exception exc) { - result = new OperationResult(500, - "Caught an exception while trying to resolve link = " + exc.getMessage()); - } finally { - result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - txn.setOperationResult(result); - } - - return txn; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java b/src/main/java/org/openecomp/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java deleted file mode 100644 index aaad6c9..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java +++ /dev/null @@ -1,75 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
/**
 * The Class GetCrossEntityReferenceEntityTask.
 *
 * Supplier task that fetches a cross-entity-reference entity from active
 * inventory and records the outcome (and elapsed time) on the transaction.
 */
public class GetCrossEntityReferenceEntityTask implements Supplier<NetworkTransaction> {

  private NetworkTransaction txn;

  private ActiveInventoryDataProvider provider;

  /**
   * Instantiates a new gets the cross entity reference entity task.
   *
   * @param txn the transaction carrying the link to resolve
   * @param provider the active-inventory data provider used for the query
   */
  public GetCrossEntityReferenceEntityTask(NetworkTransaction txn,
      ActiveInventoryDataProvider provider) {
    this.txn = txn;
    this.provider = provider;
  }

  /* (non-Javadoc)
   * @see java.util.function.Supplier#get()
   */
  @Override
  public NetworkTransaction get() {

    // Record how long the transaction waited in the queue before executing.
    txn.setTaskAgeInMs();

    long startTimeInMs = System.currentTimeMillis();
    OperationResult result = null;
    try {
      result = provider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5);
    } catch (Exception exc) {
      // Failures are mapped to a synthetic 500 result rather than propagated,
      // so the supplier always completes with a populated transaction.
      result = new OperationResult(500,
          "Caught an exception while trying to resolve link = " + exc.getMessage());
    } finally {
      result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs);
      txn.setOperationResult(result);
    }

    return txn;
  }

}
/*
 * Consider abstracting the tasks into common elements, because most of them
 * repeat a generic call flow pattern.
 */

/**
 * The Class PerformActiveInventoryRetrieval.
 *
 * Supplier task that performs a retried GET against active inventory for the
 * transaction's self-link, restoring the captured MDC logging context on the
 * executing thread first.
 */
public class PerformActiveInventoryRetrieval implements Supplier<NetworkTransaction> {

  private static Logger logger = LoggerFactory.getLogger(PerformActiveInventoryRetrieval.class);

  private NetworkTransaction txn;
  private ActiveInventoryDataProvider aaiProvider;
  // MDC context captured at construction time (on the submitting thread) so it
  // can be re-applied on whatever worker thread runs get().
  private Map<String, String> contextMap;

  /**
   * Instantiates a new perform active inventory retrieval.
   *
   * @param txn the transaction carrying the link to resolve
   * @param aaiProvider the AAI data provider used for the query
   */
  public PerformActiveInventoryRetrieval(NetworkTransaction txn,
      ActiveInventoryDataProvider aaiProvider) {
    this.txn = txn;
    this.aaiProvider = aaiProvider;
    this.contextMap = MDC.getCopyOfContextMap();
  }

  /* (non-Javadoc)
   * @see java.util.function.Supplier#get()
   */
  @Override
  public NetworkTransaction get() {

    // Record queue-wait age before the network call begins.
    txn.setTaskAgeInMs();

    long startTimeInMs = System.currentTimeMillis();
    // Re-apply the submitting thread's MDC context on this worker thread.
    MDC.setContextMap(contextMap);
    OperationResult result = null;
    try {
      // todo: use proper config instead of hard-coding parameters
      result = aaiProvider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5);
    } catch (Exception exc) {
      logger.error("Failure to resolve self link from AAI. Error = ", exc);
      result = new OperationResult(500,
          "Caught an exception while trying to resolve link = " + exc.getMessage());
    } finally {
      result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs);
      txn.setOperationResult(result);
    }

    return txn;
  }

}
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.task; - -import java.util.Map; -import java.util.function.Supplier; - -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.dal.rest.RestDataProvider; -import org.slf4j.MDC; - -/** - * The Class PerformElasticSearchPut. - */ -public class PerformElasticSearchPut implements Supplier<NetworkTransaction> { - - private RestDataProvider restDataProvider; - private String jsonPayload; - private NetworkTransaction txn; - private Map<String, String> contextMap; - - /** - * Instantiates a new perform elastic search put. 
- * - * @param jsonPayload the json payload - * @param txn the txn - * @param restDataProvider the rest data provider - */ - public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, - RestDataProvider restDataProvider) { - this.jsonPayload = jsonPayload; - this.txn = txn; - this.restDataProvider = restDataProvider; - this.contextMap = MDC.getCopyOfContextMap(); - } - - public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, - RestDataProvider restDataProvider, Map<String, String> contextMap) { - this.jsonPayload = jsonPayload; - this.txn = txn; - this.restDataProvider = restDataProvider; - this.contextMap = contextMap; - } - - /* (non-Javadoc) - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - txn.setTaskAgeInMs(); - long startTimeInMs = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - - OperationResult or = restDataProvider.doPut(txn.getLink(), jsonPayload, "application/json"); - - or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - txn.setOperationResult(or); - - return txn; - } -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchRetrieval.java b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchRetrieval.java deleted file mode 100644 index 17652b4..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchRetrieval.java +++ /dev/null @@ -1,66 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.task; - -import java.util.Map; -import java.util.function.Supplier; - -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.dal.rest.RestDataProvider; -import org.slf4j.MDC; - -/** - * The Class PerformElasticSearchRetrieval. - */ -public class PerformElasticSearchRetrieval implements Supplier<NetworkTransaction> { - - private NetworkTransaction txn; - private RestDataProvider restDataProvider; - private Map<String, String> contextMap; - - /** - * Instantiates a new perform elastic search retrieval. 
- * - * @param elasticSearchTxn the elastic search txn - * @param restDataProvider the rest data provider - */ - public PerformElasticSearchRetrieval(NetworkTransaction elasticSearchTxn, - RestDataProvider restDataProvider) { - this.txn = elasticSearchTxn; - this.restDataProvider = restDataProvider; - this.contextMap = MDC.getCopyOfContextMap(); - } - - /* (non-Javadoc) - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - MDC.setContextMap(contextMap); - OperationResult or = restDataProvider.doGet(txn.getLink(), "application/json"); - txn.setOperationResult(or); - return txn; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchUpdate.java b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchUpdate.java deleted file mode 100644 index 52505a2..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchUpdate.java +++ /dev/null @@ -1,80 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
/**
 * The Class PerformElasticSearchUpdate.
 *
 * Supplier task that submits a bulk-update payload to ElasticSearch and
 * reports the outcome on a dedicated NetworkTransaction.
 */
public class PerformElasticSearchUpdate implements Supplier<NetworkTransaction> {

  private ElasticSearchDataProvider esDataProvider;
  private NetworkTransaction operationTracker;
  private String updatePayload;
  private String updateUrl;
  // MDC context captured at construction time for re-application in get().
  private Map<String, String> contextMap;

  /**
   * Instantiates a new perform elastic search update.
   *
   * @param updateUrl the update url
   * @param updatePayload the update payload
   * @param esDataProvider the es data provider
   * @param transactionTracker the transaction tracker
   */
  public PerformElasticSearchUpdate(String updateUrl, String updatePayload,
      ElasticSearchDataProvider esDataProvider, NetworkTransaction transactionTracker) {
    this.updateUrl = updateUrl;
    this.updatePayload = updatePayload;
    this.esDataProvider = esDataProvider;
    this.contextMap = MDC.getCopyOfContextMap();
    // A fresh NetworkTransaction is populated from the tracker rather than
    // reusing it directly. NOTE(review): presumably so the caller's tracker is
    // not mutated by this task -- confirm against callers.
    this.operationTracker = new NetworkTransaction();
    operationTracker.setEntityType(transactionTracker.getEntityType());
    operationTracker.setDescriptor(transactionTracker.getDescriptor());
    operationTracker.setOperationType(transactionTracker.getOperationType());
  }

  /* (non-Javadoc)
   * @see java.util.function.Supplier#get()
   */
  @Override
  public NetworkTransaction get() {
    // Record queue-wait age, then time the bulk operation itself.
    operationTracker.setTaskAgeInMs();
    long startTimeInMs = System.currentTimeMillis();
    MDC.setContextMap(contextMap);
    OperationResult or = esDataProvider.doBulkOperation(updateUrl, updatePayload);

    or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs);
    operationTracker.setOperationResult(or);

    return operationTracker;
  }

}
- */ -package org.openecomp.sparky.synchronizer.task; - -import java.io.File; -import java.util.Map; -import java.util.function.Supplier; - -import org.openecomp.cl.api.Logger; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.logging.AaiUiMsgs; -import org.slf4j.MDC; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * The Class PersistOperationResultToDisk. - */ -public class PersistOperationResultToDisk implements Supplier<Void> { - - private String fullPath; - private OperationResult dataToStore; - private ObjectMapper mapper; - private Logger logger; - private Map<String, String> contextMap; - - /** - * Instantiates a new persist operation result to disk. - * - * @param fullPath the full path - * @param dataToStore the data to store - * @param mapper the mapper - * @param logger the logger - */ - public PersistOperationResultToDisk(String fullPath, OperationResult dataToStore, - ObjectMapper mapper, Logger logger) { - - this.fullPath = fullPath; - this.mapper = mapper; - this.dataToStore = dataToStore; - this.logger = logger; - this.contextMap = MDC.getCopyOfContextMap(); - } - - /* (non-Javadoc) - * @see java.util.function.Supplier#get() - */ - @Override - public Void get() { - MDC.setContextMap(contextMap); - File file = new File(fullPath); - if (!file.exists()) { - try { - mapper.writeValue(new File(fullPath), dataToStore); - } catch (Exception exc) { - logger.error(AaiUiMsgs.DISK_DATA_WRITE_IO_ERROR, exc.toString()); - } - } - - return null; - } - - - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java b/src/main/java/org/openecomp/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java deleted file mode 100644 index 3701879..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java +++ /dev/null @@ -1,89 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * 
org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.task; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.function.Supplier; - -import org.openecomp.cl.api.Logger; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.logging.AaiUiMsgs; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * The Class RetrieveOperationResultFromDisk. - */ -public class RetrieveOperationResultFromDisk implements Supplier<OperationResult> { - - private String fullPath; - private ObjectMapper mapper; - private Logger logger; - - /** - * Instantiates a new retrieve operation result from disk. 
- * - * @param fullPath the full path - * @param mapper the mapper - * @param logger the logger - */ - public RetrieveOperationResultFromDisk(String fullPath, ObjectMapper mapper, Logger logger) { - - this.fullPath = fullPath; - this.mapper = mapper; - this.logger = logger; - } - - /* (non-Javadoc) - * @see java.util.function.Supplier#get() - */ - @Override - public OperationResult get() { - - try { - File file = new File(fullPath); - if (file.exists()) { - if (logger.isDebugEnabled()) { - logger.debug(AaiUiMsgs.WILL_RETRIEVE_TXN, fullPath); - } - - Path path = Paths.get(fullPath); - byte[] byteBuffer = Files.readAllBytes(path); - - OperationResult opResult = mapper.readValue(byteBuffer, OperationResult.class); - - return opResult; - } else { - logger.debug(AaiUiMsgs.FAILED_TO_RESTORE_TXN_FILE_MISSING, fullPath); - } - } catch (IOException exc) { - logger.error(AaiUiMsgs.DISK_CACHE_READ_IO_ERROR, exc.getLocalizedMessage()); - } - return null; - } - -} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/StoreDocumentTask.java b/src/main/java/org/openecomp/sparky/synchronizer/task/StoreDocumentTask.java deleted file mode 100644 index 5b7ec00..0000000 --- a/src/main/java/org/openecomp/sparky/synchronizer/task/StoreDocumentTask.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.openecomp.sparky.synchronizer.task; - -import java.util.Map; -import java.util.function.Supplier; - -import org.openecomp.sparky.dal.NetworkTransaction; -import org.openecomp.sparky.dal.rest.OperationResult; -import org.openecomp.sparky.dal.rest.RestDataProvider; -import org.openecomp.sparky.synchronizer.entity.IndexDocument; -import org.slf4j.MDC; - -/** - * The Class StoreDocumentTask. - */ -public class StoreDocumentTask implements Supplier<NetworkTransaction> { - - private IndexDocument doc; - - private NetworkTransaction txn; - - private RestDataProvider esDataProvider; - private Map<String, String> contextMap; - - /** - * Instantiates a new store document task. 
- * - * @param doc the doc - * @param txn the txn - * @param esDataProvider the es data provider - */ - public StoreDocumentTask(IndexDocument doc, NetworkTransaction txn, - RestDataProvider esDataProvider) { - this.doc = doc; - this.txn = txn; - this.esDataProvider = esDataProvider; - this.contextMap = MDC.getCopyOfContextMap(); - } - - /* (non-Javadoc) - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - txn.setTaskAgeInMs(); - - long startTimeInMs = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - OperationResult or = - esDataProvider.doPut(txn.getLink(), doc.getIndexDocumentJson(), "application/json"); - or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - - txn.setOperationResult(or); - - return txn; - } - -} |