From 7b42006c51d4f014f72ae39170544a85d2f09fb5 Mon Sep 17 00:00:00 2001
From: Shawn Severin
Date: Mon, 11 Dec 2017 15:42:52 -0500
Subject: Adding UI extensibility

Adding the ability for Sparky developers to create their own custom
front-end views

Issue-ID: AAI-542
Change-Id: I83f9608639799e3bf85b654f44a0a7a5a85ad264
Signed-off-by: Shawn Severin
---
 .../v1/conf/HelloWorldBeans.xml | 8 -
 .../inventory-ui-service/v1/conf/jaxrsBeans.groovy | 2 +-
 .../v1/routes/helloServlet.route | 4 -
 .../v1/routes/helloWorld.route | 4 -
 .../v1/routes/jaxrsExample.route | 4 -
 .../v1/routes/serverStaticContent.route | 4 -
 .../routes/sparky-core-unifiedFilterRequest.route | 4 -
 src/main/config/aaiEntityNodeDescriptors.json | 30 +
 src/main/config/ajsc-override-web.xml | 34 +-
 src/main/config/cadi.properties | 36 +
 src/main/config/csp-cookie-filter.properties | 18 +
 src/main/config/es_sv_mappings.json | 36 +
 src/main/config/es_sv_settings.json | 36 +
 src/main/config/runner-web.xml | 45 +-
 src/main/docker/Dockerfile | 6 +-
 .../java/org/onap/aai/sparky/JaxrsEchoService.java | 6 +-
 .../java/org/onap/aai/sparky/JaxrsUserService.java | 61 ++
 src/main/java/org/onap/aai/sparky/Test.java | 27 +
 .../search/AggregateSummaryProcessor.java | 238 +++++
 .../search/AggregateVnfSearchProvider.java | 160 ++++
 .../aggregatevnf/search/VnfSearchQueryBuilder.java | 184 ++++
 .../sync/AggregationSyncControllerFactory.java | 232 +++++
 .../aggregation/sync/AggregationSynchronizer.java | 778 ++++++++++++++++
 .../sync/HistoricalEntitySummarizer.java | 391 ++++++++
 .../sync/HistoricalEntitySyncController.java | 90 ++
 .../aai/sparky/analytics/AbstractStatistics.java | 42 +-
 .../aai/sparky/analytics/HistoricalCounter.java | 82 +-
 .../sync/AutoSuggestionSyncController.java | 97 ++
 .../sync/AutosuggestionSynchronizer.java | 749 ++++++++++++++++
 .../sync/VnfAliasSuggestionSynchronizer.java | 192 ++++
 .../sync/VnfAliasSyncController.java | 95 ++
 .../common/search/CommonSearchSuggestion.java | 88 ++
 .../org/onap/aai/sparky/config/Configurable.java | 43 -
 .../config/exception/ConfigurationException.java | 31 -
 .../sparky/config/oxm/CrossEntityReference.java | 1 -
 .../config/oxm/CrossEntityReferenceDescriptor.java | 65 ++
 .../config/oxm/CrossEntityReferenceLookup.java | 154 ++++
 .../aai/sparky/config/oxm/GeoEntityDescriptor.java | 59 ++
 .../aai/sparky/config/oxm/GeoEntityLookup.java | 155 ++++
 .../sparky/config/oxm/GeoOxmEntityDescriptor.java | 69 ++
 .../aai/sparky/config/oxm/OxmEntityDescriptor.java | 142 +--
 .../aai/sparky/config/oxm/OxmEntityLookup.java | 151 ++++
 .../onap/aai/sparky/config/oxm/OxmModelLoader.java | 500 ++---
 .../sparky/config/oxm/OxmModelLoaderFilter.java | 90 --
 .../aai/sparky/config/oxm/OxmModelProcessor.java | 31 +
 .../sparky/config/oxm/SearchableEntityLookup.java | 138 +++
 .../config/oxm/SearchableOxmEntityDescriptor.java | 73 ++
 .../config/oxm/SuggestionEntityDescriptor.java | 52 ++
 .../sparky/config/oxm/SuggestionEntityLookup.java | 197 +++++
 .../sync/CrossEntityReferenceSynchronizer.java | 949 ++++++++++++++++
 .../aai/sparky/dal/ActiveInventoryAdapter.java | 460 ++++++++++
 .../onap/aai/sparky/dal/ElasticSearchAdapter.java | 120 +++
 .../onap/aai/sparky/dal/NetworkTransaction.java | 25 +-
 .../aai/sparky/dal/aai/ActiveInventoryAdapter.java | 407 ---
 .../dal/aai/ActiveInventoryDataProvider.java | 7 +-
 .../dal/aai/ActiveInventoryEntityStatistics.java | 65 +-
 ...tiveInventoryProcessingExceptionStatistics.java | 7 +-
 .../dal/aai/config/ActiveInventoryConfig.java | 74 +-
.../dal/aai/config/ActiveInventoryRestConfig.java | 134 +-- .../dal/aai/config/ActiveInventorySslConfig.java | 16 +- .../org/onap/aai/sparky/dal/cache/EntityCache.java | 60 -- .../aai/sparky/dal/cache/InMemoryEntityCache.java | 107 --- .../sparky/dal/cache/PersistentEntityCache.java | 256 ------ .../dal/elasticsearch/ElasticSearchAdapter.java | 213 ----- .../elasticsearch/ElasticSearchDataProvider.java | 6 +- .../ElasticSearchEntityStatistics.java | 46 +- .../dal/elasticsearch/HashQueryResponse.java | 3 +- .../sparky/dal/elasticsearch/SearchAdapter.java | 68 +- .../elasticsearch/config/ElasticSearchConfig.java | 366 +------- .../sparky/dal/proxy/config/DataRouterConfig.java | 132 +++ .../dal/proxy/processor/AaiUiProxyProcessor.java | 227 +++++ .../aai/sparky/dal/rest/RestClientBuilder.java | 10 +- .../aai/sparky/dal/rest/RestfulDataAccessor.java | 107 +-- .../sparky/dal/sas/config/SearchServiceConfig.java | 5 + .../ResettableStreamHttpServletRequest.java | 128 --- .../sparky/dataintegrity/config/DiUiConstants.java | 77 ++ .../editattributes/AttributeEditProcessor.java | 182 ++++ .../sparky/editattributes/AttributeUpdater.java | 366 ++++++++ .../editattributes/UserAuthorizationReader.java | 77 ++ .../aai/sparky/editattributes/UserValidator.java | 65 ++ .../sparky/editattributes/entity/EditRequest.java | 67 ++ .../exception/AttributeUpdateException.java | 60 ++ .../inventory/EntityHistoryQueryBuilder.java | 143 +++ .../inventory/GeoVisualizationProcessor.java | 202 +++++ .../sparky/inventory/entity/GeoIndexDocument.java | 292 ++++++ .../inventory/entity/TopographicalEntity.java | 220 +++++ .../org/onap/aai/sparky/logging/AaiUiMsgs.java | 53 +- .../onap/aai/sparky/logging/util/ServletUtils.java | 161 ++++ .../sparky/search/EntityCountHistoryProcessor.java | 417 +++++++++ .../org/onap/aai/sparky/search/SearchResponse.java | 99 +++ .../org/onap/aai/sparky/search/Suggestion.java | 57 -- .../org/onap/aai/sparky/search/SuggestionList.java | 70 -- .../aai/sparky/search/UnifiedSearchProcessor.java | 212 +++++ .../aai/sparky/search/VnfSearchQueryBuilder.java | 174 ---- .../onap/aai/sparky/search/VnfSearchService.java | 348 -------- .../onap/aai/sparky/search/api/SearchProvider.java | 34 + .../aai/sparky/search/config/SuggestionConfig.java | 6 +- .../search/entity/ExternalSearchRequestEntity.java | 69 ++ .../sparky/search/entity/QuerySearchEntity.java | 71 ++ .../aai/sparky/search/entity/SearchSuggestion.java | 37 + .../search/filters/FilterElasticSearchAdapter.java | 7 +- .../aai/sparky/search/filters/FilterProcessor.java | 1 - .../search/filters/config/FiltersConfig.java | 2 +- .../search/registry/SearchProviderRegistry.java | 74 ++ .../org/onap/aai/sparky/security/EcompSso.java | 21 +- .../sparky/security/filter/CspCookieFilter.java | 274 ++++++ .../aai/sparky/security/filter/LoginFilter.java | 19 +- .../security/portal/PortalRestAPIServiceImpl.java | 33 +- .../portal/config/PortalAuthenticationConfig.java | 1 + .../sparky/suggestivesearch/SuggestionEntity.java | 61 -- .../sparky/sync/AbstractEntitySynchronizer.java | 564 ++++++++++++ .../aai/sparky/sync/ElasticSearchIndexCleaner.java | 607 +++++++++++++ .../sparky/sync/ElasticSearchSchemaFactory.java | 109 +++ .../org/onap/aai/sparky/sync/IndexCleaner.java | 55 ++ .../aai/sparky/sync/IndexIntegrityValidator.java | 176 ++++ .../onap/aai/sparky/sync/IndexSynchronizer.java | 65 ++ .../org/onap/aai/sparky/sync/IndexValidator.java | 56 ++ .../org/onap/aai/sparky/sync/SyncController.java | 96 ++ .../onap/aai/sparky/sync/SyncControllerImpl.java | 692 
+++++++++++++++ .../aai/sparky/sync/SyncControllerRegistrar.java | 27 + .../aai/sparky/sync/SyncControllerRegistry.java | 48 + .../aai/sparky/sync/SyncControllerService.java | 220 +++++ .../aai/sparky/sync/SynchronizerConstants.java | 63 ++ .../onap/aai/sparky/sync/TaskProcessingStats.java | 133 +++ .../aai/sparky/sync/TransactionRateMonitor.java | 73 ++ .../sync/config/ElasticSearchEndpointConfig.java | 70 ++ .../sync/config/ElasticSearchSchemaConfig.java | 75 ++ .../sync/config/NetworkStatisticsConfig.java | 237 +++++ .../sparky/sync/config/SyncControllerConfig.java | 303 +++++++ .../aai/sparky/sync/entity/AggregationEntity.java | 102 +++ .../sync/entity/AggregationSuggestionEntity.java | 104 +++ .../onap/aai/sparky/sync/entity/IndexDocument.java | 39 + .../sync/entity/IndexableCrossEntityReference.java | 99 +++ .../aai/sparky/sync/entity/IndexableEntity.java | 98 +++ .../aai/sparky/sync/entity/MergableEntity.java | 57 ++ .../aai/sparky/sync/entity/ObjectIdCollection.java | 76 ++ .../aai/sparky/sync/entity/SearchableEntity.java | 144 +++ .../aai/sparky/sync/entity/SelfLinkDescriptor.java | 90 ++ .../sparky/sync/entity/SuggestionSearchEntity.java | 326 +++++++ .../sparky/sync/entity/TransactionStorageType.java | 54 ++ .../sparky/sync/enumeration/OperationState.java | 30 + .../sparky/sync/enumeration/SynchronizerState.java | 30 + .../sync/task/PerformActiveInventoryRetrieval.java | 100 +++ .../sparky/sync/task/PerformElasticSearchPut.java | 88 ++ .../sync/task/PerformElasticSearchRetrieval.java | 72 ++ .../sync/task/PerformElasticSearchUpdate.java | 80 ++ .../aai/sparky/sync/task/StoreDocumentTask.java | 90 ++ .../aai/sparky/sync/task/SyncControllerTask.java | 53 ++ .../synchronizer/AbstractEntitySynchronizer.java | 568 ------------ .../AggregationSuggestionSynchronizer.java | 183 ---- .../synchronizer/AggregationSynchronizer.java | 771 ---------------- .../synchronizer/AutosuggestionSynchronizer.java | 737 ---------------- .../CrossEntityReferenceSynchronizer.java | 907 ------------------- .../synchronizer/ElasticSearchIndexCleaner.java | 795 ----------------- .../onap/aai/sparky/synchronizer/IndexCleaner.java | 55 -- .../synchronizer/IndexIntegrityValidator.java | 227 ----- .../aai/sparky/synchronizer/IndexSynchronizer.java | 65 -- .../aai/sparky/synchronizer/IndexValidator.java | 56 -- .../aai/sparky/synchronizer/MyErrorHandler.java | 111 --- .../synchronizer/SearchableEntitySynchronizer.java | 767 ---------------- .../aai/sparky/synchronizer/SyncController.java | 476 ---------- .../onap/aai/sparky/synchronizer/SyncHelper.java | 568 ------------ .../sparky/synchronizer/TaskProcessingStats.java | 189 ---- .../synchronizer/TransactionRateController.java | 110 --- .../config/NetworkStatisticsConfig.java | 237 ----- .../config/SynchronizerConfiguration.java | 544 ------------ .../synchronizer/config/SynchronizerConstants.java | 60 -- .../synchronizer/config/TaskProcessorConfig.java | 325 ------- .../synchronizer/entity/AggregationEntity.java | 146 --- .../entity/AggregationSuggestionEntity.java | 119 --- .../sparky/synchronizer/entity/IndexDocument.java | 42 - .../entity/IndexableCrossEntityReference.java | 163 ---- .../synchronizer/entity/IndexableEntity.java | 139 --- .../sparky/synchronizer/entity/MergableEntity.java | 68 -- .../synchronizer/entity/ObjectIdCollection.java | 90 -- .../synchronizer/entity/SearchableEntity.java | 196 ----- .../synchronizer/entity/SelfLinkDescriptor.java | 90 -- .../entity/SuggestionSearchEntity.java | 349 -------- .../entity/TransactionStorageType.java | 69 
-- .../synchronizer/enumeration/OperationState.java | 30 - .../enumeration/SynchronizerState.java | 30 - .../filter/ElasticSearchSynchronizerFilter.java | 136 --- .../task/CollectEntitySelfLinkTask.java | 104 --- .../task/CollectEntityTypeSelfLinksTask.java | 105 --- .../task/GetCrossEntityReferenceEntityTask.java | 105 --- .../task/PerformActiveInventoryRetrieval.java | 151 ---- .../synchronizer/task/PerformElasticSearchPut.java | 140 --- .../task/PerformElasticSearchRetrieval.java | 110 --- .../task/PerformElasticSearchUpdate.java | 152 ---- .../task/PersistOperationResultToDisk.java | 157 ---- .../task/RetrieveOperationResultFromDisk.java | 133 --- .../synchronizer/task/StoreDocumentTask.java | 137 --- .../sparky/topology/sync/GeoSyncController.java | 95 ++ .../aai/sparky/topology/sync/GeoSynchronizer.java | 497 +++++++++++ .../org/onap/aai/sparky/util/ConfigHelper.java | 2 +- .../java/org/onap/aai/sparky/util/Encryptor.java | 78 +- .../java/org/onap/aai/sparky/util/ErrorUtil.java | 1 - .../org/onap/aai/sparky/util/KeystoreBuilder.java | 24 +- .../java/org/onap/aai/sparky/util/NodeUtils.java | 144 ++- .../org/onap/aai/sparky/util/RestletUtils.java | 118 +++ .../org/onap/aai/sparky/util/ServletUtils.java | 161 ---- .../aai/sparky/util/SuggestionsPermutation.java | 85 +- .../java/org/onap/aai/sparky/util/TreeWalker.java | 10 +- .../org/onap/aai/sparky/util/test/Encryptor.java | 83 -- .../onap/aai/sparky/util/test/KeystoreBuilder.java | 541 ------------ .../viewandinspect/EntityTypeAggregation.java | 13 +- .../SchemaVisualizationProcessor.java | 174 ++++ .../config/TierSupportUiConstants.java | 352 +------- .../viewandinspect/config/VisualizationConfig.java | 219 ----- .../config/VisualizationConfigs.java | 169 ++++ .../viewandinspect/entity/ActiveInventoryNode.java | 64 +- .../entity/D3VisualizationOutput.java | 66 -- .../sparky/viewandinspect/entity/GraphRequest.java | 56 ++ .../aai/sparky/viewandinspect/entity/JsonNode.java | 70 +- .../aai/sparky/viewandinspect/entity/NodeMeta.java | 22 +- .../entity/NodeProcessingTransaction.java | 2 +- .../viewandinspect/entity/QuerySearchEntity.java | 72 -- .../sparky/viewandinspect/entity/Relationship.java | 4 +- .../viewandinspect/entity/RelationshipList.java | 18 +- .../viewandinspect/entity/SearchResponse.java | 90 -- .../entity/SearchableEntityList.java | 115 +++ .../entity/SelfLinkDeterminationTransaction.java | 3 +- .../sparky/viewandinspect/entity/Violations.java | 125 --- .../enumeration/NodeProcessingAction.java | 1 + .../search/ViewInspectSearchProvider.java | 440 +++++++++ .../services/SearchServiceWrapper.java | 980 --------------------- .../services/VisualizationContext.java | 97 +- .../services/VisualizationService.java | 110 +-- .../services/VisualizationTransformer.java | 122 +-- .../viewandinspect/servlet/SearchServlet.java | 224 ----- .../servlet/VisualizationServlet.java | 200 ----- .../task/CollectNodeSelfLinkTask.java | 29 - .../task/PerformNodeSelfLinkProcessingTask.java | 31 +- .../task/PerformSelfLinkDeterminationTask.java | 16 +- .../sync/ViewInspectEntitySynchronizer.java | 783 ++++++++++++++++ .../sync/ViewInspectSyncController.java | 129 +++ src/main/resources/extApps/aai.war | Bin 0 -> 1372092 bytes src/main/resources/extApps/aai.xml | 1 - src/main/resources/logging/AAIUIMsgs.properties | 125 ++- src/main/scripts/encNameValue.sh | 20 + src/main/scripts/start.sh | 61 +- 241 files changed, 18808 insertions(+), 17931 deletions(-) delete mode 100644 
src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/conf/HelloWorldBeans.xml delete mode 100644 src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/helloServlet.route delete mode 100644 src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/helloWorld.route delete mode 100644 src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/jaxrsExample.route delete mode 100644 src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/serverStaticContent.route delete mode 100644 src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/sparky-core-unifiedFilterRequest.route create mode 100644 src/main/config/cadi.properties create mode 100644 src/main/config/csp-cookie-filter.properties create mode 100644 src/main/config/es_sv_mappings.json create mode 100644 src/main/config/es_sv_settings.json create mode 100644 src/main/java/org/onap/aai/sparky/JaxrsUserService.java create mode 100644 src/main/java/org/onap/aai/sparky/Test.java create mode 100644 src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java create mode 100644 src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java create mode 100644 src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java create mode 100644 src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java create mode 100644 src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java create mode 100644 src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java create mode 100644 src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java create mode 100644 src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java create mode 100644 src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java create mode 100644 src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java create mode 100644 src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java create mode 100644 src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java delete mode 100644 src/main/java/org/onap/aai/sparky/config/Configurable.java delete mode 100644 src/main/java/org/onap/aai/sparky/config/exception/ConfigurationException.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java delete mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoaderFilter.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java create mode 100644 src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java create mode 100644 
src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java create mode 100644 src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java create mode 100644 src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java delete mode 100644 src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryAdapter.java delete mode 100644 src/main/java/org/onap/aai/sparky/dal/cache/EntityCache.java delete mode 100644 src/main/java/org/onap/aai/sparky/dal/cache/InMemoryEntityCache.java delete mode 100644 src/main/java/org/onap/aai/sparky/dal/cache/PersistentEntityCache.java delete mode 100644 src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchAdapter.java create mode 100644 src/main/java/org/onap/aai/sparky/dal/proxy/config/DataRouterConfig.java create mode 100644 src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java delete mode 100644 src/main/java/org/onap/aai/sparky/dal/servlet/ResettableStreamHttpServletRequest.java create mode 100644 src/main/java/org/onap/aai/sparky/dataintegrity/config/DiUiConstants.java create mode 100644 src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java create mode 100644 src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java create mode 100644 src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java create mode 100644 src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java create mode 100644 src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java create mode 100644 src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java create mode 100644 src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java create mode 100644 src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java create mode 100644 src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java create mode 100644 src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java create mode 100644 src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java create mode 100644 src/main/java/org/onap/aai/sparky/search/SearchResponse.java delete mode 100644 src/main/java/org/onap/aai/sparky/search/Suggestion.java delete mode 100644 src/main/java/org/onap/aai/sparky/search/SuggestionList.java create mode 100644 src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java delete mode 100644 src/main/java/org/onap/aai/sparky/search/VnfSearchQueryBuilder.java delete mode 100644 src/main/java/org/onap/aai/sparky/search/VnfSearchService.java create mode 100644 src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java create mode 100644 src/main/java/org/onap/aai/sparky/search/entity/ExternalSearchRequestEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java create mode 100644 src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java create mode 100644 src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java delete mode 100644 src/main/java/org/onap/aai/sparky/suggestivesearch/SuggestionEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java create mode 100644 
src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/IndexValidator.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/SyncController.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java create mode 100644 src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/AbstractEntitySynchronizer.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/AggregationSuggestionSynchronizer.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/AggregationSynchronizer.java delete mode 100644 
src/main/java/org/onap/aai/sparky/synchronizer/AutosuggestionSynchronizer.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/CrossEntityReferenceSynchronizer.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/ElasticSearchIndexCleaner.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/IndexCleaner.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/IndexIntegrityValidator.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/IndexSynchronizer.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/IndexValidator.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/MyErrorHandler.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/SearchableEntitySynchronizer.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/SyncController.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/SyncHelper.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/TaskProcessingStats.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/TransactionRateController.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/config/NetworkStatisticsConfig.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/config/SynchronizerConfiguration.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/config/SynchronizerConstants.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/config/TaskProcessorConfig.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/AggregationEntity.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/AggregationSuggestionEntity.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexDocument.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexableCrossEntityReference.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexableEntity.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/MergableEntity.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/ObjectIdCollection.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/SearchableEntity.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/SelfLinkDescriptor.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/SuggestionSearchEntity.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/entity/TransactionStorageType.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/enumeration/OperationState.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/enumeration/SynchronizerState.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/CollectEntitySelfLinkTask.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/PerformActiveInventoryRetrieval.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchPut.java delete mode 100644 
src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchRetrieval.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchUpdate.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/PersistOperationResultToDisk.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java delete mode 100644 src/main/java/org/onap/aai/sparky/synchronizer/task/StoreDocumentTask.java create mode 100644 src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java create mode 100644 src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java create mode 100644 src/main/java/org/onap/aai/sparky/util/RestletUtils.java delete mode 100644 src/main/java/org/onap/aai/sparky/util/ServletUtils.java delete mode 100644 src/main/java/org/onap/aai/sparky/util/test/Encryptor.java delete mode 100644 src/main/java/org/onap/aai/sparky/util/test/KeystoreBuilder.java create mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java delete mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfig.java create mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java create mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java delete mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/entity/QuerySearchEntity.java delete mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchResponse.java create mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java delete mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/entity/Violations.java create mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java delete mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/services/SearchServiceWrapper.java delete mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/servlet/SearchServlet.java delete mode 100644 src/main/java/org/onap/aai/sparky/viewandinspect/servlet/VisualizationServlet.java create mode 100644 src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java create mode 100644 src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java create mode 100644 src/main/resources/extApps/aai.war create mode 100644 src/main/scripts/encNameValue.sh (limited to 'src/main') diff --git a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/conf/HelloWorldBeans.xml b/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/conf/HelloWorldBeans.xml deleted file mode 100644 index c052560..0000000 --- a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/conf/HelloWorldBeans.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - diff --git a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/conf/jaxrsBeans.groovy b/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/conf/jaxrsBeans.groovy index b65cb80..da9b558 100644 --- a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/conf/jaxrsBeans.groovy +++ b/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/conf/jaxrsBeans.groovy @@ -3,7 +3,7 @@ beans{ xmlns jaxrs: "http://cxf.apache.org/jaxrs" xmlns util: "http://www.springframework.org/schema/util" - echoService(org.onap.aai.sparky.JaxrsEchoService) + echoService(org.openecomp.sparky.JaxrsEchoService) util.list(id: 'jaxrsServices') { 
ref(bean:'echoService') diff --git a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/helloServlet.route b/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/helloServlet.route deleted file mode 100644 index 5ede9c1..0000000 --- a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/helloServlet.route +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/helloWorld.route b/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/helloWorld.route deleted file mode 100644 index bc3e178..0000000 --- a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/helloWorld.route +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/jaxrsExample.route b/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/jaxrsExample.route deleted file mode 100644 index 25c1977..0000000 --- a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/jaxrsExample.route +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/serverStaticContent.route b/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/serverStaticContent.route deleted file mode 100644 index bf221c6..0000000 --- a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/serverStaticContent.route +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/sparky-core-unifiedFilterRequest.route b/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/sparky-core-unifiedFilterRequest.route deleted file mode 100644 index 36cf518..0000000 --- a/src/main/ajsc/inventory-ui-service_v1/inventory-ui-service/v1/routes/sparky-core-unifiedFilterRequest.route +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/src/main/config/aaiEntityNodeDescriptors.json b/src/main/config/aaiEntityNodeDescriptors.json index bf95f28..e72bab0 100644 --- a/src/main/config/aaiEntityNodeDescriptors.json +++ b/src/main/config/aaiEntityNodeDescriptors.json @@ -129,6 +129,21 @@ "className": "node-button", "r": "10" } + }, + { + "type": "button", + "name": "icon_triangle_warning", + "class": "node-button", + "shapeAttributes": { + "offset": { + "x": "46", + "y": "-12" + } + }, + "svgAttributes": { + "className": "node-button", + "r": "10" + } }] }, "selectedNodeClass": { @@ -183,6 +198,21 @@ "className": "node-button", "r": "10" } + }, + { + "type": "button", + "name": "icon_triangle_warning", + "class": "node-button", + "shapeAttributes": { + "offset": { + "x": "46", + "y": "-12" + } + }, + "svgAttributes": { + "className": "node-button", + "r": "10" + } }] } } \ No newline at end of file diff --git a/src/main/config/ajsc-override-web.xml b/src/main/config/ajsc-override-web.xml index c66ac89..e267829 100644 --- a/src/main/config/ajsc-override-web.xml +++ b/src/main/config/ajsc-override-web.xml @@ -3,34 +3,22 @@ - - - - - ElasticSearchSynchronizerFilter - /nothingShouldBeSentHere/* - - - - OxmModelLoaderFilter - /nothingShouldBeSentHereEither/* + + + LoginFilter + /* PortalRestAPIProxy /api/v2/* - - - - VisualizationServlet - /visualization/* - - + + springSecurityFilterChain /* - - + + ManagementServlet /mgmt @@ -46,12 +34,6 @@ /services/* - - 
SearchServlet - /elasticSearchQuery/* - /search/* - - jsp *.jsp diff --git a/src/main/config/cadi.properties b/src/main/config/cadi.properties new file mode 100644 index 0000000..83a5ce0 --- /dev/null +++ b/src/main/config/cadi.properties @@ -0,0 +1,36 @@ +#This properties file is used for defining AAF properties related to the CADI framework. This file is used for running AAF framework + +#In order to test functionality of cadi-ajsc-plugin locally cross domain cookie. Cadi "should" find your hostname for you. +#However, we have seen some situations where this fails. A Local testing +#modification can include modifying your hosts file so that you can use "mywebserver.att.com" for your localhost in order +#to test/verify GLO functionality locally. If you are on a Windows machine, you will already have a machine name associated with +#it that will utilize an AT&T domain such as "sbc.com". You may need to add your domain to this as a comma separated list depending +#upon your particular machine domain. This property is commented out as cadi SHOULD find your machine name. With version 1.2.1 of cadi, +#it appears to resolve Mac machine names as well, now. But, this can be somewhat inconsistent depending on your specific working envrironment. +hostname=mywebserver.att.com + +#Setting csp_domain to PROD will allow for testing using your attuid and password through GLO. +csp_domain=PROD +csp_devl_localhost=true + +basic_realm=csp.att.com +#basic_realm=aaf.att.com +basic_warn=TRUE + +cadi_loglevel=WARN +cadi_keyfile=target/swm/package/nix/dist_files/appl/inventory-ui-service/etc/keyfile + +# Configure AAF +#These are dummy values add appropriate values required +aaf_url=url + +#AJSC - MECHID +#These are dummy values add appropriate values required +aaf_id=dummyid@ajsc.att.com +aaf_password=enc:277edqJCjT0RlUI3BtbDQa-3Ha-CQGd +aaf_timeout=5000 +aaf_clean_interval=30000 +aaf_user_expires=5000 +aaf_high_count=1000 + + diff --git a/src/main/config/csp-cookie-filter.properties b/src/main/config/csp-cookie-filter.properties new file mode 100644 index 0000000..e12109a --- /dev/null +++ b/src/main/config/csp-cookie-filter.properties @@ -0,0 +1,18 @@ +# AT&T Global login page. This is the redirect URL +# Production login page: +# https://www.e-access.att.com/empsvcs/hrpinmgt/pagLogin/ +# +# Test login page: +# https://webtest.csp.att.com/empsvcs/hrpinmgt/pagLogin/ +global.login.url=https://www.e-access.att.com/empsvcs/hrpinmgt/pagLogin/ + +# valid domains for open redirect +redirect-domain=att.com,sbc.com,bls.com,cingular.net + +# MOTS ID of the application +application.id=24153 + +# Required by esGateKeeper. 
Valid values are: +# DEVL - used during development +# PROD - used in production +gatekeeper.environment=PROD \ No newline at end of file diff --git a/src/main/config/es_sv_mappings.json b/src/main/config/es_sv_mappings.json new file mode 100644 index 0000000..c964ca3 --- /dev/null +++ b/src/main/config/es_sv_mappings.json @@ -0,0 +1,36 @@ +{ + "properties": { + "entityType": { + "type": "string", + "analyzer": "ngram_analyzer", + "search_analyzer": "ngram_analyzer" + }, + "entityPrimaryKeyValue": { + "type": "string", + "index": "not_analyzed" + }, + "searchTagIDs": { + "type": "string" + }, + "searchTags": { + "type": "string", + "analyzer": "ngram_analyzer" + }, + "perspectives" : { + "type": "string", + "index": "not_analyzed" + }, + "crossEntityReferenceValues": { + "type": "string", + "analyzer": "ngram_analyzer" + }, + "link": { + "type": "string", + "index": "not_analyzed" + }, + "lastmodTimestamp": { + "type": "date", + "format": "MMM d y HH:m:s||dd-MM-yyyy HH:mm:ss||yyyy-MM-dd'T'HH:mm:ss.SSSZZ||yyyy-MM-dd HH:mm:ss||MM/dd/yyyy||yyyyMMdd'T'HHmmssZ" + } + } +} \ No newline at end of file diff --git a/src/main/config/es_sv_settings.json b/src/main/config/es_sv_settings.json new file mode 100644 index 0000000..21a357c --- /dev/null +++ b/src/main/config/es_sv_settings.json @@ -0,0 +1,36 @@ +{ + "analysis": { + "filter": { + "ngram_filter": { + "type": "nGram", + "min_gram": 1, + "max_gram": 50, + "token_chars": [ + "letter", + "digit", + "punctuation", + "symbol" + ] + } + }, + "analyzer": { + "ngram_analyzer": { + "type": "custom", + "tokenizer": "whitespace", + "filter": [ + "lowercase", + "asciifolding", + "ngram_filter" + ] + }, + "whitespace_analyzer": { + "type": "custom", + "tokenizer": "whitespace", + "filter": [ + "lowercase", + "asciifolding" + ] + } + } + } +} \ No newline at end of file diff --git a/src/main/config/runner-web.xml b/src/main/config/runner-web.xml index 1c6ccdc..2e39e24 100644 --- a/src/main/config/runner-web.xml +++ b/src/main/config/runner-web.xml @@ -19,27 +19,22 @@ org.springframework.web.context.ContextLoaderListener - + + + PortalRestAPIProxy + org.openecomp.portalsdk.core.onboarding.crossapi.PortalRestAPIProxy + + + + LoginFilter + org.onap.aai.sparky.security.filter.LoginFilter + + ManagementServlet ajsc.ManagementServlet - - - VisualizationServlet - org.onap.aai.sparky.viewandinspect.servlet.VisualizationServlet - - - - ElasticSearchSynchronizerFilter - org.onap.aai.sparky.synchronizer.filter.ElasticSearchSynchronizerFilter - - - - OxmModelLoaderFilter - org.onap.aai.sparky.config.oxm.OxmModelLoaderFilter - - + WriteableRequestFilter com.att.ajsc.csi.writeablerequestfilter.WriteableRequestFilter @@ -59,11 +54,6 @@ ajsc.servlet.AjscCamelServlet - - SearchServlet - org.onap.aai.sparky.viewandinspect.servlet.SearchServlet - - springSecurityFilterChain org.springframework.web.filter.DelegatingFilterProxy @@ -74,16 +64,9 @@ org.springframework.web.servlet.DispatcherServlet 1 + - - PortalRestAPIProxy - org.openecomp.portalsdk.core.onboarding.crossapi.PortalRestAPIProxy - - - - - + diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index 1ab49ff..b77cf1c 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -15,10 +15,8 @@ RUN export JAVA_HOME # Build up the deployment folder structure RUN mkdir -p $MICRO_HOME -copy swm/package/nix/dist_files/appl/sparky-be/1.1.0-SNAPSHOT/ $MICRO_HOME/ -RUN ls -la $MICRO_HOME/ -RUN mkdir -p $BIN_HOME -COPY *.sh $BIN_HOME/ +copy 
swm/package/nix/dist_files/appl/inventory-ui-service/1.1.0-SNAPSHOT/ $MICRO_HOME/ +RUN ls -la $BIN_HOME/ RUN chmod 755 $BIN_HOME/* RUN ln -s /logs $MICRO_HOME/logs diff --git a/src/main/java/org/onap/aai/sparky/JaxrsEchoService.java b/src/main/java/org/onap/aai/sparky/JaxrsEchoService.java index 8e7e0a2..f7ea619 100644 --- a/src/main/java/org/onap/aai/sparky/JaxrsEchoService.java +++ b/src/main/java/org/onap/aai/sparky/JaxrsEchoService.java @@ -22,14 +22,14 @@ */ package org.onap.aai.sparky; -import com.att.ajsc.beans.PropertiesMapBean; -import com.att.ajsc.filemonitor.AJSCPropertiesMap; - import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; +import com.att.ajsc.beans.PropertiesMapBean; +import com.att.ajsc.filemonitor.AJSCPropertiesMap; + /** * The Class JaxrsEchoService. diff --git a/src/main/java/org/onap/aai/sparky/JaxrsUserService.java b/src/main/java/org/onap/aai/sparky/JaxrsUserService.java new file mode 100644 index 0000000..dc7f5a4 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/JaxrsUserService.java @@ -0,0 +1,61 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky; + +import java.util.HashMap; +import java.util.Map; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; + +/** + * The Class JaxrsUserService. + */ +@Path("/user") +public class JaxrsUserService { + + private static final Map userIdToNameMap; + + static { + userIdToNameMap = new HashMap(); + userIdToNameMap.put("dw113c", "Doug Wait"); + userIdToNameMap.put("so401q", "Stuart O'Day"); + } + + /** + * Lookup user. + * + * @param userId the user id + * @return the string + */ + @GET + @Path("/{userId}") + @Produces("text/plain") + public String lookupUser(@PathParam("userId") String userId) { + String name = userIdToNameMap.get(userId); + return name != null ? name : "unknown id"; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/Test.java b/src/main/java/org/onap/aai/sparky/Test.java new file mode 100644 index 0000000..6efca77 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/Test.java @@ -0,0 +1,27 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky; + +public class Test { + +} diff --git a/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java b/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java new file mode 100644 index 0000000..6d2ec6e --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java @@ -0,0 +1,238 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.aggregatevnf.search; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import javax.json.JsonObject; + +import org.apache.camel.Exchange; +import org.apache.camel.component.restlet.RestletConstants; +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; +import org.onap.aai.sparky.dataintegrity.config.DiUiConstants; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.filters.FilterQueryBuilder; +import org.onap.aai.sparky.search.filters.entity.SearchFilter; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.MediaType; +import org.restlet.data.Status; + +public class AggregateSummaryProcessor { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AggregateSummaryProcessor.class); + + private static final String KEY_FILTERS = "filters"; + + private SearchAdapter search = null; + + private String vnfAggregationIndexName; + private String elasticSearchIp; + private String elatsticSearchPort; + + public AggregateSummaryProcessor() { + try { + if (search == null) { + search = new SearchAdapter(); + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Failed to get elastic search configuration with error = " + exc.getMessage()); + } + } + + public void setVnfAggregationIndexName(String vnfAggregationIndexName) { + this.vnfAggregationIndexName = vnfAggregationIndexName; + } + + public void setElasticSearchIp(String elasticSearchIp) { + this.elasticSearchIp = elasticSearchIp; + } + + public void setElatsticSearchPort(String elatsticSearchPort) { + this.elatsticSearchPort = elatsticSearchPort; + } + + public void getFilteredAggregation(Exchange exchange) { + + Response response = + exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + + Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + + /* + * Disables automatic Apache Camel Restlet component logging which prints out an undesirable log + * entry which includes client (e.g. 
browser) information + */ + request.setLoggable(false); + + try { + String payload = exchange.getIn().getBody(String.class); + + if (payload == null || payload.isEmpty()) { + + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, "Request Payload is empty"); + + /* + * Don't throw back an error, just return an empty set + */ + + } else { + + JSONObject parameters = new JSONObject(payload); + + JSONArray requestFilters = null; + if (parameters.has(KEY_FILTERS)) { + requestFilters = parameters.getJSONArray(KEY_FILTERS); + } else { + + JSONObject zeroResponsePayload = new JSONObject(); + zeroResponsePayload.put("count", 0); + response.setStatus(Status.SUCCESS_OK); + response.setEntity(zeroResponsePayload.toString(), MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + + LOG.error(AaiUiMsgs.ERROR_FILTERS_NOT_FOUND); + return; + } + + if (requestFilters != null && requestFilters.length() > 0) { + List filtersToQuery = new ArrayList(); + for (int i = 0; i < requestFilters.length(); i++) { + JSONObject filterEntry = requestFilters.getJSONObject(i); + filtersToQuery.add(filterEntry); + } + + String jsonResponsePayload = getVnfFilterAggregations(filtersToQuery); + response.setStatus(Status.SUCCESS_OK); + response.setEntity(jsonResponsePayload, MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + + } else { + String emptyResponse = getEmptyAggResponse(); + response.setStatus(Status.SUCCESS_OK); + response.setEntity(emptyResponse, MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + LOG.error(AaiUiMsgs.ERROR_FILTERS_NOT_FOUND); + } + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "FilterProcessor failed to get filter list due to error = " + exc.getMessage()); + } + } + + private String getEmptyAggResponse() { + JSONObject aggPayload = new JSONObject(); + aggPayload.put("totalChartHits", 0); + aggPayload.put("buckets", new JSONArray()); + JSONObject payload = new JSONObject(); + payload.append("groupby_aggregation", aggPayload); + + return payload.toString(); + } + + private static final String FILTER_ID_KEY = "filterId"; + private static final String FILTER_VALUE_KEY = "filterValue"; + private static final int DEFAULT_SHOULD_MATCH_SCORE = 1; + private static final String VNF_FILTER_AGGREGATION = "vnfFilterAggregation"; + + + private String getVnfFilterAggregations(List filtersToQuery) throws IOException { + + List searchFilters = new ArrayList(); + for (JSONObject filterEntry : filtersToQuery) { + + String filterId = filterEntry.getString(FILTER_ID_KEY); + if (filterId != null) { + SearchFilter filter = new SearchFilter(); + filter.setFilterId(filterId); + + if (filterEntry.has(FILTER_VALUE_KEY)) { + String filterValue = filterEntry.getString(FILTER_VALUE_KEY); + filter.addValue(filterValue); + } + + searchFilters.add(filter); + } + } + + // Create query for summary by entity type + JsonObject vnfSearch = + FilterQueryBuilder.createCombinedBoolAndAggQuery(searchFilters, DEFAULT_SHOULD_MATCH_SCORE); + + // Parse response for summary by entity type query + OperationResult opResult = + search.doPost(getFullUrl(vnfAggregationIndexName, TierSupportUiConstants.ES_SEARCH_API), + vnfSearch.toString(), DiUiConstants.APP_JSON); + + return buildAggregateVnfResponseJson(opResult.getResult()); + + } + + /** + * Get Full URL for search using elastic search configuration. 
+ * + * @param api the api + * @return the full url + */ + private String getFullUrl(String indexName, String api) { + final String host = elasticSearchIp; + final String port = elatsticSearchPort; + return String.format("http://%s:%s/%s/%s", host, port, indexName, api); + } + + private String buildAggregateVnfResponseJson(String responseJsonStr) { + + JSONObject finalOutputToFe = new JSONObject(); + JSONObject responseJson = new JSONObject(responseJsonStr); + + + JSONObject hits = responseJson.getJSONObject("hits"); + int totalHits = hits.getInt("total"); + finalOutputToFe.put("total", totalHits); + + JSONObject aggregations = responseJson.getJSONObject("aggregations"); + String[] aggKeys = JSONObject.getNames(aggregations); + JSONObject aggregationsList = new JSONObject(); + + for (String aggName : aggKeys) { + JSONObject aggregation = aggregations.getJSONObject(aggName); + JSONArray buckets = aggregation.getJSONArray("buckets"); + aggregationsList.put(aggName, buckets); + } + + finalOutputToFe.put("aggregations", aggregationsList); + + return finalOutputToFe.toString(); + } +} diff --git a/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java b/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java new file mode 100644 index 0000000..ec3dfaa --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java @@ -0,0 +1,160 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.aggregatevnf.search; + +import java.util.ArrayList; +import java.util.List; + +import javax.json.JsonObject; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.common.search.CommonSearchSuggestion; +import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; +import org.onap.aai.sparky.dataintegrity.config.DiUiConstants; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.api.SearchProvider; +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; + +import com.fasterxml.jackson.databind.ObjectMapper; + +public class AggregateVnfSearchProvider implements SearchProvider { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AggregateVnfSearchProvider.class); + + private ObjectMapper mapper; + private static SearchAdapter search = null; + + private String autoSuggestIndexName; + private String elasticSearchIp; + private String elatsticSearchPort; + + public AggregateVnfSearchProvider() { + + mapper = new ObjectMapper(); + + try { + if (search == null) { + search = new SearchAdapter(); + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, + "Search Configuration Error. Error = " + exc.getMessage()); + } + } + + public void setAutoSuggestIndexName(String autoSuggestIndexName) { + this.autoSuggestIndexName = autoSuggestIndexName; + } + + public void setElasticSearchIp(String elasticSearchIp) { + this.elasticSearchIp = elasticSearchIp; + } + + public void setElatsticSearchPort(String elatsticSearchPort) { + this.elatsticSearchPort = elatsticSearchPort; + } + + /** + * Get Full URL for search using elastic search configuration. 
+ * + * @param api the api + * @return the full url + */ + private String getFullUrl(String indexName, String api) { + final String host = elasticSearchIp; + final String port = elatsticSearchPort; + return String.format("http://%s:%s/%s/%s", host, port, indexName, api); + } + + @Override + public List search(QuerySearchEntity queryRequest) { + + List returnList = new ArrayList(); + + try { + + /* Create suggestions query */ + JsonObject vnfSearch = VnfSearchQueryBuilder.createSuggestionsQuery( + String.valueOf(queryRequest.getMaxResults()), queryRequest.getQueryStr()); + + /* Parse suggestions response */ + OperationResult opResult = + search.doPost(getFullUrl(autoSuggestIndexName, TierSupportUiConstants.ES_SUGGEST_API), + vnfSearch.toString(), DiUiConstants.APP_JSON); + + String result = opResult.getResult(); + + if (!opResult.wasSuccessful()) { + LOG.error(AaiUiMsgs.ERROR_PARSING_JSON_PAYLOAD_VERBOSE, result); + return returnList; + } + + JSONObject responseJson = new JSONObject(result); + String suggestionsKey = "vnfs"; + JSONArray suggestionsArray = new JSONArray(); + JSONArray suggestions = responseJson.getJSONArray(suggestionsKey); + if (suggestions.length() > 0) { + suggestionsArray = suggestions.getJSONObject(0).getJSONArray("options"); + for (int i = 0; i < suggestionsArray.length(); i++) { + JSONObject querySuggestion = suggestionsArray.getJSONObject(i); + if (querySuggestion != null) { + CommonSearchSuggestion responseSuggestion = new CommonSearchSuggestion(); + responseSuggestion.setText(querySuggestion.getString("text")); + responseSuggestion.setRoute("vnfSearch"); // TODO -> Read route from + // suggestive-search.properties instead of + // hard coding + responseSuggestion + .setHashId(NodeUtils.generateUniqueShaDigest(querySuggestion.getString("text"))); + + // Extract filter list from JSON and add to response suggestion + JSONObject payload = querySuggestion.getJSONObject("payload"); + if (payload.length() > 0) { + JSONArray filterList = payload.getJSONArray("filterList"); + for (int filter = 0; filter < filterList.length(); filter++) { + String filterValueString = filterList.getJSONObject(filter).toString(); + UiFilterValueEntity filterValue = + mapper.readValue(filterValueString, UiFilterValueEntity.class); + responseSuggestion.getFilterValues().add(filterValue); + } + } + returnList.add(responseSuggestion); + } + } + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Search failed due to error = " + exc.getMessage()); + } + + return returnList; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java b/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java new file mode 100644 index 0000000..96fea3f --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java @@ -0,0 +1,184 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.aggregatevnf.search; + +import java.util.Map; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +import org.onap.aai.sparky.dataintegrity.config.DiUiConstants; + +/** + * Build a JSON payload to send to elastic search to get vnf search data. + */ + +public class VnfSearchQueryBuilder { + static final String SEVERITY = DiUiConstants.SEVERITY; + static final String TIMESTAMP = DiUiConstants.KEY_TIMESTAMP; + static final String VIOLATIONS = DiUiConstants.VIOLATIONS; + static final String CATEGORY = DiUiConstants.CATEGORY; + static final String ENTITY_TYPE = DiUiConstants.ENTITY_TYPE; + + static final String ITEM = DiUiConstants.KEY_ITEM; + static final String ITEM_AGG = DiUiConstants.KEY_ITEM_AGG; + static final String BY_ITEM = DiUiConstants.KEY_BY_ITEM; + static final String BUCKETS = DiUiConstants.KEY_BUCKETS; + + /** + * Creates the suggestions query. + * + * @param maxResults maximum number of suggestions to fetch + * @param queryStr query string + * @return the json object + */ + + /* + * { "vnfs" : { "text" : "VNFs", "completion" : { "field" : "entity_suggest", "size": 1 } } } + */ + public static JsonObject createSuggestionsQuery(String maxResults, String queryStr) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + JsonObjectBuilder completionBlob = Json.createObjectBuilder(); + completionBlob.add("field", "entity_suggest"); + completionBlob.add("size", maxResults); + + JsonObjectBuilder jsonAllBuilder = Json.createObjectBuilder(); + jsonAllBuilder.add("text", queryStr); + jsonAllBuilder.add("completion", completionBlob); + + jsonBuilder.add("vnfs", jsonAllBuilder.build()); + return jsonBuilder.build(); + } + + public static JsonObject getTermBlob(String key, String value) { + JsonObjectBuilder termBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder().add(key, value); + return termBlobBuilder.add("term", jsonBuilder.build()).build(); + } + + public static void getSummaryAggsBlob(JsonObjectBuilder aggsBlobBuilder, String aggsKey, + int resultSize) { + JsonObjectBuilder fieldBuilder = + Json.createObjectBuilder().add("field", aggsKey).add("size", resultSize); + JsonObject aggsFieldBlob = fieldBuilder.build(); + JsonObjectBuilder defaultBlobBuilder = Json.createObjectBuilder().add("terms", aggsFieldBlob); + JsonObject defaultBlob = defaultBlobBuilder.build(); + aggsBlobBuilder.add("default", defaultBlob); + } + + public static void buildSingleTermCountQuery(JsonObjectBuilder jsonBuilder, String key, + String value) { + jsonBuilder.add("query", getTermBlob(key, value)); + } + + public static void buildSingleTermSummaryQuery(JsonObjectBuilder jsonBuilder, String key, + String value, String groupByKey) { + JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder 
aggsBlobBuilder = Json.createObjectBuilder(); + + queryBlobBuilder.add("constant_score", + Json.createObjectBuilder().add("filter", getTermBlob(key, value))); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("query", queryBlobBuilder.build()); + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildMultiTermSummaryQuery(JsonObjectBuilder jsonBuilder, + Map attributes, String groupByKey) { + JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + JsonArrayBuilder mustBlobBuilder = Json.createArrayBuilder(); + for (String key : attributes.keySet()) { + mustBlobBuilder.add(getTermBlob(key, attributes.get(key))); + } + JsonArray mustBlob = mustBlobBuilder.build(); + + queryBlobBuilder.add("constant_score", Json.createObjectBuilder().add("filter", + Json.createObjectBuilder().add("bool", Json.createObjectBuilder().add("must", mustBlob)))); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("query", queryBlobBuilder.build()); + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildZeroTermSummaryQuery(JsonObjectBuilder jsonBuilder, String groupByKey) { + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildMultiTermCountQuery(JsonObjectBuilder jsonBuilder, + Map attributes) { + JsonArrayBuilder mustBlobBuilder = Json.createArrayBuilder(); + for (String key : attributes.keySet()) { + mustBlobBuilder.add(getTermBlob(key, attributes.get(key))); + } + jsonBuilder.add("query", Json.createObjectBuilder().add("bool", + Json.createObjectBuilder().add("must", mustBlobBuilder))); + } + + + + public static JsonObject createSummaryByEntityTypeQuery(Map attributes, + String groupByKey) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + jsonBuilder.add("size", "0"); // avoid source data + if (attributes.size() == 0) { + buildZeroTermSummaryQuery(jsonBuilder, groupByKey); + } else if (attributes.size() == 1) { + Map.Entry entry = attributes.entrySet().iterator().next(); + buildSingleTermSummaryQuery(jsonBuilder, entry.getKey(), entry.getValue(), groupByKey); + } else { + buildMultiTermSummaryQuery(jsonBuilder, attributes, groupByKey); + } + return jsonBuilder.build(); + } + + public static JsonObject createEntityCountsQuery(Map attributes) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + if (attributes.size() == 1) { + Map.Entry entry = attributes.entrySet().iterator().next(); + buildSingleTermCountQuery(jsonBuilder, entry.getKey(), entry.getValue()); + } else { + buildMultiTermCountQuery(jsonBuilder, attributes); + } + return jsonBuilder.build(); + } + + public static JsonArray getSortCriteria(String sortFieldName, String sortOrder) { + JsonArrayBuilder jsonBuilder = Json.createArrayBuilder(); + jsonBuilder.add(Json.createObjectBuilder().add(sortFieldName, + Json.createObjectBuilder().add("order", sortOrder))); + + return jsonBuilder.build(); + } +} diff --git a/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java b/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java new file mode 100644 index 0000000..6d8decf --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java @@ -0,0 +1,232 @@ +/** + * 
============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.aggregation.sync; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.config.oxm.SuggestionEntityDescriptor; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncController; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class AggregationSyncControllerFactory implements SyncControllerRegistrar { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AggregationSyncControllerFactory.class); + + private ActiveInventoryAdapter aaiAdapter; + private ElasticSearchAdapter esAdapter; + private SuggestionEntityLookup suggestionEntityLookup; + + private Map aggregationEntityToIndexMap; + private Map indexNameToSchemaConfigMap; + + private ElasticSearchEndpointConfig elasticSearchEndpointConfig; + private SyncControllerConfig syncControllerConfig; + private SyncControllerRegistry syncControllerRegistry; + private NetworkStatisticsConfig aaiStatConfig; + private NetworkStatisticsConfig esStatConfig; + + private List syncControllers; + + public AggregationSyncControllerFactory(ElasticSearchEndpointConfig esEndpointConfig, + SyncControllerConfig syncControllerConfig, SyncControllerRegistry syncControllerRegistry, + SuggestionEntityLookup suggestionEntityLookup) { + this.syncControllers = new ArrayList(); + this.elasticSearchEndpointConfig = esEndpointConfig; + this.syncControllerConfig = syncControllerConfig; + this.syncControllerRegistry = syncControllerRegistry; + this.suggestionEntityLookup = suggestionEntityLookup; + } + + public 
NetworkStatisticsConfig getAaiStatConfig() { + return aaiStatConfig; + } + + public void setAaiStatConfig(NetworkStatisticsConfig aaiStatConfig) { + this.aaiStatConfig = aaiStatConfig; + } + + public NetworkStatisticsConfig getEsStatConfig() { + return esStatConfig; + } + + public void setEsStatConfig(NetworkStatisticsConfig esStatConfig) { + this.esStatConfig = esStatConfig; + } + + public Map getIndexNameToSchemaConfigMap() { + return indexNameToSchemaConfigMap; + } + + public void setIndexNameToSchemaConfigMap( + Map indexNameToSchemaConfigMap) { + this.indexNameToSchemaConfigMap = indexNameToSchemaConfigMap; + } + + public ElasticSearchEndpointConfig getElasticSearchEndpointConfig() { + return elasticSearchEndpointConfig; + } + + public void setElasticSearchEndpointConfig( + ElasticSearchEndpointConfig elasticSearchEndpointConfig) { + this.elasticSearchEndpointConfig = elasticSearchEndpointConfig; + } + + public SyncControllerConfig getSyncControllerConfig() { + return syncControllerConfig; + } + + public void setSyncControllerConfig(SyncControllerConfig syncControllerConfig) { + this.syncControllerConfig = syncControllerConfig; + } + + public ActiveInventoryAdapter getAaiAdapter() { + return aaiAdapter; + } + + public void setAaiAdapter(ActiveInventoryAdapter aaiAdapter) { + this.aaiAdapter = aaiAdapter; + } + + public ElasticSearchAdapter getEsAdapter() { + return esAdapter; + } + + public void setEsAdapter(ElasticSearchAdapter esAdapter) { + this.esAdapter = esAdapter; + } + + public SuggestionEntityLookup getSuggestionEntityLookup() { + return suggestionEntityLookup; + } + + public void setSuggestionEntityLookup(SuggestionEntityLookup suggestionEntityLookup) { + this.suggestionEntityLookup = suggestionEntityLookup; + } + + public Map getAggregationEntityToIndexMap() { + return aggregationEntityToIndexMap; + } + + public void setAggregationEntityToIndexMap(Map aggregationEntityToIndexMap) { + this.aggregationEntityToIndexMap = aggregationEntityToIndexMap; + } + + public void buildControllers() { + + if (syncControllerConfig.isEnabled()) { + + Map suggestionEntitites = + suggestionEntityLookup.getSuggestionSearchEntityDescriptors(); + SyncControllerImpl aggregationSyncController = null; + + for (String entityType : suggestionEntitites.keySet()) { + + String indexName = aggregationEntityToIndexMap.get(entityType); + + if (indexName == null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Could not determine aggregation index name" + " for entity type: " + entityType); + continue; + } + + try { + + aggregationSyncController = new SyncControllerImpl(syncControllerConfig, entityType); + + ElasticSearchSchemaConfig schemaConfig = indexNameToSchemaConfigMap.get(indexName); + + if (schemaConfig == null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Could not determine elastic search schema config for index name: " + indexName); + continue; + } + + IndexIntegrityValidator aggregationIndexValidator = + new IndexIntegrityValidator(esAdapter, schemaConfig, elasticSearchEndpointConfig, + ElasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + aggregationSyncController.registerIndexValidator(aggregationIndexValidator); + + AggregationSynchronizer aggSynchronizer = new AggregationSynchronizer(entityType, + schemaConfig, syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig); + + aggSynchronizer.setAaiAdapter(aaiAdapter); + 
aggSynchronizer.setElasticSearchAdapter(esAdapter); + + aggregationSyncController.registerEntitySynchronizer(aggSynchronizer); + + IndexCleaner entityDataIndexCleaner = + new ElasticSearchIndexCleaner(esAdapter, elasticSearchEndpointConfig, schemaConfig); + + aggregationSyncController.registerIndexCleaner(entityDataIndexCleaner); + + syncControllers.add(aggregationSyncController); + } catch (Exception exc) { + + exc.printStackTrace(); + + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Failed to build aggregation sync controller. Error : " + exc.getMessage()); + } + + } + } else { + LOG.info(AaiUiMsgs.INFO_GENERIC, "Sync controller with name = " + + syncControllerConfig.getControllerName() + " is disabled"); + } + } + + @Override + public void registerController() { + + buildControllers(); + + if (syncControllerRegistry != null) { + for (SyncController controller : syncControllers) { + syncControllerRegistry.registerSyncController(controller); + } + } + + } +} diff --git a/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java b/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java new file mode 100644 index 0000000..2a115db --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java @@ -0,0 +1,778 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.aggregation.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Deque; +import java.util.EnumSet; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; +import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.SynchronizerConstants; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.AggregationEntity; +import org.onap.aai.sparky.sync.entity.MergableEntity; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class AutosuggestionSynchronizer. + */ +public class AggregationSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + /** + * The Class RetryAggregationEntitySyncContainer. + */ + private class RetryAggregationEntitySyncContainer { + NetworkTransaction txn; + AggregationEntity ae; + + /** + * Instantiates a new retry aggregation entity sync container. 
+ * + * @param txn the txn + * @param ae the se + */ + public RetryAggregationEntitySyncContainer(NetworkTransaction txn, AggregationEntity ae) { + this.txn = txn; + this.ae = ae; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public AggregationEntity getAggregationEntity() { + return ae; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AggregationSynchronizer.class); + private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; + + private boolean allWorkEnumerated; + private Deque selflinks; + private Deque retryQueue; + private Map retryLimitTracker; + protected ExecutorService esPutExecutor; + private ConcurrentHashMap entityCounters; + private boolean syncInProgress; + private Map contextMap; + private String entityType; + private ElasticSearchSchemaConfig schemaConfig; + + /** + * Instantiates a new entity aggregation synchronizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public AggregationSynchronizer(String entityType, ElasticSearchSchemaConfig schemaConfig, + int numSyncWorkers, int numActiveInventoryWorkers, int numElasticWorkers, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig) + throws Exception { + + super(LOG, "AGGES-" + schemaConfig.getIndexName().toUpperCase(), numSyncWorkers, + numActiveInventoryWorkers, numElasticWorkers, schemaConfig.getIndexName(), aaiStatConfig, + esStatConfig); // multiple + // Autosuggestion + // Entity Synchronizer will + // run for different indices + + this.schemaConfig = schemaConfig; + this.entityType = entityType; + this.allWorkEnumerated = false; + this.entityCounters = new ConcurrentHashMap(); + this.synchronizerName = "Entity Aggregation Synchronizer"; + this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); + this.syncInProgress = false; + this.allWorkEnumerated = false; + this.selflinks = new ConcurrentLinkedDeque(); + this.retryQueue = new ConcurrentLinkedDeque(); + this.retryLimitTracker = new ConcurrentHashMap(); + + this.esPutExecutor = NodeUtils.createNamedExecutor("AGGES-ES-PUT", 1, LOG); + + this.aaiEntityStats.intializeEntityCounters(entityType); + this.esEntityStats.intializeEntityCounters(entityType); + + this.contextMap = MDC.getCopyOfContextMap(); + } + + /** + * Collect all the work. + * + * @return the operation state + */ + private OperationState collectAllTheWork() { + final Map contextMap = MDC.getCopyOfContextMap(); + final String entity = this.getEntityType(); + try { + + aaiWorkOnHand.set(1); + + supplyAsync(new Supplier() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = aaiAdapter.getSelfLinksByEntityType(entity); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + // TODO -> LOG, what should be logged here? + + exc.printStackTrace(); + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred getting data from AAI. 
Error = " + error.getMessage()); + } + }); + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + // TODO -> LOG, waht should be logged here? + } + + return OperationState.OK; + } + + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetryAggregationEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + AggregationEntity ae = rsc.getAggregationEntity(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = getElasticFullUrl("/" + ae.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, ae); + } + }); + } + + } + } + } + + /** + * Perform document upsert. + * + * @param esGetTxn the es get txn + * @param ae the ae + */ + protected void performDocumentUpsert(NetworkTransaction esGetTxn, AggregationEntity ae) { + /** + *

+ *
+ * As part of the response processing we need to do the following:
+ *
+ * 1. Extract the version (if present); it will be the ETAG when we use the
+ * Search-Abstraction-Service.
+ *
+ * 2. Spawn the next task, which is to do the PUT operation into elastic with or without the
+ * version tag:
+ *
+ * a) if version is null or RC=404, then standard PUT, no _update with version tag
+ *
+ * b) if version != null, do PUT with _update?version=versionNumber in the URI to elastic
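+ *
+ * For illustration only (a sketch based on the implementation below, not part of the original
+ * Javadoc): when no prior document is found (RC=404) the payload is PUT directly to the document
+ * URL built by getElasticFullUrl("/" + ae.getId(), getIndexName()); when a version is found the
+ * payload is wrapped via elasticSearchAdapter.buildBulkImportOperationRequest(...) with that
+ * version and sent to the bulk API URL from ElasticSearchConfig.getConfig().getBulkUrl() instead.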
+ */ + String link = null; + try { + link = getElasticFullUrl("/" + ae.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + String versionNumber = null; + boolean wasEntryDiscovered = false; + if (esGetTxn.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, ae.getEntityPrimaryKeyValue()); + } else if (esGetTxn.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + try { + versionNumber = NodeUtils.extractFieldValueFromObject( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_version"); + } catch (IOException exc) { + String message = + "Error extracting version number from response, aborting aggregation entity sync of " + + ae.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + /* + * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we + * return. + */ + LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetTxn.getOperationResult().getResultCode())); + return; + } + + try { + String jsonPayload = null; + if (wasEntryDiscovered) { + try { + ArrayList sourceObject = new ArrayList(); + NodeUtils.extractObjectsByKey( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_source", sourceObject); + + if (!sourceObject.isEmpty()) { + String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); + MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); + ObjectReader updater = mapper.readerForUpdating(me); + MergableEntity merged = updater.readValue(ae.getAsJson()); + jsonPayload = mapper.writeValueAsString(merged); + } + } catch (IOException exc) { + String message = + "Error extracting source value from response, aborting aggregation entity sync of " + + ae.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + jsonPayload = ae.getAsJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = + elasticSearchAdapter.buildBulkImportOperationRequest(schemaConfig.getIndexName(), + schemaConfig.getIndexDocType(), ae.getId(), versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetTxn.getEntityType()); + transactionTracker.setDescriptor(esGetTxn.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), + requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Aggregation entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, ae); + } + }); + } + + } else { + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync( + new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = + "Aggregation entity sync UPDATE PUT error - " + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, ae); + } + }); + } + } + } catch (Exception exc) { + String message = "Exception caught during aggregation entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Aggregation entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + /** + * Process store document result. 
+ * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param ae the ae + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, AggregationEntity ae) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(ae.getId())) { + esWorkOnHand.incrementAndGet(); + + RetryAggregationEntitySyncContainer rsc = + new RetryAggregationEntitySyncContainer(esGetResult, ae); + retryQueue.push(rsc); + + String message = "Store document failed during aggregation entity synchronization" + + " due to version conflict. Entity will be re-synced."; + LOG.warn(AaiUiMsgs.ERROR_GENERIC, message); + } + } else { + String message = + "Store document failed during aggregation entity synchronization with result code " + + or.getResultCode() + " and result message " + or.getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + } + + /** + * Sync entity types. + */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = OxmEntityLookup.getInstance().getEntityDescriptors() + .get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + linkDescriptor.getSelfLink()); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + + } + + } + + /** + * Fetch document for upsert. + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + // modified + if (!txn.getOperationResult().wasSuccessful()) { + String message = "Self link failure. 
Result - " + txn.getOperationResult().getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return; + } + + try { + final String jsonResult = txn.getOperationResult().getResult(); + if (jsonResult != null && jsonResult.length() > 0) { + + AggregationEntity ae = new AggregationEntity(); + ae.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); + populateAggregationEntityDocument(ae, jsonResult, txn.getDescriptor()); + ae.deriveFields(); + + String link = null; + try { + link = getElasticFullUrl("/" + ae.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, ae); + } + }); + } + } + + } catch (JsonProcessingException exc) { + // TODO -> LOG, waht should be logged here? + } catch (IOException exc) { + // TODO -> LOG, waht should be logged here? + } + } + + + /** + * Populate aggregation entity document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. + */ + protected void populateAggregationEntityDocument(AggregationEntity doc, String result, + OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { + doc.setEntityType(resultDescriptor.getEntityName()); + JsonNode entityNode = mapper.readTree(result); + Map map = mapper.convertValue(entityNode, Map.class); + doc.copyAttributeKeyValuePair(map); + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + String message = "Could not deserialize JSON (representing operation result) as node tree. " + + "Operation result = " + jsonResult + ". 
" + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + OxmEntityDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + + descriptor = OxmEntityLookup.getInstance().getEntityDescriptors().get(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + + selflinks.add(new SelfLinkDescriptor(resourceLink, + SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType)); + + + } + } + } + } + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + syncStartedTimeStampInMs = System.currentTimeMillis(); + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "AggregationSynchronizer", "", "Sync", ""); + + return collectAllTheWork(); + } + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return getStatReport(syncDurationInMs, showFinalReport); + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " + + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); + } + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + this.syncInProgress = false; + + return true; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() + */ + @Override + public void clearCache() { + + if (syncInProgress) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java b/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java new file mode 100644 index 0000000..5ee11be --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java @@ -0,0 +1,391 
@@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.aggregation.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.Collection; +import java.util.EnumSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import javax.json.Json; +import javax.ws.rs.core.MediaType; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class HistoricalEntitySummarizer. + */ +public class HistoricalEntitySummarizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(HistoricalEntitySummarizer.class); + private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; + + private boolean allWorkEnumerated; + private ConcurrentHashMap entityCounters; + private boolean syncInProgress; + private Map contextMap; + private ElasticSearchSchemaConfig schemaConfig; + + /** + * Instantiates a new historical entity summarizer. 
+ * + * @param indexName the index name + * @throws Exception the exception + */ + public HistoricalEntitySummarizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers, + int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig) throws Exception { + super(LOG, "HES", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(), + aaiStatConfig, esStatConfig); + + this.schemaConfig = schemaConfig; + this.allWorkEnumerated = false; + this.entityCounters = new ConcurrentHashMap(); + this.synchronizerName = "Historical Entity Summarizer"; + this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); + this.syncInProgress = false; + this.contextMap = MDC.getCopyOfContextMap(); + this.syncDurationInMs = -1; + } + + /** + * Collect all the work. + * + * @return the operation state + */ + private OperationState collectAllTheWork() { + + Map descriptorMap = + SearchableEntityLookup.getInstance().getSearchableEntityDescriptors(); + + if (descriptorMap.isEmpty()) { + LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "historical entities"); + + return OperationState.ERROR; + } + + Collection entityTypes = descriptorMap.keySet(); + + AtomicInteger asyncWoH = new AtomicInteger(0); + + asyncWoH.set(entityTypes.size()); + + try { + for (String entityType : entityTypes) { + + supplyAsync(new Supplier() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + try { + OperationResult typeLinksResult = aaiAdapter.getSelfLinksByEntityType(entityType); + updateActiveInventoryCounters(HttpMethod.GET, entityType, typeLinksResult); + processEntityTypeSelfLinks(entityType, typeLinksResult); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc.getMessage()); + + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + asyncWoH.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, error.getMessage()); + } + + }); + + } + + + while (asyncWoH.get() > 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + indexName + " summarizer waiting for all the links to be processed."); + } + + Thread.sleep(250); + } + + esWorkOnHand.set(entityCounters.size()); + + // start doing the real work + allWorkEnumerated = true; + + insertEntityTypeCounters(); + + if (LOG.isDebugEnabled()) { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\n\nHistorical Entity Counters:"); + + for (Entry entry : entityCounters.entrySet()) { + sb.append("\n").append(entry.getKey()).append(" = ").append(entry.getValue().get()); + } + + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString()); + + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, exc.getMessage()); + + + esWorkOnHand.set(0); + allWorkEnumerated = true; + + return OperationState.ERROR; + } + + return OperationState.OK; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "HistoricalEntitySynchronizer", "", "Sync", ""); + + if (syncInProgress) { + LOG.info(AaiUiMsgs.HISTORICAL_SYNC_PENDING); + return OperationState.PENDING; + } + + clearCache(); + + syncInProgress = true; + this.syncStartedTimeStampInMs = System.currentTimeMillis(); + allWorkEnumerated = false; + + return collectAllTheWork(); + } + + /** + * Process entity type self 
links. + * + * @param entityType the entity type + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(String entityType, OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getMessage()); + return; + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData != null && resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + entityCounters.put(entityType, new AtomicInteger(resultDataArrayNode.size())); + } + } + + } + + /** + * Insert entity type counters. + */ + private void insertEntityTypeCounters() { + + if (esWorkOnHand.get() <= 0) { + return; + } + + SimpleDateFormat dateFormat = new SimpleDateFormat(INSERTION_DATE_TIME_FORMAT); + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + String currentFormattedTimeStamp = dateFormat.format(timestamp); + + Set> entityCounterEntries = entityCounters.entrySet(); + + for (Entry entityCounterEntry : entityCounterEntries) { + + supplyAsync(new Supplier() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + String jsonString = + Json.createObjectBuilder().add("count", entityCounterEntry.getValue().get()) + .add("entityType", entityCounterEntry.getKey()) + .add("timestamp", currentFormattedTimeStamp).build().toString(); + + String link = null; + try { + link = getElasticFullUrl("", indexName); + OperationResult or = + elasticSearchAdapter.doPost(link, jsonString, MediaType.APPLICATION_JSON_TYPE); + updateElasticSearchCounters(HttpMethod.POST, entityCounterEntry.getKey(), or); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, exc.getMessage()); + } + + return null; + } + + }, esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + }); + + } + + while (esWorkOnHand.get() > 0) { + + try { + Thread.sleep(500); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.INTERRUPTED, "historical Entities", exc.getMessage()); + } + } + + } + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return this.getStatReport(syncDurationInMs, showFinalReport); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " + + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); + } + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + this.syncInProgress = false; + + return true; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() + */ + @Override + 
public void clearCache() { + + if (syncInProgress) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Historical Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java b/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java new file mode 100644 index 0000000..1f7db2e --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java @@ -0,0 +1,90 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.aggregation.sync; + +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class HistoricalEntitySyncController extends SyncControllerImpl + implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + + public HistoricalEntitySyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + int syncFrequencyInMinutes, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig) throws Exception { + super(syncControllerConfig); + + // final String controllerName = "Historical Entity Count Synchronizer"; + + long taskFrequencyInMs = syncFrequencyInMinutes * 60 * 1000; + + setDelayInMs(taskFrequencyInMs); + setSyncFrequencyInMs(taskFrequencyInMs); + + IndexIntegrityValidator entityCounterHistoryValidator = new IndexIntegrityValidator(esAdapter, + schemaConfig, endpointConfig, ElasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(entityCounterHistoryValidator); + + HistoricalEntitySummarizer historicalSummarizer = new HistoricalEntitySummarizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig); + + historicalSummarizer.setAaiAdapter(aaiAdapter); + historicalSummarizer.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(historicalSummarizer); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + @Override + public void registerController() { + if (syncControllerRegistry != null) { + if (syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + + } +} diff --git a/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java b/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java index 9d2fec6..6e7d854 100644 --- a/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java +++ b/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java @@ -20,6 +20,7 @@ * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
*/ + package org.onap.aai.sparky.analytics; import java.util.HashMap; @@ -31,35 +32,6 @@ import java.util.concurrent.atomic.AtomicInteger; public class AbstractStatistics implements ComponentStatistics { private HashMap namedCounters; - - /** - * @return the namedCounters - */ - public HashMap getNamedCounters() { - return namedCounters; - } - - /** - * @param namedCounters the namedCounters to set - */ - public void setNamedCounters(HashMap namedCounters) { - this.namedCounters = namedCounters; - } - - /** - * @return the namedHistograms - */ - public HashMap getNamedHistograms() { - return namedHistograms; - } - - /** - * @param namedHistograms the namedHistograms to set - */ - public void setNamedHistograms(HashMap namedHistograms) { - this.namedHistograms = namedHistograms; - } - private HashMap namedHistograms; /** @@ -73,7 +45,7 @@ public class AbstractStatistics implements ComponentStatistics { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.analytics.ComponentStatistics#addCounter(java.lang.String) + * @see org.openecomp.sparky.analytics.ComponentStatistics#addCounter(java.lang.String) */ /* * sync-lock the creation of counters during initialization, but run time should not use lock @@ -96,7 +68,7 @@ public class AbstractStatistics implements ComponentStatistics { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.analytics.ComponentStatistics#pegCounter(java.lang.String) + * @see org.openecomp.sparky.analytics.ComponentStatistics#pegCounter(java.lang.String) */ @Override public void pegCounter(String key) { @@ -112,7 +84,7 @@ public class AbstractStatistics implements ComponentStatistics { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.analytics.ComponentStatistics#incrementCounter(java.lang.String, int) + * @see org.openecomp.sparky.analytics.ComponentStatistics#incrementCounter(java.lang.String, int) */ @Override public void incrementCounter(String key, int value) { @@ -129,7 +101,7 @@ public class AbstractStatistics implements ComponentStatistics { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.analytics.ComponentStatistics#addHistogram(java.lang.String, + * @see org.openecomp.sparky.analytics.ComponentStatistics#addHistogram(java.lang.String, * java.lang.String, long, int, int) */ @Override @@ -147,7 +119,7 @@ public class AbstractStatistics implements ComponentStatistics { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.analytics.ComponentStatistics#updateHistogram(java.lang.String, long) + * @see org.openecomp.sparky.analytics.ComponentStatistics#updateHistogram(java.lang.String, long) */ @Override public void updateHistogram(String key, long value) { @@ -161,7 +133,7 @@ public class AbstractStatistics implements ComponentStatistics { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.analytics.ComponentStatistics#reset() + * @see org.openecomp.sparky.analytics.ComponentStatistics#reset() */ @Override public void reset() { diff --git a/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java b/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java index 622693c..50941cc 100644 --- a/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java +++ b/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java @@ -20,7 +20,6 @@ * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
*/ - package org.onap.aai.sparky.analytics; /** @@ -33,62 +32,6 @@ public class HistoricalCounter { private double min; - /** - * @return the totalOfSamples - */ - public double getTotalOfSamples() { - return totalOfSamples; - } - - /** - * @param totalOfSamples the totalOfSamples to set - */ - public void setTotalOfSamples(double totalOfSamples) { - this.totalOfSamples = totalOfSamples; - } - - /** - * @return the maintainSingleValue - */ - public boolean isMaintainSingleValue() { - return maintainSingleValue; - } - - /** - * @param maintainSingleValue the maintainSingleValue to set - */ - public void setMaintainSingleValue(boolean maintainSingleValue) { - this.maintainSingleValue = maintainSingleValue; - } - - /** - * @param min the min to set - */ - public void setMin(double min) { - this.min = min; - } - - /** - * @param max the max to set - */ - public void setMax(double max) { - this.max = max; - } - - /** - * @param numSamples the numSamples to set - */ - public void setNumSamples(long numSamples) { - this.numSamples = numSamples; - } - - /** - * @param value the value to set - */ - public void setValue(double value) { - this.value = value; - } - private double max; private double totalOfSamples; @@ -175,6 +118,31 @@ public class HistoricalCounter { return (totalOfSamples / numSamples); } + public void setMin(double min) { + this.min = min; + } + + public void setMax(double max) { + this.max = max; + } + + public double getTotalOfSamples() { + return totalOfSamples; + } + + public void setTotalOfSamples(double totalOfSamples) { + this.totalOfSamples = totalOfSamples; + } + + public void setNumSamples(long numSamples) { + this.numSamples = numSamples; + } + + public void setMaintainSingleValue(boolean maintainSingleValue) { + this.maintainSingleValue = maintainSingleValue; + } + + /** * Reset. */ diff --git a/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java b/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java new file mode 100644 index 0000000..950eb45 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java @@ -0,0 +1,97 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.autosuggestion.sync; + +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class AutoSuggestionSyncController extends SyncControllerImpl + implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + + public AutoSuggestionSyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig) + throws Exception { + super(syncControllerConfig); + + // final String controllerName = "Auto Suggestion Synchronizer"; + + IndexIntegrityValidator autoSuggestionIndexValidator = new IndexIntegrityValidator(esAdapter, + schemaConfig, endpointConfig, ElasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(autoSuggestionIndexValidator); + + AutosuggestionSynchronizer suggestionSynchronizer = new AutosuggestionSynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig); + + suggestionSynchronizer.setAaiAdapter(aaiAdapter); + suggestionSynchronizer.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(suggestionSynchronizer); + + IndexCleaner autosuggestIndexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(autosuggestIndexCleaner); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + + + @Override + public void registerController() { + + if (syncControllerRegistry != null) { + if (syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + + } +} diff --git a/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java b/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java new file mode 100644 index 0000000..4ce7ce3 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java @@ -0,0 +1,749 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.autosuggestion.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Deque; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SuggestionEntityDescriptor; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.SynchronizerConstants; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.SuggestionsPermutation; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class AutosuggestionSynchronizer. + */ +public class AutosuggestionSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + private class RetrySuggestionEntitySyncContainer { + NetworkTransaction txn; + SuggestionSearchEntity ssec; + + /** + * Instantiates a new RetrySuggestionEntitySyncContainer. 
+ * + * @param txn the txn + * @param icer the icer + */ + public RetrySuggestionEntitySyncContainer(NetworkTransaction txn, SuggestionSearchEntity icer) { + this.txn = txn; + this.ssec = icer; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public SuggestionSearchEntity getSuggestionSearchEntity() { + return ssec; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AutosuggestionSynchronizer.class); + private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; + + private boolean allWorkEnumerated; + private Deque selflinks; + private ConcurrentHashMap entityCounters; + private boolean syncInProgress; + private Map contextMap; + protected ExecutorService esPutExecutor; + private Deque retryQueue; + private Map retryLimitTracker; + + /** + * Instantiates a new historical entity summarizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public AutosuggestionSynchronizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers, + int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig) throws Exception { + super(LOG, "ASES-" + schemaConfig.getIndexName().toUpperCase(), internalSyncWorkers, aaiWorkers, + esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig); + + this.allWorkEnumerated = false; + this.selflinks = new ConcurrentLinkedDeque(); + this.entityCounters = new ConcurrentHashMap(); + this.synchronizerName = "Autosuggestion Entity Synchronizer"; + this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); + this.syncInProgress = false; + this.contextMap = MDC.getCopyOfContextMap(); + this.esPutExecutor = NodeUtils.createNamedExecutor("SUES-ES-PUT", 5, LOG); + this.syncDurationInMs = -1; + } + + /** + * Collect all the work. + * + * @return the operation state + */ + private OperationState collectAllTheWork() { + final Map contextMap = MDC.getCopyOfContextMap(); + Map descriptorMap = + SuggestionEntityLookup.getInstance().getSuggestionSearchEntityDescriptors(); + + if (descriptorMap.isEmpty()) { + LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES); + LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES); + return OperationState.ERROR; + } + + Collection syncTypes = descriptorMap.keySet(); + + try { + + /* + * launch a parallel async thread to process the documents for each entity-type (to max the of + * the configured executor anyway) + */ + + aaiWorkOnHand.set(syncTypes.size()); + + for (String key : syncTypes) { + + supplyAsync(new Supplier() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + // TODO -> LOG, what should be logged here? + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred getting data from AAI. 
Error = " + error.getMessage()); + } + }); + + } + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + // TODO -> LOG, waht should be logged here? + } + + return OperationState.OK; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + syncStartedTimeStampInMs = System.currentTimeMillis(); + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "AutosuggestionSynchronizer", "", "Sync", ""); + + return collectAllTheWork(); + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + String message = "Could not deserialize JSON (representing operation result) as node tree. " + + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + OxmEntityDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + + descriptor = OxmEntityLookup.getInstance().getEntityDescriptors().get(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + selflinks.add(new SelfLinkDescriptor(resourceLink, + SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType)); + + + } + } + } + } + } + + /** + * Sync entity types. 
+ */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = OxmEntityLookup.getInstance().getEntityDescriptors() + .get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + linkDescriptor.getSelfLink()); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + + } + + } + + /* + * Return a set of valid suggestion attributes for the provided entityName that are present in the + * JSON + * + * @param node JSON node in which the attributes should be found + * + * @param entityName Name of the entity + * + * @return List of all valid suggestion attributes(key's) + */ + public List getSuggestableAttrNamesFromReponse(JsonNode node, String entityName) { + List suggestableAttr = new ArrayList(); + HashMap desc = + SuggestionEntityLookup.getInstance().getSuggestionSearchEntityOxmModel().get(entityName); + String attr = desc.get("suggestibleAttributes"); + suggestableAttr = Arrays.asList(attr.split(",")); + List suggestableValue = new ArrayList<>(); + for (String attribute : suggestableAttr) { + if (node.get(attribute) != null && node.get(attribute).asText().length() > 0) { + suggestableValue.add(attribute); + } + } + return suggestableValue; + } + + /** + * Fetch all the documents for upsert. Based on the number of permutations that are available the + * number of documents will be different + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + if (!txn.getOperationResult().wasSuccessful()) { + String message = "Self link failure. Result - " + txn.getOperationResult().getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return; + } + try { + final String jsonResult = txn.getOperationResult().getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + // Step 1: Calculate the number of possible permutations of attributes + String entityName = txn.getDescriptor().getEntityName(); + JsonNode entityNode = mapper.readTree(jsonResult); + + List availableSuggestableAttrName = + getSuggestableAttrNamesFromReponse(entityNode, entityName); + + ArrayList> uniqueLists = + SuggestionsPermutation.getNonEmptyUniqueLists(availableSuggestableAttrName); + // Now we have a list of all possible permutations for the status that are + // defined for this entity type. Try inserting a document for every combination. 
+ for (ArrayList uniqueList : uniqueLists) { + + SuggestionSearchEntity sse = + new SuggestionSearchEntity(SuggestionEntityLookup.getInstance()); + sse.setSuggestableAttr(uniqueList); + sse.setFilterBasedPayloadFromResponse(entityNode, entityName, uniqueList); + sse.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); + populateSuggestionSearchEntityDocument(sse, jsonResult, txn); + // The unique id for the document will be created at derive fields + sse.deriveFields(); + // Insert the document only if it has valid statuses + if (sse.isSuggestableDoc()) { + String link = null; + try { + link = getElasticFullUrl("/" + sse.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, sse); + } + }); + } + } + } + } + } catch (JsonProcessingException exc) { + // TODO -> LOG, waht should be logged here? + } catch (IOException exc) { + // TODO -> LOG, waht should be logged here? + } + } + + protected void populateSuggestionSearchEntityDocument(SuggestionSearchEntity sse, String result, + NetworkTransaction txn) throws JsonProcessingException, IOException { + + OxmEntityDescriptor resultDescriptor = txn.getDescriptor(); + + sse.setEntityType(resultDescriptor.getEntityName()); + + JsonNode entityNode = mapper.readTree(result); + + List primaryKeyValues = new ArrayList(); + String pkeyValue = null; + + for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + String message = "populateSuggestionSearchEntityDocument()," + + " pKeyValue is null for entityType = " + resultDescriptor.getEntityName(); + LOG.warn(AaiUiMsgs.WARN_GENERIC, message); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + sse.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + sse.generateSuggestionInputPermutations(); + } + + protected void performDocumentUpsert(NetworkTransaction esGetTxn, SuggestionSearchEntity sse) { + /** + *

+ * As part of the response processing we need to do the following:
+ *
+ * 1. Extract the version (if present); it will be the ETAG when we use the
+ * Search-Abstraction-Service.
+ * 2. Spawn the next task, which is to do the PUT operation into elastic with or without the
+ * version tag:
+ * a) if version is null or RC=404, then do a standard PUT, no _update with version tag
+ * b) if version != null, do a PUT with _update?version= versionNumber in the URI to elastic
+ */ + String link = null; + try { + link = getElasticFullUrl("/" + sse.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + boolean wasEntryDiscovered = false; + if (esGetTxn.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, sse.getEntityPrimaryKeyValue()); + } else if (esGetTxn.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + } else { + /* + * Not being a 200 does not mean a failure. eg 201 is returned for created. and 500 for es not + * found TODO -> Should we return. + */ + LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetTxn.getOperationResult().getResultCode())); + return; + } + // Insert a new document only if the paylod is different. + // This is determined by hashing the payload and using it as a id for the document + // + if (!wasEntryDiscovered) { + try { + String jsonPayload = null; + + jsonPayload = sse.getAsJson(); + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync( + new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Suggestion search entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, sse); + } + }); + } + } catch (Exception exc) { + String message = + "Exception caught during suggestion search entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); + } + } + } + + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, SuggestionSearchEntity sse) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(sse.getId())) { + esWorkOnHand.incrementAndGet(); + + RetrySuggestionEntitySyncContainer rssec = + new RetrySuggestionEntitySyncContainer(esGetResult, sse); + retryQueue.push(rssec); + + String message = "Store document failed during suggestion search entity synchronization" + + " due to version conflict. Entity will be re-synced."; + LOG.warn(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); + } + } else { + String message = + "Store document failed during suggestion search entity synchronization with result code " + + or.getResultCode() + " and result message " + or.getResult(); + LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); + } + } + } + + /** + * Perform retry sync. 
+ */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetrySuggestionEntitySyncContainer susc = retryQueue.poll(); + if (susc != null) { + + SuggestionSearchEntity sus = susc.getSuggestionSearchEntity(); + NetworkTransaction txn = susc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = getElasticFullUrl("/" + sus.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, sus); + } + }); + } + + } + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Searchable entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return getStatReport(syncDurationInMs, showFinalReport); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " + + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); + } + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + this.syncInProgress = false; + + return true; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() + */ + @Override + public void clearCache() { + + if (syncInProgress) { + 
LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java b/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java new file mode 100644 index 0000000..c6fa69b --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java @@ -0,0 +1,192 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.autosuggestion.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.util.Map; +import java.util.concurrent.ExecutorService; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.AggregationSuggestionEntity; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + + +public class VnfAliasSuggestionSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(VnfAliasSuggestionSynchronizer.class); + + private boolean isSyncInProgress; + private boolean shouldPerformRetry; + private Map contextMap; + protected ExecutorService esPutExecutor; + + public VnfAliasSuggestionSynchronizer(ElasticSearchSchemaConfig schemaConfig, + int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig) throws Exception { + super(LOG, "VASS-" + schemaConfig.getIndexName().toUpperCase(), internalSyncWorkers, 
aaiWorkers, + esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig); + + this.isSyncInProgress = false; + this.shouldPerformRetry = false; + this.synchronizerName = "VNFs Alias Suggestion Synchronizer"; + this.contextMap = MDC.getCopyOfContextMap(); + this.esPutExecutor = NodeUtils.createNamedExecutor("ASS-ES-PUT", 2, LOG); + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand); + } + + if (totalWorkOnHand > 0 || !isSyncInProgress) { + return false; + } + + return true; + } + + @Override + public OperationState doSync() { + isSyncInProgress = true; + this.syncDurationInMs = -1; + syncStartedTimeStampInMs = System.currentTimeMillis(); + + syncEntity(); + + while (!isSyncDone()) { + try { + if (shouldPerformRetry) { + syncEntity(); + } + Thread.sleep(1000); + } catch (Exception exc) { + // We don't care about this exception + } + } + + return OperationState.OK; + } + + private void syncEntity() { + String txnId = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnId, synchronizerName, "", "Sync", ""); + + AggregationSuggestionEntity syncEntity = new AggregationSuggestionEntity(); + syncEntity.deriveFields(); + syncEntity.initializeFilters(); + + String link = null; + try { + link = getElasticFullUrl("/" + syncEntity.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + } + + try { + String jsonPayload = null; + jsonPayload = syncEntity.getAsJson(); + if (link != null && jsonPayload != null) { + + NetworkTransaction elasticPutTxn = new NetworkTransaction(); + elasticPutTxn.setLink(link); + elasticPutTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + final Map contextMap = MDC.getCopyOfContextMap(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, elasticPutTxn, elasticSearchAdapter, + contextMap), esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Aggregation suggestion entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + wasEsOperationSuccessful(result); + } + }); + } + } catch (Exception exc) { + String message = + "Exception caught during aggregation suggestion entity sync PUT operation. 
Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); + } + } + + private void wasEsOperationSuccessful(NetworkTransaction result) { + if (result != null) { + OperationResult opResult = result.getOperationResult(); + + if (!opResult.wasSuccessful()) { + shouldPerformRetry = true; + } else { + isSyncInProgress = false; + shouldPerformRetry = false; + } + } + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + } + + @Override + public String getStatReport(boolean shouldDisplayFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return getStatReport(syncDurationInMs, shouldDisplayFinalReport); + } + + @Override + public void shutdown() { + this.shutdownExecutors(); + } +} diff --git a/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java b/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java new file mode 100644 index 0000000..3376eed --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java @@ -0,0 +1,95 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.autosuggestion.sync; + +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class VnfAliasSyncController extends SyncControllerImpl implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + + public VnfAliasSyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig) + throws Exception { + super(syncControllerConfig); + + // final String controllerName = "VNFs Alias Suggestion Synchronizer"; + + IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig, + endpointConfig, ElasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(indexValidator); + + VnfAliasSuggestionSynchronizer synchronizer = new VnfAliasSuggestionSynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig); + + synchronizer.setAaiAdapter(aaiAdapter); + synchronizer.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(synchronizer); + + + IndexCleaner indexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(indexCleaner); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + @Override + public void registerController() { + + if (syncControllerRegistry != null) { + if (syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + + } + + +} diff --git a/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java b/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java new file mode 100644 index 0000000..8a3f119 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java @@ -0,0 +1,88 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.common.search; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; + +@JsonInclude(Include.NON_NULL) +public class CommonSearchSuggestion implements SearchSuggestion { + protected String hashId; + protected String route; + protected String text; + protected List filterValues = new ArrayList<>(); + + public CommonSearchSuggestion() {} + + public CommonSearchSuggestion(String hashId, String route, String text, String perspective, + List filterValues) { + this.hashId = hashId; + this.route = route; + this.text = text; + this.filterValues = filterValues; + } + + public List getFilterValues() { + return filterValues; + } + + public String getHashId() { + return hashId; + } + + public String getRoute() { + return route; + } + + public String getText() { + return text; + } + + public void setHashId(String hashId) { + this.hashId = hashId; + } + + public void setRoute(String route) { + this.route = route; + } + + public void setText(String text) { + this.text = text; + } + + @Override + public String toString() { + return "CommonSearchSuggestion [" + (hashId != null ? "hashId=" + hashId + ", " : "") + + (route != null ? "route=" + route + ", " : "") + + (text != null ? "text=" + text + ", " : "") + + (filterValues != null ? "filterValues=" + filterValues : "") + "]"; + } + + +} diff --git a/src/main/java/org/onap/aai/sparky/config/Configurable.java b/src/main/java/org/onap/aai/sparky/config/Configurable.java deleted file mode 100644 index d108bef..0000000 --- a/src/main/java/org/onap/aai/sparky/config/Configurable.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.config; - -import org.onap.aai.sparky.config.exception.ConfigurationException; - -/** - * The Interface Configurable. - */ -public interface Configurable { - - public boolean isValid(); - - public boolean isInitialized(); - - /** - * Load config. - * - * @throws ConfigurationException the configuration exception - */ - public void loadConfig() throws ConfigurationException; - -} diff --git a/src/main/java/org/onap/aai/sparky/config/exception/ConfigurationException.java b/src/main/java/org/onap/aai/sparky/config/exception/ConfigurationException.java deleted file mode 100644 index f796c38..0000000 --- a/src/main/java/org/onap/aai/sparky/config/exception/ConfigurationException.java +++ /dev/null @@ -1,31 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.config.exception; - - -/** - * The Class ConfigurationException. - */ -public class ConfigurationException extends Exception { - -} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java b/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java index e4a9f90..1df9296 100644 --- a/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java +++ b/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java @@ -20,7 +20,6 @@ * * ECOMP is a trademark and service mark of AT&T Intellectual Property. */ - package org.onap.aai.sparky.config.oxm; import java.util.ArrayList; diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java b/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java new file mode 100644 index 0000000..f0e6d4e --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +public class CrossEntityReferenceDescriptor extends OxmEntityDescriptor { + protected CrossEntityReference crossEntityReference; + + public CrossEntityReference getCrossEntityReference() { + return crossEntityReference; + } + + public void setCrossEntityReference(CrossEntityReference crossEntityReference) { + this.crossEntityReference = crossEntityReference; + } + + /** + * Checks for cross entity references. + * + * @return true, if successful + */ + public boolean hasCrossEntityReferences() { + if (this.crossEntityReference == null) { + return false; + } + if (!this.crossEntityReference.getReferenceAttributes().isEmpty()) { + return true; + } + return false; + } + + + @Override + public String toString() { + return "CrossEntityReferenceDescriptor [" + + (crossEntityReference != null ? "crossEntityReference=" + crossEntityReference + ", " + : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java b/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java new file mode 100644 index 0000000..81fe943 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java @@ -0,0 +1,154 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.config.oxm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class CrossEntityReferenceLookup implements OxmModelProcessor { + + // TODO: kill singleton collaborator pattern + private static CrossEntityReferenceLookup instance; + + private Map> crossReferenceEntityOxmModel; + private Map crossReferenceEntityDescriptors; + + + private CrossEntityReferenceLookup() { + crossReferenceEntityOxmModel = new LinkedHashMap>(); + crossReferenceEntityDescriptors = new HashMap(); + } + + public synchronized static CrossEntityReferenceLookup getInstance() { + + /* + * I hate this method and I want it to go away. The singleton pattern is transitory, I want this + * class to be wired via a bean reference instead. But from the starting point, it would require + * fixing all the classes across the code base up front and I don't want this task to expand + * beyond just refactoring the OxmModelLoader. For now I'll keep the singleton pattern, but I + * really want to get rid of it once we are properly spring wired. + */ + + if (instance == null) { + instance = new CrossEntityReferenceLookup(); + } + + return instance; + } + + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap oxmProperties = new LinkedHashMap(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map properties = entity.getDescriptor().getProperties(); + if (properties != null) { + for (Map.Entry entry : properties.entrySet()) { + + if (entry.getKey().equalsIgnoreCase("crossEntityReference")) { + oxmProperties.put("crossEntityReference", entry.getValue()); + } + } + } + + if (oxmProperties.containsKey("crossEntityReference")) { + crossReferenceEntityOxmModel.put(entityName, oxmProperties); + } + + } + + for (Entry> crossRefModel : crossReferenceEntityOxmModel + .entrySet()) { + HashMap attribute = crossRefModel.getValue(); + CrossEntityReferenceDescriptor entity = new CrossEntityReferenceDescriptor(); + entity.setEntityName(attribute.get("entityName")); + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + + List crossEntityRefTokens = + Arrays.asList(attribute.get("crossEntityReference").split(",")); + + if (crossEntityRefTokens.size() >= 2) { + CrossEntityReference entityRef = new CrossEntityReference(); + entityRef.setTargetEntityType(crossEntityRefTokens.get(0)); + + for (int i = 1; i < crossEntityRefTokens.size(); i++) { + entityRef.addReferenceAttribute(crossEntityRefTokens.get(i)); + } + + entity.setCrossEntityReference(entityRef); + } + crossReferenceEntityDescriptors.put(attribute.get("entityName"), entity); 
+ } + + } + + public Map> getCrossReferenceEntityOxmModel() { + return crossReferenceEntityOxmModel; + } + + public void setCrossReferenceEntityOxmModel( + Map> crossReferenceEntityOxmModel) { + this.crossReferenceEntityOxmModel = crossReferenceEntityOxmModel; + } + + public Map getCrossReferenceEntityDescriptors() { + return crossReferenceEntityDescriptors; + } + + public void setCrossReferenceEntityDescriptors( + Map crossReferenceEntityDescriptors) { + this.crossReferenceEntityDescriptors = crossReferenceEntityDescriptors; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java b/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java new file mode 100644 index 0000000..5a45842 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java @@ -0,0 +1,59 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +public class GeoEntityDescriptor extends OxmEntityDescriptor { + + protected String geoLatName; + + protected String geoLongName; + + public String getGeoLatName() { + return geoLatName; + } + + public void setGeoLatName(String geoLatName) { + this.geoLatName = geoLatName; + } + + public String getGeoLongName() { + return geoLongName; + } + + public void setGeoLongName(String geoLongName) { + this.geoLongName = geoLongName; + } + + @Override + public String toString() { + return "GeoEntityDescriptor [" + (geoLatName != null ? "geoLatName=" + geoLatName + ", " : "") + + (geoLongName != null ? "geoLongName=" + geoLongName + ", " : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java b/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java new file mode 100644 index 0000000..f8b1ceb --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java @@ -0,0 +1,155 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class GeoEntityLookup implements OxmModelProcessor { + + // TODO: kill singleton collaborator pattern + private static GeoEntityLookup instance; + + private Map> geoEntityOxmModel; + + private Map geoEntityDescriptors; + + private GeoEntityLookup() { + geoEntityOxmModel = new LinkedHashMap>(); + geoEntityDescriptors = new HashMap(); + } + + public synchronized static GeoEntityLookup getInstance() { + + /* + * I hate this method and I want it to go away. The singleton pattern is transitory, I want this + * class to be wired via a bean reference instead. But from the starting point, it would require + * fixing all the classes across the code base up front and I don't want this task to expand + * beyond just refactoring the OxmModelLoader. For now I'll keep the singleton pattern, but I + * really want to get rid of it once we are properly spring wired. 
+ */ + + if (instance == null) { + instance = new GeoEntityLookup(); + } + + return instance; + } + + public Map> getGeoEntityOxmModel() { + return geoEntityOxmModel; + } + + public void setGeoEntityOxmModel(Map> geoEntityOxmModel) { + this.geoEntityOxmModel = geoEntityOxmModel; + } + + public Map getGeoEntityDescriptors() { + return geoEntityDescriptors; + } + + public void setGeoEntityDescriptors(Map geoEntityDescriptors) { + this.geoEntityDescriptors = geoEntityDescriptors; + } + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap oxmProperties = new LinkedHashMap(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map properties = entity.getDescriptor().getProperties(); + + if (properties != null) { + for (Map.Entry entry : properties.entrySet()) { + + if (entry.getKey().equalsIgnoreCase("geoLat")) { + if (entry.getValue().length() > 0) { + oxmProperties.put("geoLat", entry.getValue()); + } + } else if (entry.getKey().equalsIgnoreCase("geoLong")) { + if (entry.getValue().length() > 0) { + oxmProperties.put("geoLong", entry.getValue()); + } + } + } + } + + if (oxmProperties.containsKey("geoLat") && oxmProperties.containsKey("geoLong")) { + geoEntityOxmModel.put(entityName, oxmProperties); + } + + } + + for (Entry> entityModel : geoEntityOxmModel.entrySet()) { + + HashMap attribute = entityModel.getValue(); + + GeoOxmEntityDescriptor entity = new GeoOxmEntityDescriptor(); + + entity.setEntityName(attribute.get("entityName")); + + if (attribute.containsKey("primaryKeyAttributeNames")) { + + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + + if (attribute.containsKey("geoLat") || attribute.containsKey("geoLong")) { + entity.setGeoLatName(attribute.get("geoLat")); + entity.setGeoLongName(attribute.get("geoLong")); + } + + geoEntityDescriptors.put(attribute.get("entityName"), entity); + } + } + + } + + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java b/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java new file mode 100644 index 0000000..595c81a --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java @@ -0,0 +1,69 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +public class GeoOxmEntityDescriptor extends OxmEntityDescriptor { + + private String geoLatName; + + private String geoLongName; + + public String getGeoLatName() { + return geoLatName; + } + + public void setGeoLatName(String geoLatName) { + this.geoLatName = geoLatName; + } + + public String getGeoLongName() { + return geoLongName; + } + + public void setGeoLongName(String geoLongName) { + this.geoLongName = geoLongName; + } + + /** + * Checks for geo entity. + * + * @return true, if successful + */ + public boolean hasGeoEntity() { + return (this.geoLongName != null && this.geoLatName != null); + } + + @Override + public String toString() { + return "GeoOxmEntityDescriptor [" + + (geoLatName != null ? "geoLatName=" + geoLatName + ", " : "") + + (geoLongName != null ? "geoLongName=" + geoLongName + ", " : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java b/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java index 379cca2..3b3fabd 100644 --- a/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java +++ b/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java @@ -22,28 +22,18 @@ */ package org.onap.aai.sparky.config.oxm; +import java.util.ArrayList; import java.util.List; -import org.onap.aai.sparky.synchronizer.entity.SuggestionSearchEntity; - -/** - * The Class OxmEntityDescriptor. - */ public class OxmEntityDescriptor { - private String entityName; - - private List primaryKeyAttributeName; - - private List searchableAttributes; - - private CrossEntityReference crossEntityReference; + protected String entityName; - private String geoLatName; + protected List primaryKeyAttributeNames; - private String geoLongName; - - private SuggestionSearchEntity suggestionSearchEntity; + public OxmEntityDescriptor() { + primaryKeyAttributeNames = new ArrayList(); + } public String getEntityName() { return entityName; @@ -53,124 +43,24 @@ public class OxmEntityDescriptor { this.entityName = entityName; } - public List getPrimaryKeyAttributeName() { - return primaryKeyAttributeName; - } - - public void setPrimaryKeyAttributeName(List primaryKeyAttributeName) { - this.primaryKeyAttributeName = primaryKeyAttributeName; - } - - public List getSearchableAttributes() { - return searchableAttributes; - } - - public void setSearchableAttributes(List searchableAttributes) { - this.searchableAttributes = searchableAttributes; - } - - /** - * Checks for searchable attributes. 
- * - * @return true, if successful - */ - public boolean hasSearchableAttributes() { - - if (this.searchableAttributes == null) { - return false; - } - - if (this.searchableAttributes.size() > 0) { - return true; - } - - return false; - - } - - public CrossEntityReference getCrossEntityReference() { - return crossEntityReference; - } - - public void setCrossEntityReference(CrossEntityReference crossEntityReference) { - this.crossEntityReference = crossEntityReference; + public List getPrimaryKeyAttributeNames() { + return primaryKeyAttributeNames; } - /** - * Checks for cross entity references. - * - * @return true, if successful - */ - public boolean hasCrossEntityReferences() { - if (this.crossEntityReference == null) { - return false; - } - if (!this.crossEntityReference.getReferenceAttributes().isEmpty()) { - return true; - } - return false; + public void setPrimaryKeyAttributeNames(List primaryKeyAttributeNames) { + this.primaryKeyAttributeNames = primaryKeyAttributeNames; } - public String getGeoLatName() { - return geoLatName; - } - - public void setGeoLatName(String geoLatName) { - this.geoLatName = geoLatName; - } - - public String getGeoLongName() { - return geoLongName; - } - - public void setGeoLongName(String geoLongName) { - this.geoLongName = geoLongName; - } - - /** - * Checks for geo entity. - * - * @return true, if successful - */ - public boolean hasGeoEntity() { - - if (this.geoLongName != null && this.geoLatName != null) { - return true; - } - - return false; - - } - - public SuggestionSearchEntity getSuggestionSearchEntity() { - return this.suggestionSearchEntity; - } - - public void setSuggestionSearchEntity(SuggestionSearchEntity suggestionSearchEntity) { - this.suggestionSearchEntity = suggestionSearchEntity; - } - - /** - * Checks for non-null, populated SuggestionSearchEntity. - * - * @return true, if successful - */ - public boolean hasSuggestionSearchEntity() { - if (this.suggestionSearchEntity == null) { - return false; - } - if (!this.suggestionSearchEntity.getSuggestionConnectorWords().isEmpty()) { - return true; - } - return false; + public void addPrimaryKeyName(String name) { + primaryKeyAttributeNames.add(name); } @Override public String toString() { - return "OxmEntityDescriptor [entityName=" + entityName + ", primaryKeyAttributeName=" - + primaryKeyAttributeName + ", searchableAttributes=" + searchableAttributes - + ", crossEntityReference=" + crossEntityReference + ", geoLatName=" + geoLatName - + ", geoLongName=" + geoLongName + ", suggestionSearchEntity=" + suggestionSearchEntity + return "OxmEntityDescriptor [" + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + "]"; } + } diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java b/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java new file mode 100644 index 0000000..168a4b1 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java @@ -0,0 +1,151 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class OxmEntityLookup implements OxmModelProcessor { + + // TODO: kill singleton collaborator pattern + private static OxmEntityLookup instance; + + private Map> oxmModel; + + private Map entityTypeLookup; + + private Map entityDescriptors; + + + private OxmEntityLookup() { + oxmModel = new LinkedHashMap>(); + entityTypeLookup = new LinkedHashMap(); + entityDescriptors = new HashMap(); + } + + public synchronized static OxmEntityLookup getInstance() { + + /* + * I hate this method and I want it to go away. The singleton pattern is transitory, I want this + * class to be wired via a bean reference instead. But from the starting point, it would require + * fixing all the classes across the code base up front and I don't want this task to expand + * beyond just refactoring the OxmModelLoader. For now I'll keep the singleton pattern, but I + * really want to get rid of it once we are properly spring wired. 
+ */ + + if (instance == null) { + instance = new OxmEntityLookup(); + } + + return instance; + } + + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap oxmProperties = new LinkedHashMap(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + entityTypeLookup.put(entityName, entity); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map properties = entity.getDescriptor().getProperties(); + + oxmModel.put(entityName, oxmProperties); + + } + + for (Entry> entityModel : oxmModel.entrySet()) { + HashMap attribute = entityModel.getValue(); + OxmEntityDescriptor entity = new OxmEntityDescriptor(); + + entity.setEntityName(attribute.get("entityName")); + + if (attribute.containsKey("primaryKeyAttributeNames")) { + + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + + entityDescriptors.put(attribute.get("entityName"), entity); + } + } + + } + + public Map> getOxmModel() { + return oxmModel; + } + + public void setOxmModel(Map> oxmModel) { + this.oxmModel = oxmModel; + } + + public Map getEntityTypeLookup() { + return entityTypeLookup; + } + + public void setEntityTypeLookup(Map entityTypeLookup) { + this.entityTypeLookup = entityTypeLookup; + } + + public Map getEntityDescriptors() { + return entityDescriptors; + } + + public void setEntityDescriptors(Map entityDescriptors) { + this.entityDescriptors = entityDescriptors; + } + + public void addEntityDescriptor(String type, OxmEntityDescriptor descriptor) { + if (this.entityDescriptors != null) { + this.entityDescriptors.put(type, descriptor); + } + } + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java b/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java index 853a537..b953917 100644 --- a/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java +++ b/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java @@ -22,485 +22,163 @@ */ package org.onap.aai.sparky.config.oxm; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; -import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; +import java.util.HashSet; import java.util.Map; -import java.util.Map.Entry; -import java.util.Vector; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.eclipse.persistence.dynamic.DynamicType; -import org.eclipse.persistence.internal.oxm.mappings.Descriptor; import org.eclipse.persistence.jaxb.JAXBContextProperties; import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContextFactory; -import org.eclipse.persistence.mappings.DatabaseMapping; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.entity.SuggestionSearchEntity; -import 
org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; import org.onap.aai.cl.api.Logger; import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; +import org.springframework.core.io.support.ResourcePatternResolver; -/** - * The Class OxmModelLoader. - */ public class OxmModelLoader { - private static OxmModelLoader instance; - private static final Logger LOG = LoggerFactory.getInstance().getLogger(OxmModelLoader.class); - private Map> oxmModel = - new LinkedHashMap>(); - - private Map entityTypeLookup = new LinkedHashMap(); - - private Map> searchableOxmModel = - new LinkedHashMap>(); - - private Map> crossReferenceEntityOxmModel = - new LinkedHashMap>(); - - private Map> geoEntityOxmModel = - new LinkedHashMap>(); - - private Map> suggestionSearchEntityOxmModel = - new LinkedHashMap>(); - - private Map entityDescriptors = - new HashMap(); - - private Map searchableEntityDescriptors = - new HashMap(); - - private Map crossReferenceEntityDescriptors = - new HashMap(); - - private Map geoEntityDescriptors = - new HashMap(); - - private Map suggestionSearchEntityDescriptors = - new HashMap(); - - public static OxmModelLoader getInstance() { - if (instance == null) { - instance = new OxmModelLoader(); - LOG.info(AaiUiMsgs.INITIALIZE_OXM_MODEL_LOADER); - instance.loadModels(); - } + /* + * The intent of this parameter is to be able to programmatically over-ride the latest AAI schema + * version discovered from the aai-schema jar file. This property is optional, but if set on the + * bean or by another class in the system, then it will override the spec version that is loaded. + * + * If the latestVersionOverride is greater than 0 then it will set the latest version to the + * specified version, and that stream will be returned if available. + */ - return instance; + protected int oxmApiVersionOverride; + protected Set processors; + private int latestVersionNum = 0; - } + private final static Pattern p = Pattern.compile("aai_oxm_(v)(.*).xml"); - /** - * Instantiates a new oxm model loader. - */ public OxmModelLoader() { + this(-1, new HashSet()); + } + public OxmModelLoader(int apiVersionOverride, Set oxmModelProcessors) { + this.oxmApiVersionOverride = apiVersionOverride; + this.processors = oxmModelProcessors; } - /** - * Load models. 
- */ - private void loadModels() { - // find latest version of OXM file in folder - String version = findLatestOxmVersion(); - if (version == null) { - LOG.error(AaiUiMsgs.OXM_FILE_NOT_FOUND, TierSupportUiConstants.CONFIG_OXM_LOCATION); - return; + protected synchronized Map getStreamHandlesForOxmFromResource() { + Map listOfOxmFiles = new HashMap(); + ClassLoader oxmClassLoader = OxmModelLoader.class.getClassLoader(); + ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(oxmClassLoader); + Resource[] resources = null; + try { + resources = resolver.getResources("classpath*:/oxm/aai_oxm*.xml"); + } catch (IOException ex) { + LOG.error(AaiUiMsgs.OXM_LOADING_ERROR, ex.getMessage()); } - // load the latest version based on file name - loadModel(version); + if (resources == null) { + LOG.error(AaiUiMsgs.OXM_LOADING_ERROR, "No OXM schema files found on classpath"); + } + + for (Resource resource : resources) { + Matcher m = p.matcher(resource.getFilename()); + if (m.matches()) { + try { + listOfOxmFiles.put(new Integer(m.group(2)), resource.getInputStream()); + } catch (Exception e) { + LOG.error(AaiUiMsgs.OXM_LOADING_ERROR, resource.getFilename(), e.getMessage()); + } + } + } + return listOfOxmFiles; } /** - * Load model. - * - * @param version the version + * Load an oxm model. + * + * @param inputStream file handle for oxm */ - public void loadModel(String version) { - String fileName = loadOxmFileName(version); - - try (FileInputStream inputStream = new FileInputStream(new File(fileName))) { - Map properties = new HashMap(); - properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, inputStream); - + protected void loadModel(InputStream inputStream) { + Map properties = new HashMap(); + properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, inputStream); + try { final DynamicJAXBContext oxmContext = DynamicJAXBContextFactory .createContextFromOXM(Thread.currentThread().getContextClassLoader(), properties); + parseOxmContext(oxmContext); // populateSearchableOxmModel(); - LOG.info(AaiUiMsgs.OXM_LOAD_SUCCESS); - - } catch (FileNotFoundException fnf) { - LOG.info(AaiUiMsgs.OXM_READ_ERROR_NONVERBOSE); - LOG.error(AaiUiMsgs.OXM_READ_ERROR_VERBOSE, fileName); + LOG.info(AaiUiMsgs.OXM_LOAD_SUCCESS, String.valueOf(latestVersionNum)); } catch (Exception exc) { LOG.info(AaiUiMsgs.OXM_PARSE_ERROR_NONVERBOSE); - LOG.error(AaiUiMsgs.OXM_PARSE_ERROR_VERBOSE, fileName, exc.getMessage()); + LOG.error(AaiUiMsgs.OXM_PARSE_ERROR_VERBOSE, "OXM v" + latestVersionNum, exc.getMessage()); } } /** - * Parses the oxm context. - * - * @param oxmContext the oxm context + * Load the latest oxm model. 
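+   * Scans the classpath for /oxm/aai_oxm_v*.xml resources and loads the highest numbered
+   * version found, unless oxmApiVersionOverride has been set to a positive value.
+   * Illustrative wiring only (assumes the singleton lookups remain available):
+   *
+   *   Set<OxmModelProcessor> processors = new HashSet<>();
+   *   processors.add(OxmEntityLookup.getInstance());
+   *   processors.add(SearchableEntityLookup.getInstance());
+   *   new OxmModelLoader(-1, processors).loadLatestOxmModel();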
*/ - private void parseOxmContext(DynamicJAXBContext oxmContext) { - @SuppressWarnings("rawtypes") - List descriptorsList = oxmContext.getXMLContext().getDescriptors(); - - for (@SuppressWarnings("rawtypes") - Descriptor desc : descriptorsList) { - - DynamicType entity = oxmContext.getDynamicType(desc.getAlias()); - - LinkedHashMap oxmProperties = new LinkedHashMap(); - - // Not all fields have key attributes - if (desc.getPrimaryKeyFields() != null) { - oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() - .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); - } - - String entityName = desc.getDefaultRootElement(); - - entityTypeLookup.put(entityName, entity); - - // add entityName - oxmProperties.put("entityName", entityName); - - Map properties = entity.getDescriptor().getProperties(); - if (properties != null) { - for (Map.Entry entry : properties.entrySet()) { - - if (entry.getKey().equalsIgnoreCase("searchable")) { - oxmProperties.put("searchableAttributes", entry.getValue()); - } else if (entry.getKey().equalsIgnoreCase("crossEntityReference")) { - oxmProperties.put("crossEntityReference", entry.getValue()); - } else if (entry.getKey().equalsIgnoreCase("geoLat")) { - if (entry.getValue().length() > 0) { - oxmProperties.put("geoLat", entry.getValue()); - } - } else if (entry.getKey().equalsIgnoreCase("geoLong")) { - if (entry.getValue().length() > 0) { - oxmProperties.put("geoLong", entry.getValue()); - } - } else if (entry.getKey().equalsIgnoreCase("containsSuggestibleProps")) { - - oxmProperties.put("containsSuggestibleProps", "true"); - - Vector descriptorMaps = entity.getDescriptor().getMappings(); - List listOfSuggestableAttributes = new ArrayList(); - - for (DatabaseMapping descMap : descriptorMaps) { - if (descMap.isAbstractDirectMapping()) { - - if (descMap.getProperties().get("suggestibleOnSearch") != null) { - String suggestableOnSearchString = - String.valueOf(descMap.getProperties().get("suggestibleOnSearch")); - - boolean isSuggestibleOnSearch = Boolean.valueOf(suggestableOnSearchString); - - if (isSuggestibleOnSearch) { - /* Grab attribute types for suggestion */ - String attributeName = - descMap.getField().getName().replaceAll("/text\\(\\)", ""); - listOfSuggestableAttributes.add(attributeName); - - if (descMap.getProperties().get("suggestionVerbs") != null) { - String suggestionVerbsString = - String.valueOf(descMap.getProperties().get("suggestionVerbs")); - - oxmProperties.put("suggestionVerbs", suggestionVerbsString); - } - } - } - } - } - if (!listOfSuggestableAttributes.isEmpty()) { - oxmProperties.put("suggestibleAttributes", - String.join(",", listOfSuggestableAttributes)); - } - } else if (entry.getKey().equalsIgnoreCase("suggestionAliases")) { - oxmProperties.put("suggestionAliases", entry.getValue()); - } - } - } - - oxmModel.put(entityName, oxmProperties); - - // Add all searchable entity types for reserve lookup - if (oxmProperties.containsKey("searchableAttributes")) { - searchableOxmModel.put(entityName, oxmProperties); - } + public synchronized void loadLatestOxmModel() { - if (oxmProperties.containsKey("crossEntityReference")) { - crossReferenceEntityOxmModel.put(entityName, oxmProperties); - } - - if (oxmProperties.containsKey("geoLat") && oxmProperties.containsKey("geoLong")) { - geoEntityOxmModel.put(entityName, oxmProperties); - } - - if (oxmProperties.containsKey("containsSuggestibleProps")) { - suggestionSearchEntityOxmModel.put(entityName, oxmProperties); - } - } - - for (Entry> entityModel 
: oxmModel.entrySet()) { - HashMap attribute = entityModel.getValue(); - OxmEntityDescriptor entity = new OxmEntityDescriptor(); - entity.setEntityName(attribute.get("entityName")); - if (attribute.containsKey("primaryKeyAttributeNames")) { - - entity.setPrimaryKeyAttributeName( - Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); - if (attribute.containsKey("searchableAttributes")) { - entity.setSearchableAttributes( - Arrays.asList(attribute.get("searchableAttributes").split(","))); - } else if (attribute.containsKey("crossEntityReference")) { - List crossEntityRefTokens = - Arrays.asList(attribute.get("crossEntityReference").split(",")); - - if (crossEntityRefTokens.size() >= 2) { - CrossEntityReference entityRef = new CrossEntityReference(); - entityRef.setTargetEntityType(crossEntityRefTokens.get(0)); - - for (int i = 1; i < crossEntityRefTokens.size(); i++) { - entityRef.addReferenceAttribute(crossEntityRefTokens.get(i)); - } - - entity.setCrossEntityReference(entityRef); - } else { - LOG.error(AaiUiMsgs.OXM_PROP_DEF_ERR_CROSS_ENTITY_REF, attribute.get("entityName"), - attribute.get("crossEntityReference")); - } - } - - if (attribute.containsKey("geoLat") || attribute.containsKey("geoLong")) { - entity.setGeoLatName(attribute.get("geoLat")); - entity.setGeoLongName(attribute.get("geoLong")); - } - - if (attribute.containsKey("suggestionVerbs")) { - String entityName = attribute.get("entityName"); - SuggestionSearchEntity suggestionSearchEntity = new SuggestionSearchEntity(this); - suggestionSearchEntity.setEntityType(entityName); - - entity.setSuggestionSearchEntity(suggestionSearchEntity); - } - - entityDescriptors.put(attribute.get("entityName"), entity); - } - } + LOG.info(AaiUiMsgs.INITIALIZE_OXM_MODEL_LOADER); - - for (Entry> searchableModel : searchableOxmModel.entrySet()) { - HashMap attribute = searchableModel.getValue(); - OxmEntityDescriptor entity = new OxmEntityDescriptor(); - entity.setEntityName(attribute.get("entityName")); - entity.setPrimaryKeyAttributeName( - Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); - entity - .setSearchableAttributes(Arrays.asList(attribute.get("searchableAttributes").split(","))); - searchableEntityDescriptors.put(attribute.get("entityName"), entity); - } - - for (Entry> geoEntityModel : geoEntityOxmModel.entrySet()) { - HashMap attribute = geoEntityModel.getValue(); - OxmEntityDescriptor entity = new OxmEntityDescriptor(); - entity.setEntityName(attribute.get("entityName")); - entity.setPrimaryKeyAttributeName( - Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); - entity.setGeoLatName(attribute.get("geoLat")); - entity.setGeoLongName(attribute.get("geoLong")); - geoEntityDescriptors.put(attribute.get("entityName"), entity); + // find handles for available oxm models + final Map listOfOxmStreams = getStreamHandlesForOxmFromResource(); + if (listOfOxmStreams.isEmpty()) { + LOG.error(AaiUiMsgs.OXM_FILE_NOT_FOUND); + return; } - for (Entry> crossRefModel : crossReferenceEntityOxmModel - .entrySet()) { - HashMap attribute = crossRefModel.getValue(); - OxmEntityDescriptor entity = new OxmEntityDescriptor(); - entity.setEntityName(attribute.get("entityName")); - entity.setPrimaryKeyAttributeName( - Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); - + InputStream stream = null; - List crossEntityRefTokens = - Arrays.asList(attribute.get("crossEntityReference").split(",")); - - if 
(crossEntityRefTokens.size() >= 2) { - CrossEntityReference entityRef = new CrossEntityReference(); - entityRef.setTargetEntityType(crossEntityRefTokens.get(0)); + if (oxmApiVersionOverride > 0) { + latestVersionNum = oxmApiVersionOverride; + LOG.warn(AaiUiMsgs.WARN_GENERIC, "Overriding AAI Schema with version = " + latestVersionNum); + stream = listOfOxmStreams.get(latestVersionNum); + } else { - for (int i = 1; i < crossEntityRefTokens.size(); i++) { - entityRef.addReferenceAttribute(crossEntityRefTokens.get(i)); + for (Integer key : listOfOxmStreams.keySet()) { + if (key.intValue() > latestVersionNum) { + latestVersionNum = key.intValue(); + stream = listOfOxmStreams.get(key); } - - entity.setCrossEntityReference(entityRef); } - crossReferenceEntityDescriptors.put(attribute.get("entityName"), entity); } - for (Entry> suggestionEntityModel : suggestionSearchEntityOxmModel - .entrySet()) { - HashMap attribute = suggestionEntityModel.getValue(); - - String entityName = attribute.get("entityName"); - SuggestionSearchEntity suggestionSearchEntity = new SuggestionSearchEntity(this); - suggestionSearchEntity.setEntityType(entityName); + // load the latest oxm file + loadModel(stream); - if (attribute.get("suggestionVerbs") != null) { - suggestionSearchEntity.setSuggestionConnectorWords( - Arrays.asList(attribute.get("suggestionVerbs").split(","))); - } - - if (attribute.get("suggestionAliases") != null) { - suggestionSearchEntity - .setSuggestionAliases(Arrays.asList(attribute.get("suggestionAliases").split(","))); - } - - if (attribute.get("suggestibleAttributes") != null) { - suggestionSearchEntity.setSuggestionPropertyTypes( - Arrays.asList(attribute.get("suggestibleAttributes").split(","))); - } - - OxmEntityDescriptor entity = new OxmEntityDescriptor(); - entity.setSuggestionSearchEntity(suggestionSearchEntity); - entity.setEntityName(entityName); - - if (attribute.get("primaryKeyAttributeNames") != null) { - entity.setPrimaryKeyAttributeName( - Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); - } - - suggestionSearchEntityDescriptors.put(entityName, entity); - } } - /** - * Find latest oxm version. - * - * @return the string - */ - public String findLatestOxmVersion() { - File[] listOxmFiles = loadOxmFolder().listFiles(); - - if (listOxmFiles == null) { - return null; - } - - Integer latestVersion = -1; - - Pattern oxmFileNamePattern = Pattern.compile("^aai_oxm_v([0-9]*).xml"); - - for (File file : listOxmFiles) { - if (file.isFile()) { - String fileName = file.getName(); - Matcher matcher = oxmFileNamePattern.matcher(fileName); - if (matcher.matches()) { - if (latestVersion <= Integer.parseInt(matcher.group(1))) { - latestVersion = Integer.parseInt(matcher.group(1)); - } - } - } - - } - if (latestVersion != -1) { - return "v" + latestVersion.toString(); - } else { - return null; - } - + public int getLatestVersionNum() { + return latestVersionNum; } - /** - * Load oxm folder. - * - * @return the file - */ - public File loadOxmFolder() { - return new File(TierSupportUiConstants.CONFIG_OXM_LOCATION); + public void setLatestVersionNum(int latestVersionNum) { + this.latestVersionNum = latestVersionNum; } /** - * Load oxm file name. + * Parses the oxm context. 
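+   * The same DynamicJAXBContext is handed to every registered OxmModelProcessor so that the
+   * individual lookups (entity, searchable, geo, suggestion, cross-entity-reference) can each
+   * derive their own descriptor maps from it.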
* - * @param version the version - * @return the string - */ - public String loadOxmFileName(String version) { - return new String(TierSupportUiConstants.CONFIG_OXM_LOCATION + "aai_oxm_" + version + ".xml"); - } - - /* - * Get the original representation of the OXM Model - */ - public Map> getOxmModel() { - return oxmModel; - } - - /* - * Get the searchable raw map entity types + * @param oxmContext the oxm context */ - public Map> getSearchableOxmModel() { - return searchableOxmModel; - } - - public Map> getCrossReferenceEntityOxmModel() { - return crossReferenceEntityOxmModel; - } - - public Map getEntityDescriptors() { - return entityDescriptors; - } + private void parseOxmContext(DynamicJAXBContext oxmContext) { - /** - * Gets the entity descriptor. - * - * @param type the type - * @return the entity descriptor - */ - public OxmEntityDescriptor getEntityDescriptor(String type) { - return entityDescriptors.get(type); - } + if (processors != null && processors.size() > 0) { - public Map getSearchableEntityDescriptors() { - return searchableEntityDescriptors; - } + for (OxmModelProcessor processor : processors) { - /** - * Gets the searchable entity descriptor. - * - * @param entityType the entity type - * @return the searchable entity descriptor - */ - public OxmEntityDescriptor getSearchableEntityDescriptor(String entityType) { - return searchableEntityDescriptors.get(entityType); - } + processor.processOxmModel(oxmContext); - public Map getCrossReferenceEntityDescriptors() { - return crossReferenceEntityDescriptors; - } + } - public Map getGeoEntityDescriptors() { - return geoEntityDescriptors; - } + } - public Map getSuggestionSearchEntityDescriptors() { - return suggestionSearchEntityDescriptors; } } diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoaderFilter.java b/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoaderFilter.java deleted file mode 100644 index 0ddf80a..0000000 --- a/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoaderFilter.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.config.oxm; - -import java.io.IOException; -import java.net.UnknownHostException; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; - -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.mdc.MdcContext; - -/** - * The Class OxmModelLoaderFilter. - */ -public class OxmModelLoaderFilter implements Filter { - /* - * (non-Javadoc) - * - * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, - * javax.servlet.FilterChain) - */ - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) - throws IOException, ServletException { - - /* - * However, we will setup the filtermap with a url that should never get it, so we shouldn't - * ever be in here. - */ - - chain.doFilter(request, response); - } - - /* - * (non-Javadoc) - * - * @see javax.servlet.Filter#init(javax.servlet.FilterConfig) - */ - @Override - public void init(FilterConfig filterConfig) throws ServletException { - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "OxmModelLoaderFilter", "", "Init", ""); - - try { - OxmModelLoader.getInstance(); - } catch (Exception exc) { - throw new ServletException("Caught an exception while initializing OXM model loader filter", - exc); - } - - } - - /* - * (non-Javadoc) - * - * @see javax.servlet.Filter#destroy() - */ - @Override - public void destroy() { - // TODO Auto-generated method stub - - } - -} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java b/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java new file mode 100644 index 0000000..b8e7c6f --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java @@ -0,0 +1,31 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.config.oxm; + +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public interface OxmModelProcessor { + + public void processOxmModel(DynamicJAXBContext jaxbContext); + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java b/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java new file mode 100644 index 0000000..d8a27ac --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java @@ -0,0 +1,138 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class SearchableEntityLookup implements OxmModelProcessor { + + // TODO: kill singleton collaborator pattern + private static SearchableEntityLookup instance; + + private Map> searchableOxmModel; + private Map searchableEntityDescriptors; + + private SearchableEntityLookup() { + searchableOxmModel = new LinkedHashMap>(); + searchableEntityDescriptors = new HashMap(); + } + + public synchronized static SearchableEntityLookup getInstance() { + + /* + * I hate this method and I want it to go away. The singleton pattern is transitory, I want this + * class to be wired via a bean reference instead. But from the starting point, it would require + * fixing all the classes across the code base up front and I don't want this task to expand + * beyond just refactoring the OxmModelLoader. For now I'll keep the singleton pattern, but I + * really want to get rid of it once we are properly spring wired. 
+ */ + + if (instance == null) { + instance = new SearchableEntityLookup(); + } + + return instance; + } + + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap oxmProperties = new LinkedHashMap(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map properties = entity.getDescriptor().getProperties(); + if (properties != null) { + for (Map.Entry entry : properties.entrySet()) { + + if (entry.getKey().equalsIgnoreCase("searchable")) { + oxmProperties.put("searchableAttributes", entry.getValue()); + } + } + } + + // Add all searchable entity types for reserve lookup + if (oxmProperties.containsKey("searchableAttributes")) { + searchableOxmModel.put(entityName, oxmProperties); + } + + } + + for (Entry> searchableModel : searchableOxmModel.entrySet()) { + HashMap attribute = searchableModel.getValue(); + SearchableOxmEntityDescriptor entity = new SearchableOxmEntityDescriptor(); + entity.setEntityName(attribute.get("entityName")); + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + entity + .setSearchableAttributes(Arrays.asList(attribute.get("searchableAttributes").split(","))); + searchableEntityDescriptors.put(attribute.get("entityName"), entity); + } + + } + + public Map> getSearchableOxmModel() { + return searchableOxmModel; + } + + public void setSearchableOxmModel(Map> searchableOxmModel) { + this.searchableOxmModel = searchableOxmModel; + } + + public Map getSearchableEntityDescriptors() { + return searchableEntityDescriptors; + } + + public void setSearchableEntityDescriptors( + Map searchableEntityDescriptors) { + this.searchableEntityDescriptors = searchableEntityDescriptors; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java b/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java new file mode 100644 index 0000000..cdd5ad0 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java @@ -0,0 +1,73 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.List; + +public class SearchableOxmEntityDescriptor extends OxmEntityDescriptor { + + protected List searchableAttributes; + + public List getSearchableAttributes() { + return searchableAttributes; + } + + public void setSearchableAttributes(List searchableAttributes) { + this.searchableAttributes = searchableAttributes; + } + + public void addSearchableAttribute(String attributeName) { + searchableAttributes.add(attributeName); + } + + /** + * Checks for searchable attributes. + * + * @return true, if successful + */ + public boolean hasSearchableAttributes() { + + if (this.searchableAttributes == null) { + return false; + } + + if (this.searchableAttributes.size() > 0) { + return true; + } + + return false; + + } + + @Override + public String toString() { + return "SearchableOxmEntityDescriptor [" + + (searchableAttributes != null ? "searchableAttributes=" + searchableAttributes + ", " + : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java b/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java new file mode 100644 index 0000000..c72068a --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java @@ -0,0 +1,52 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity; + +public class SuggestionEntityDescriptor extends OxmEntityDescriptor { + + protected SuggestionSearchEntity suggestionSearchEntity; + + public SuggestionSearchEntity getSuggestionSearchEntity() { + return suggestionSearchEntity; + } + + public void setSuggestionSearchEntity(SuggestionSearchEntity suggestionSearchEntity) { + this.suggestionSearchEntity = suggestionSearchEntity; + } + + @Override + public String toString() { + return "SuggestionEntityDescriptor [" + + (suggestionSearchEntity != null + ? 
"suggestionSearchEntity=" + suggestionSearchEntity + ", " : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java b/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java new file mode 100644 index 0000000..758ae60 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java @@ -0,0 +1,197 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Vector; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; +import org.eclipse.persistence.mappings.DatabaseMapping; +import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity; + +public class SuggestionEntityLookup implements OxmModelProcessor { + + // TODO: kill singleton collaborator pattern + private static SuggestionEntityLookup instance; + + private Map> suggestionSearchEntityOxmModel; + private Map suggestionSearchEntityDescriptors; + + private SuggestionEntityLookup() { + suggestionSearchEntityOxmModel = new LinkedHashMap>(); + suggestionSearchEntityDescriptors = new HashMap(); + } + + public synchronized static SuggestionEntityLookup getInstance() { + + /* + * I hate this method and I want it to go away. The singleton pattern is transitory, I want this + * class to be wired via a bean reference instead. But from the starting point, it would require + * fixing all the classes across the code base up front and I don't want this task to expand + * beyond just refactoring the OxmModelLoader. For now I'll keep the singleton pattern, but I + * really want to get rid of it once we are properly spring wired. 
+ */ + + if (instance == null) { + instance = new SuggestionEntityLookup(); + } + + return instance; + } + + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap oxmProperties = new LinkedHashMap(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map properties = entity.getDescriptor().getProperties(); + if (properties != null) { + for (Map.Entry entry : properties.entrySet()) { + + + if (entry.getKey().equalsIgnoreCase("containsSuggestibleProps")) { + + oxmProperties.put("containsSuggestibleProps", "true"); + + Vector descriptorMaps = entity.getDescriptor().getMappings(); + List listOfSuggestableAttributes = new ArrayList(); + + for (DatabaseMapping descMap : descriptorMaps) { + if (descMap.isAbstractDirectMapping()) { + + if (descMap.getProperties().get("suggestibleOnSearch") != null) { + String suggestableOnSearchString = + String.valueOf(descMap.getProperties().get("suggestibleOnSearch")); + + boolean isSuggestibleOnSearch = Boolean.valueOf(suggestableOnSearchString); + + if (isSuggestibleOnSearch) { + /* Grab attribute types for suggestion */ + String attributeName = + descMap.getField().getName().replaceAll("/text\\(\\)", ""); + listOfSuggestableAttributes.add(attributeName); + + if (descMap.getProperties().get("suggestionVerbs") != null) { + String suggestionVerbsString = + String.valueOf(descMap.getProperties().get("suggestionVerbs")); + + oxmProperties.put("suggestionVerbs", suggestionVerbsString); + } + } + } + } + } + + if (!listOfSuggestableAttributes.isEmpty()) { + oxmProperties.put("suggestibleAttributes", + String.join(",", listOfSuggestableAttributes)); + } + } else if (entry.getKey().equalsIgnoreCase("suggestionAliases")) { + oxmProperties.put("suggestionAliases", entry.getValue()); + } + } + } + + if (oxmProperties.containsKey("containsSuggestibleProps")) { + suggestionSearchEntityOxmModel.put(entityName, oxmProperties); + } + } + + for (Entry> suggestionEntityModel : suggestionSearchEntityOxmModel + .entrySet()) { + HashMap attribute = suggestionEntityModel.getValue(); + + String entityName = attribute.get("entityName"); + SuggestionSearchEntity suggestionSearchEntity = new SuggestionSearchEntity(this); + suggestionSearchEntity.setEntityType(entityName); + + if (attribute.get("suggestionAliases") != null) { + suggestionSearchEntity + .setSuggestionAliases(Arrays.asList(attribute.get("suggestionAliases").split(","))); + } + + if (attribute.get("suggestibleAttributes") != null) { + suggestionSearchEntity.setSuggestionPropertyTypes( + Arrays.asList(attribute.get("suggestibleAttributes").split(","))); + } + + SuggestionEntityDescriptor entity = new SuggestionEntityDescriptor(); + entity.setSuggestionSearchEntity(suggestionSearchEntity); + entity.setEntityName(entityName); + + if (attribute.get("primaryKeyAttributeNames") != null) { + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + 
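+        // primary key names arrive from the OXM descriptor as a single comma-separated string
+        // and are normalized into a list before being stored on the suggestion descriptor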
} + + suggestionSearchEntityDescriptors.put(entityName, entity); + } + } + + public Map> getSuggestionSearchEntityOxmModel() { + return suggestionSearchEntityOxmModel; + } + + public void setSuggestionSearchEntityOxmModel( + Map> suggestionSearchEntityOxmModel) { + this.suggestionSearchEntityOxmModel = suggestionSearchEntityOxmModel; + } + + public Map getSuggestionSearchEntityDescriptors() { + return suggestionSearchEntityDescriptors; + } + + public void setSuggestionSearchEntityDescriptors( + Map suggestionSearchEntityDescriptors) { + this.suggestionSearchEntityDescriptors = suggestionSearchEntityDescriptors; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java b/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java new file mode 100644 index 0000000..39ee8c5 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java @@ -0,0 +1,949 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.crossentityreference.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.CrossEntityReference; +import org.onap.aai.sparky.config.oxm.CrossEntityReferenceDescriptor; +import org.onap.aai.sparky.config.oxm.CrossEntityReferenceLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; +import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.SynchronizerConstants; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.IndexableCrossEntityReference; +import org.onap.aai.sparky.sync.entity.MergableEntity; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class CrossEntityReferenceSynchronizer. + */ +public class CrossEntityReferenceSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + /** + * The Class RetryCrossEntitySyncContainer. + */ + private class RetryCrossEntitySyncContainer { + NetworkTransaction txn; + IndexableCrossEntityReference icer; + + /** + * Instantiates a new retry cross entity sync container. 
+ * + * @param txn the txn + * @param icer the icer + */ + public RetryCrossEntitySyncContainer(NetworkTransaction txn, + IndexableCrossEntityReference icer) { + this.txn = txn; + this.icer = icer; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public IndexableCrossEntityReference getIndexableCrossEntityReference() { + return icer; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(CrossEntityReferenceSynchronizer.class); + + private static final String SERVICE_INSTANCE = "service-instance"; + + private Deque selflinks; + private Deque retryQueue; + private Map retryLimitTracker; + private boolean isAllWorkEnumerated; + protected ExecutorService esPutExecutor; + + + /** + * Instantiates a new cross entity reference synchronizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public CrossEntityReferenceSynchronizer(ElasticSearchSchemaConfig schemaConfig, + int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig) throws Exception { + super(LOG, "CERS", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(), + aaiStatConfig, esStatConfig); + this.selflinks = new ConcurrentLinkedDeque(); + this.retryQueue = new ConcurrentLinkedDeque(); + this.retryLimitTracker = new ConcurrentHashMap(); + this.synchronizerName = "Cross Reference Entity Synchronizer"; + this.isAllWorkEnumerated = false; + this.esPutExecutor = NodeUtils.createNamedExecutor("CERS-ES-PUT", 5, LOG); + this.aaiEntityStats.intializeEntityCounters( + CrossEntityReferenceLookup.getInstance().getCrossReferenceEntityDescriptors().keySet()); + + this.esEntityStats.intializeEntityCounters( + CrossEntityReferenceLookup.getInstance().getCrossReferenceEntityDescriptors().keySet()); + this.syncDurationInMs = -1; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "CrossEntitySynchronizer", "", "Sync", ""); + + resetCounters(); + syncStartedTimeStampInMs = System.currentTimeMillis(); + launchSyncFlow(); + return OperationState.OK; + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return getStatReport(syncDurationInMs, showFinalReport); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !isAllWorkEnumerated) { + return false; + } + + return true; + } + + /** + * Launch sync flow. 
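+ *
+ * In outline, derived from the implementation below: collect the self-links of every entity type
+ * that declares a cross-entity-reference, wait for that enumeration to complete, then resolve each
+ * self-link, derive the nested child documents and upsert them into the Elasticsearch index,
+ * retrying version conflicts until the outstanding work counters drain to zero.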
+ * + * @return the operation state + */ + private OperationState launchSyncFlow() { + final Map contextMap = MDC.getCopyOfContextMap(); + Map descriptorMap = + CrossEntityReferenceLookup.getInstance().getCrossReferenceEntityDescriptors(); + + if (descriptorMap.isEmpty()) { + LOG.error(AaiUiMsgs.ERROR_LOADING_OXM); + + return OperationState.ERROR; + } + + Collection syncTypes = descriptorMap.keySet(); + + try { + + /* + * launch a parallel async thread to process the documents for each entity-type (to max the of + * the configured executor anyway) + */ + + aaiWorkOnHand.set(syncTypes.size()); + + for (String key : syncTypes) { + + supplyAsync(new Supplier() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + // TODO -> LOG, what should be logged here? + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); + } + }); + } + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + isAllWorkEnumerated = true; + performSync(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + // TODO -> LOG, waht should be logged here? + } + + return OperationState.OK; + } + + /** + * Perform sync. + */ + private void performSync() { + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + CrossEntityReferenceDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = CrossEntityReferenceLookup.getInstance().getCrossReferenceEntityDescriptors() + .get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + if (descriptor.hasCrossEntityReferences()) { + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setQueryParameters(linkDescriptor.getDepthModifier()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.SELF_LINK_GET, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.SELF_LINK_CROSS_REF_SYNC); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + } + } + } + + /** + * Process entity type self links. 
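+ *
+ * The nodes-query response parsed here is expected to carry a shape along these lines (the
+ * resource-type and resource-link values are illustrative only):
+ *
+ *   { "result-data": [ { "resource-type": "generic-vnf",
+ *       "resource-link": "/aai/vN/network/generic-vnfs/generic-vnf/example-vnf-id" } ] }
+ *
+ * Only entries whose resource-type maps to a descriptor with cross-entity references are queued
+ * as self-link descriptors for the sync.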
+ * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + // TODO // TODO -> LOG, waht should be logged here? + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + CrossEntityReferenceLookup cerLookup = CrossEntityReferenceLookup.getInstance(); + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + CrossEntityReferenceDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + descriptor = cerLookup.getCrossReferenceEntityDescriptors().get(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + if (descriptor.hasCrossEntityReferences()) { + selflinks.add(new SelfLinkDescriptor(resourceLink, + SynchronizerConstants.DEPTH_ALL_MODIFIER, resourceType)); + } + } + } + } + } + } + + + + /** + * By providing the entity type and a json node for the entity, determine the primary key name(s) + * + primary key value(s) sufficient to build an entity query string of the following format: + * + * .: + * + * @return - a composite string in the above format or null + */ + private String determineEntityQueryString(String entityType, JsonNode entityJsonNode) { + + OxmEntityDescriptor entityDescriptor = + OxmEntityLookup.getInstance().getEntityDescriptors().get(entityType); + + String queryString = null; + + if (entityDescriptor != null) { + + final List primaryKeyNames = entityDescriptor.getPrimaryKeyAttributeNames(); + final List keyValues = new ArrayList(); + NodeUtils.extractFieldValuesFromObject(entityJsonNode, primaryKeyNames, keyValues); + + queryString = entityType + "." + NodeUtils.concatArray(primaryKeyNames, "/") + ":" + + NodeUtils.concatArray(keyValues); + + } + + return queryString; + + + } + + /** + * Fetch document for upsert. + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + + if (!txn.getOperationResult().wasSuccessful()) { + LOG.error(AaiUiMsgs.SELF_LINK_GET, txn.getOperationResult().getResult()); + return; + } + + CrossEntityReferenceDescriptor cerDescriptor = CrossEntityReferenceLookup.getInstance() + .getCrossReferenceEntityDescriptors().get(txn.getDescriptor().getEntityName()); + + if (cerDescriptor != null && cerDescriptor.hasCrossEntityReferences()) { + + final String jsonResult = txn.getOperationResult().getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + /** + * Here's what we are going to do: + * + *
• Extract primary key name and value from the parent type. + *
• Extract the primary key and value from the nested child instance. + *
• Build a generic query to discover the self-link for the nested-child-instance using + * parent and child. + *
• Set the self-link on the child. + *
• Generate the id that will allow the elastic-search upsert to work. + *
  • Rinse and repeat. + */ + + CrossEntityReference cerDefinition = cerDescriptor.getCrossEntityReference(); + + if (cerDefinition != null) { + JsonNode convertedNode = null; + try { + convertedNode = + NodeUtils.convertJsonStrToJsonNode(txn.getOperationResult().getResult()); + + final String parentEntityQueryString = + determineEntityQueryString(txn.getEntityType(), convertedNode); + + List extractedParentEntityAttributeValues = new ArrayList(); + + NodeUtils.extractFieldValuesFromObject(convertedNode, + cerDefinition.getReferenceAttributes(), extractedParentEntityAttributeValues); + + List nestedTargetEntityInstances = new ArrayList(); + NodeUtils.extractObjectsByKey(convertedNode, cerDefinition.getTargetEntityType(), + nestedTargetEntityInstances); + + for (JsonNode targetEntityInstance : nestedTargetEntityInstances) { + + if (cerDescriptor != null) { + + String childEntityType = cerDefinition.getTargetEntityType(); + + List childPrimaryKeyNames = cerDescriptor.getPrimaryKeyAttributeNames(); + + List childKeyValues = new ArrayList(); + NodeUtils.extractFieldValuesFromObject(targetEntityInstance, childPrimaryKeyNames, + childKeyValues); + + String childEntityQueryKeyString = + childEntityType + "." + NodeUtils.concatArray(childPrimaryKeyNames, "/") + ":" + + NodeUtils.concatArray(childKeyValues); + + /** + * Build generic-query to query child instance self-link from AAI + */ + List orderedQueryKeyParams = new ArrayList(); + + /** + * At present, there is an issue with resolving the self-link using the + * generic-query with nothing more than the service-instance identifier and the + * service-subscription. There is another level of detail we don't have access to + * unless we parse it out of the service-subscription self-link, which is a coupling + * I would like to avoid. Fortunately, there is a workaround, but only for + * service-instances, which is presently our only use-case for the + * cross-entity-reference in R1707. Going forwards hopefully there will be other + * ways to resolve a child self-link using parental embedded meta data that we don't + * currently have. + * + * The work-around with the service-instance entity-type is that it's possible to + * request the self-link using only the service-instance-id because of a historical + * AAI functional query requirement that it be possible to query a service-instance + * only by it's service-instance-id. This entity type is the only one in the system + * that can be queried this way which makes it a very limited workaround, but good + * enough for the current release. 
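+ *
+ * For illustration only (identifiers below are made up), the service-instance shortcut described
+ * above yields a generic-query keyed on the child alone, roughly of the form:
+ *
+ *   /aai/vN/search/generic-query?key=service-instance.service-instance-id:example-si-id
+ *       &start-node-type=service-instance&include=service-instance
+ *
+ * whereas any other child type would carry both the parent key parameter and the child key
+ * parameter on the same request.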
+ */ + + if (SERVICE_INSTANCE.equals(childEntityType)) { + orderedQueryKeyParams.clear(); + orderedQueryKeyParams.add(childEntityQueryKeyString); + } else { + orderedQueryKeyParams.add(parentEntityQueryString); + orderedQueryKeyParams.add(childEntityQueryKeyString); + } + + String genericQueryStr = null; + try { + genericQueryStr = + aaiAdapter.getGenericQueryForSelfLink(childEntityType, orderedQueryKeyParams); + + if (genericQueryStr != null) { + aaiWorkOnHand.incrementAndGet(); + + OperationResult aaiQueryResult = aaiAdapter.queryActiveInventoryWithRetries( + genericQueryStr, "application/json", aaiAdapter.getNumRequestRetries()); + + aaiWorkOnHand.decrementAndGet(); + + if (aaiQueryResult != null && aaiQueryResult.wasSuccessful()) { + + Collection entityLinks = new ArrayList(); + JsonNode genericQueryResult = null; + try { + genericQueryResult = + NodeUtils.convertJsonStrToJsonNode(aaiQueryResult.getResult()); + + if (genericQueryResult != null) { + + NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link", + entityLinks); + + String selfLink = null; + + if (entityLinks.size() != 1) { + /** + * an ambiguity exists where we can't reliably determine the self link, + * this should be a permanent error + */ + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY, + String.valueOf(entityLinks.size())); + } else { + selfLink = ((JsonNode) entityLinks.toArray()[0]).asText(); + + SearchableEntityLookup searchableEntityLookup = + SearchableEntityLookup.getInstance(); + + SearchableOxmEntityDescriptor searchableDescriptor = + searchableEntityLookup.getSearchableEntityDescriptors() + .get(txn.getEntityType()); + + if (searchableDescriptor != null + && searchableDescriptor.getSearchableAttributes().size() > 0) { + + IndexableCrossEntityReference icer = + getPopulatedDocument(targetEntityInstance, cerDescriptor); + + for (String parentCrossEntityReferenceAttributeValue : extractedParentEntityAttributeValues) { + icer.addCrossEntityReferenceValue( + parentCrossEntityReferenceAttributeValue); + } + + icer.setLink(ActiveInventoryConfig.extractResourcePath(selfLink)); + + icer.deriveFields(); + + String link = null; + try { + link = getElasticFullUrl("/" + icer.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, + exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync( + new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, + error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, icer); + } + }); + } + } + } + } else { + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION); + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), + exc.getLocalizedMessage()); + } + + } else { + String message = "Entity sync failed because AAI query failed with error " + + aaiQueryResult.getResult(); + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); + } + + } else { + String message = + "Entity Sync failed because generic query str could not be determined."; + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); 
+ } + } catch (Exception exc) { + String message = + "Failed to sync entity because generation of generic query failed with error = " + + exc.getMessage(); + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); + } + + } + } + + } catch (IOException ioe) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, ioe.getMessage()); + } + } + + } + + } else { + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND, txn.getEntityType()); + } + } + + /** + * Perform document upsert. + * + * @param esGetResult the es get result + * @param icer the icer + */ + protected void performDocumentUpsert(NetworkTransaction esGetResult, + IndexableCrossEntityReference icer) { + /** + *

+ * As part of the response processing we need to do the following:
+ * • 1. Extract the version (if present); it will be the ETAG when we use the
+ * Search-Abstraction-Service.
+ * • 2. Spawn the next task, which is to do the PUT operation into elastic with or without the
+ * version tag:
+ * • a) if the version is null or RC=404, then a standard PUT, no _update with version tag
+ * • b) if the version != null, do a PUT with _update?version=(versionNumber) in the URI to elastic
+ *
    + */ + String link = null; + try { + link = getElasticFullUrl("/" + icer.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + boolean wasEntryDiscovered = false; + String versionNumber = null; + if (esGetResult.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, icer.getEntityPrimaryKeyValue()); + } else if (esGetResult.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + try { + versionNumber = NodeUtils.extractFieldValueFromObject( + NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), + "_version"); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "version Number", + icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); + return; + } + } else { + /* + * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we + * return. + */ + LOG.info(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetResult.getOperationResult().getResultCode())); + return; + } + + try { + String jsonPayload = null; + if (wasEntryDiscovered) { + try { + ArrayList sourceObject = new ArrayList(); + NodeUtils.extractObjectsByKey( + NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), + "_source", sourceObject); + + if (!sourceObject.isEmpty()) { + String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); + MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); + ObjectReader updater = mapper.readerForUpdating(me); + MergableEntity merged = updater.readValue(icer.getAsJson()); + jsonPayload = mapper.writeValueAsString(merged); + } + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "source value", + icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); + return; + } + } else { + jsonPayload = icer.getAsJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = elasticSearchAdapter.buildBulkImportOperationRequest( + getIndexName(), ElasticSearchConfig.getConfig().getType(), icer.getId(), + versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetResult.getEntityType()); + transactionTracker.setDescriptor(esGetResult.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), + requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetResult, icer); + } + }); + } + + } else { + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetResult.getEntityType()); + updateElasticTxn.setDescriptor(esGetResult.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync( + new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + 
esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetResult, icer); + } + }); + } + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, exc.getLocalizedMessage()); + } + } + + /** + * Process store document result. + * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param icer the icer + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, IndexableCrossEntityReference icer) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(icer.getId())) { + + esWorkOnHand.incrementAndGet(); + + RetryCrossEntitySyncContainer rsc = new RetryCrossEntitySyncContainer(esGetResult, icer); + retryQueue.push(rsc); + + LOG.warn(AaiUiMsgs.ES_CROSS_REF_SYNC_VERSION_CONFLICT); + } + } else { + LOG.error(AaiUiMsgs.ES_CROSS_REF_SYNC_FAILURE, String.valueOf(or.getResultCode()), + or.getResult()); + } + } + } + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetryCrossEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + IndexableCrossEntityReference icer = rsc.getIndexableCrossEntityReference(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + // In this retry flow the icer object has already + // derived its fields + link = getElasticFullUrl("/" + icer.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow and we did + * that for this request already when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, icer); + } + }); + } + + } + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_RESYNC_LIMIT, id); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + /** + * Gets the populated document. 
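+ *
+ * The returned document carries the descriptor's entity type, and its primary key value is the
+ * "/"-joined concatenation of the entity's primary key attribute values extracted below.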
+ * + * @param entityNode the entity node + * @param resultDescriptor the result descriptor + * @return the populated document + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. + */ + protected IndexableCrossEntityReference getPopulatedDocument(JsonNode entityNode, + OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { + + IndexableCrossEntityReference icer = new IndexableCrossEntityReference(); + + icer.setEntityType(resultDescriptor.getEntityName()); + + List primaryKeyValues = new ArrayList(); + String pkeyValue = null; + + for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + icer.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + + return icer; + + } +} diff --git a/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java b/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java new file mode 100644 index 0000000..40bb98c --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java @@ -0,0 +1,460 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URLEncoder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; + +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.UriBuilder; + +import org.apache.http.NameValuePair; +import org.apache.http.client.utils.URIBuilder; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.Encryptor; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; + + + +/** + * The Class ActiveInventoryAdapter. + */ + +public class ActiveInventoryAdapter { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ActiveInventoryAdapter.class); + + private static final String HEADER_TRANS_ID = "X-TransactionId"; + private static final String HEADER_FROM_APP_ID = "X-FromAppId"; + private static final String HEADER_AUTHORIZATION = "Authorization"; + + private static final String HTTP_SCHEME = "http"; + private static final String HTTPS_SCHEME = "https"; + + private static final String TRANSACTION_ID_PREFIX = "txnId-"; + private static final String UI_APP_NAME = "AAI-UI"; + + private OxmModelLoader oxmModelLoader; + private OxmEntityLookup oxmEntityLookup; + + private RestClient restClient; + + private String activeInventoryIpAddress; + private String activeInventoryServerPort; + private int numRequestRetries; + private String basicAuthUserName; + private String basicAuthPassword; + private RestAuthenticationMode restAuthenticationMode; + private int connectTimeoutInMs; + private int readTimeoutInMs; + + /** + * Instantiates a new active inventory adapter. 
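+ *
+ * A rough construction sketch for the SSL-certificate flavour; the file names, host, port and
+ * timeouts below are illustrative assumptions, not values from any real configuration:
+ *
+ *   ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(oxmModelLoader,
+ *       RestAuthenticationMode.SSL_CERT, false, false, "aai-client-cert.p12",
+ *       "encrypted-keystore-password", "example-truststore.jks", 60000, 30000);
+ *   aaiAdapter.setOxmEntityLookup(OxmEntityLookup.getInstance());
+ *   aaiAdapter.setActiveInventoryIpAddress("aai.example.org");
+ *   aaiAdapter.setActiveInventoryServerPort("8443");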
+ * + */ + + public ActiveInventoryAdapter(OxmModelLoader oxmModelLoader, + RestAuthenticationMode authenticationMode, boolean validateServerHostname, + boolean validateServerCertChain, String certFileName, String certPassword, + String truststoreFileName, int connectTimeoutInMs, int readTimeoutInMs) + throws ElasticSearchOperationException, IOException { + + this.oxmModelLoader = oxmModelLoader; + this.restAuthenticationMode = authenticationMode; + this.connectTimeoutInMs = connectTimeoutInMs; + this.readTimeoutInMs = readTimeoutInMs; + + + Encryptor enc = new Encryptor(); + String certFileNameFullPath = TierSupportUiConstants.CONFIG_AUTH_LOCATION + certFileName; + String decryptedCertPassword = enc.decryptValue(certPassword); + String truststoreFileNameFullPath = + TierSupportUiConstants.CONFIG_AUTH_LOCATION + truststoreFileName; + + this.restClient = new RestClient().authenticationMode(authenticationMode) + .validateServerCertChain(validateServerCertChain) + .validateServerHostname(validateServerHostname).clientCertFile(certFileNameFullPath) + .clientCertPassword(decryptedCertPassword).trustStore(truststoreFileNameFullPath) + .connectTimeoutMs(connectTimeoutInMs).readTimeoutMs(readTimeoutInMs); + + } + + public ActiveInventoryAdapter(OxmModelLoader oxmModelLoader, + RestAuthenticationMode authenticationMode, boolean validateServerHostname, + boolean validateServerCertChain, String basicAuthUserName, String basicAuthPassword, + int connectTimeoutInMs, int readTimeoutInMs) + throws ElasticSearchOperationException, IOException { + + this.oxmModelLoader = oxmModelLoader; + this.restAuthenticationMode = authenticationMode; + + this.restClient = new RestClient().authenticationMode(authenticationMode) + .validateServerCertChain(validateServerCertChain) + .validateServerHostname(validateServerHostname).connectTimeoutMs(connectTimeoutInMs) + .readTimeoutMs(readTimeoutInMs); + + this.basicAuthUserName = basicAuthUserName; + this.basicAuthPassword = basicAuthPassword; + + } + + + protected Map> getMessageHeaders() { + + Map> headers = new HashMap>(); + + headers.putIfAbsent(HEADER_FROM_APP_ID, new ArrayList()); + headers.get(HEADER_FROM_APP_ID).add(UI_APP_NAME); + + headers.putIfAbsent(HEADER_TRANS_ID, new ArrayList()); + headers.get(HEADER_TRANS_ID).add(TRANSACTION_ID_PREFIX + NodeUtils.getRandomTxnId()); + + if (restAuthenticationMode == RestAuthenticationMode.SSL_BASIC) { + + headers.putIfAbsent(HEADER_AUTHORIZATION, new ArrayList()); + headers.get(HEADER_AUTHORIZATION).add(getBasicAuthenticationCredentials()); + + } + + return headers; + } + + protected String getBasicAuthenticationCredentials() { + String usernameAndPassword = String.join(":", basicAuthUserName, basicAuthPassword); + return "Basic " + java.util.Base64.getEncoder().encodeToString(usernameAndPassword.getBytes()); + } + + public int getNumRequestRetries() { + return numRequestRetries; + } + + + + public void setNumRequestRetries(int numRequestRetries) { + this.numRequestRetries = numRequestRetries; + } + + public OxmEntityLookup getOxmEntityLookup() { + return oxmEntityLookup; + } + + public void setOxmEntityLookup(OxmEntityLookup oxmEntityLookup) { + this.oxmEntityLookup = oxmEntityLookup; + } + + public String getActiveInventoryIpAddress() { + return activeInventoryIpAddress; + } + + public void setActiveInventoryIpAddress(String activeInventoryIpAddress) { + this.activeInventoryIpAddress = activeInventoryIpAddress; + } + + public String getActiveInventoryServerPort() { + return activeInventoryServerPort; + } + + public void 
setActiveInventoryServerPort(String activeInventoryServerPort) { + this.activeInventoryServerPort = activeInventoryServerPort; + } + + protected String getResourceBasePath() { + + String versionStr = null; + if (oxmModelLoader != null) { + versionStr = String.valueOf(oxmModelLoader.getLatestVersionNum()); + } + + return "/aai/v" + versionStr; + + } + + public int getConnectTimeoutInMs() { + return this.connectTimeoutInMs; + } + + public int getReadTimeoutInMs() { + return this.readTimeoutInMs; + } + + /** + * Gets the full url. + * + * @param resourceUrl the resource url + * @return the full url + * @throws Exception the exception + */ + private String getFullUrl(String resourceUrl) throws Exception { + final String basePath = getResourceBasePath(); + return String.format("https://%s:%s%s%s", activeInventoryIpAddress, activeInventoryServerPort, + basePath, resourceUrl); + } + + public String getGenericQueryForSelfLink(String startNodeType, List queryParams) + throws Exception { + + URIBuilder urlBuilder = new URIBuilder(getFullUrl("/search/generic-query")); + + for (String queryParam : queryParams) { + urlBuilder.addParameter("key", queryParam); + } + + urlBuilder.addParameter("start-node-type", startNodeType); + urlBuilder.addParameter("include", startNodeType); + + final String constructedLink = urlBuilder.toString(); + + return constructedLink; + + } + + + public OperationResult getSelfLinksByEntityType(String entityType) throws Exception { + + /* + * For this one, I want to dynamically construct the nodes-query for self-link discovery as a + * utility method that will use the OXM model entity data to drive the query as well. + */ + + if (entityType == null) { + throw new NullPointerException( + "Failed to getSelfLinksByEntityType() because entityType is null"); + } + + OxmEntityDescriptor entityDescriptor = oxmEntityLookup.getEntityDescriptors().get(entityType); + + if (entityDescriptor == null) { + throw new NoSuchElementException("Failed to getSelfLinksByEntityType() because could" + + " not find entity descriptor from OXM with type = " + entityType); + } + + String link = null; + final String primaryKeyStr = + NodeUtils.concatArray(entityDescriptor.getPrimaryKeyAttributeNames(), "/"); + + link = getFullUrl("/search/nodes-query?search-node-type=" + entityType + "&filter=" + + primaryKeyStr + ":EXISTS"); + + + return restClient.get(link, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE); + + } + + public OperationResult getSelfLinkForEntity(String entityType, String primaryKeyName, + String primaryKeyValue) throws Exception { + + if (entityType == null) { + throw new NullPointerException("Failed to getSelfLinkForEntity() because entityType is null"); + } + + if (primaryKeyName == null) { + throw new NullPointerException( + "Failed to getSelfLinkForEntity() because primaryKeyName is null"); + } + + if (primaryKeyValue == null) { + throw new NullPointerException( + "Failed to getSelfLinkForEntity() because primaryKeyValue is null"); + } + + /* + * Try to protect ourselves from illegal URI formatting exceptions caused by characters that + * aren't natively supported in a URI, but can be escaped to make them legal. 
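+ *
+ * For example (hypothetical key value), URLEncoder.encode("my key/value", "UTF-8") yields
+ * "my+key%2Fvalue", keeping the generic-query URI legal even when a primary key value contains
+ * spaces or slashes.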
+ */ + + String encodedEntityType = URLEncoder.encode(entityType, "UTF-8"); + String encodedPrimaryKeyName = URLEncoder.encode(primaryKeyName, "UTF-8"); + String encodedPrimaryKeyValue = URLEncoder.encode(primaryKeyValue, "UTF-8"); + + String link = null; + + if ("service-instance".equals(entityType)) { + + link = getFullUrl("/search/generic-query?key=" + encodedEntityType + "." + + encodedPrimaryKeyName + ":" + encodedPrimaryKeyValue + "&start-node-type=" + + encodedEntityType + "&include=customer&depth=2"); + + } else { + + link = + getFullUrl("/search/generic-query?key=" + encodedEntityType + "." + encodedPrimaryKeyName + + ":" + encodedPrimaryKeyValue + "&start-node-type=" + encodedEntityType); + + } + + return queryActiveInventoryWithRetries(link, "application/json", numRequestRetries); + + } + + + /** + * Our retry conditions should be very specific. + * + * @param r the r + * @return true, if successful + */ + private boolean shouldRetryRequest(OperationResult r) { + + if (r == null) { + return true; + } + + int rc = r.getResultCode(); + + if (rc == 200) { + return false; + } + + if (rc == 404) { + return false; + } + + return true; + + } + + /** + * Query active inventory. + * + * @param url the url + * @param acceptContentType the accept content type + * @return the operation result + */ + // package protected for test classes instead of private + OperationResult queryActiveInventory(String url, String acceptContentType) { + + return restClient.get(url, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE); + + } + + public OperationResult queryActiveInventoryWithRetries(String url, String responseType, + int numRetries) { + + OperationResult result = null; + + for (int retryCount = 0; retryCount < numRetries; retryCount++) { + + LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_SEQ, url, String.valueOf(retryCount + 1)); + + result = queryActiveInventory(url, responseType); + + /** + * Record number of times we have attempted the request to later summarize how many times we + * are generally retrying over thousands of messages in a sync. + * + * If the number of retries is surprisingly high, then we need to understand why that is as + * the number of retries is also causing a heavier load on AAI beyond the throttling controls + * we already have in place in term of the transaction rate controller and number of + * parallelized threads per task processor. + */ + + result.setNumRetries(retryCount); + + if (!shouldRetryRequest(result)) { + + result.setFromCache(false); + LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_DONE_SEQ, url, String.valueOf(retryCount + 1)); + + return result; + } + + try { + /* + * Sleep between re-tries to be nice to the target system. + */ + Thread.sleep(50); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.QUERY_AAI_WAIT_INTERRUPTION, exc.getLocalizedMessage()); + break; + } + LOG.error(AaiUiMsgs.QUERY_AAI_RETRY_FAILURE_WITH_SEQ, url, String.valueOf(retryCount + 1)); + + } + + LOG.info(AaiUiMsgs.QUERY_AAI_RETRY_MAXED_OUT, url); + + return result; + + } + + public String repairSelfLink(String selfLink) { + return repairSelfLink(selfLink, null); + } + + /** + * This method adds a scheme, host and port (if missing) to the passed-in URI. If these parts of + * the URI are already present, they will not be duplicated. + * + * @param selflink The URI to repair + * @param queryParams The query parameters as a single string + * @return The corrected URI (i.e. 
includes a scheme/host/port) + */ + public String repairSelfLink(String selflink, String queryParams) { + if (selflink == null) { + return selflink; + } + + UriBuilder builder = UriBuilder.fromPath(selflink).host(activeInventoryIpAddress) + .port(Integer.parseInt(activeInventoryServerPort)); + + switch (restAuthenticationMode) { + + case SSL_BASIC: + case SSL_CERT: { + builder.scheme(HTTPS_SCHEME); + break; + } + + default: { + builder.scheme(HTTP_SCHEME); + } + } + + boolean includeQueryParams = ((null != queryParams) && (!"".equals(queryParams))); + + /* + * builder.build().toString() will encode special characters to hexadecimal pairs prefixed with + * a '%' so we're adding the query parameters separately, in their UTF-8 representations, so + * that characters such as '?', '&', etc. remain intact as needed by the synchronizer + */ + return (builder.build().toString() + (includeQueryParams ? queryParams : "")); + } + +} diff --git a/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java b/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java new file mode 100644 index 0000000..1e2bb8d --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java @@ -0,0 +1,120 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.restclient.enums.RestAuthenticationMode; + +/** + * The Class ElasticSearchAdapter. + * + */ +public class ElasticSearchAdapter { + + private static final String BULK_IMPORT_INDEX_TEMPLATE = + "{\"index\":{\"_index\":\"%s\",\"_type\":\"%s\",\"_id\":\"%s\", \"_version\":\"%s\"}}\n"; + + private RestClient restClient; + + /** + * Instantiates a new elastic search adapter. 
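+ *
+ * A minimal usage sketch; the authentication mode, URL and timeouts are illustrative assumptions:
+ *
+ *   ElasticSearchAdapter esAdapter =
+ *       new ElasticSearchAdapter(RestAuthenticationMode.HTTP_NOAUTH, 60000, 30000);
+ *   OperationResult result = esAdapter.doGet(
+ *       "http://localhost:9200/entity-index/default/example-doc-id",
+ *       MediaType.APPLICATION_JSON_TYPE);
+ *
+ * buildBulkImportOperationRequest(...) builds the two-line bulk-API payload: an action line of the
+ * form {"index":{"_index":"...","_type":"...","_id":"...","_version":"..."}} followed by the
+ * document source and a trailing newline.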
+ */ + public ElasticSearchAdapter(RestAuthenticationMode restAuthenticationMode, int connectTimeoutInMs, + int readTimeoutInMs) { + + this.restClient = new RestClient().authenticationMode(restAuthenticationMode) + .connectTimeoutMs(connectTimeoutInMs).readTimeoutMs(readTimeoutInMs); + + } + + protected Map> getMessageHeaders() { + Map> headers = new HashMap>(); + // insert mandatory headers if there are any + return headers; + } + + public OperationResult doGet(String url, MediaType acceptContentType) { + return restClient.get(url, getMessageHeaders(), acceptContentType); + } + + public OperationResult doDelete(String url, MediaType acceptContentType) { + return restClient.delete(url, getMessageHeaders(), acceptContentType); + } + + public OperationResult doPost(String url, String jsonPayload, MediaType acceptContentType) { + return restClient.post(url, jsonPayload, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE, + acceptContentType); + } + + public OperationResult doPut(String url, String jsonPayload, MediaType acceptContentType) { + return restClient.put(url, jsonPayload, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE, + acceptContentType); + } + + public OperationResult doPatch(String url, String jsonPayload, MediaType acceptContentType) { + + Map> headers = getMessageHeaders(); + headers.putIfAbsent("X-HTTP-Method-Override", new ArrayList()); + headers.get("X-HTTP-Method-Override").add("PATCH"); + + return restClient.post(url, jsonPayload, headers, MediaType.APPLICATION_JSON_TYPE, + acceptContentType); + } + + public OperationResult doHead(String url, MediaType acceptContentType) { + return restClient.head(url, getMessageHeaders(), acceptContentType); + } + + public OperationResult doBulkOperation(String url, String payload) { + return restClient.put(url, payload, getMessageHeaders(), + MediaType.APPLICATION_FORM_URLENCODED_TYPE, MediaType.APPLICATION_JSON_TYPE); + } + + public String buildBulkImportOperationRequest(String index, String type, String id, + String version, String payload) { + + StringBuilder requestPayload = new StringBuilder(128); + + requestPayload.append(String.format(BULK_IMPORT_INDEX_TEMPLATE, index, type, id, version)); + requestPayload.append(payload).append("\n"); + + return requestPayload.toString(); + + } + + public OperationResult retrieveEntityById(String host, String port, String indexName, + String docType, String resourceUrl) { + String esUrl = + String.format("http://%s:%s/%s/%s/%s", host, port, indexName, docType, resourceUrl); + return doGet(esUrl, MediaType.APPLICATION_JSON_TYPE); + } + +} diff --git a/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java b/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java index da24c80..fbc89c3 100644 --- a/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java +++ b/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java @@ -22,9 +22,10 @@ */ package org.onap.aai.sparky.dal; +import org.onap.aai.restclient.client.OperationResult; import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; + /** * The Class NetworkTransaction. 
@@ -37,12 +38,16 @@ public class NetworkTransaction { private String link; + private String queryParameters; + private HttpMethod operationType; private OxmEntityDescriptor descriptor; private long createdTimeStampInMs; + private long opTimeInMs; + private long taskAgeInMs; /** @@ -50,6 +55,7 @@ public class NetworkTransaction { */ public NetworkTransaction() { this.createdTimeStampInMs = System.currentTimeMillis(); + this.opTimeInMs = 0L; } /** @@ -64,6 +70,7 @@ public class NetworkTransaction { this.operationType = method; this.entityType = entityType; this.operationResult = or; + this.opTimeInMs = 0L; } public HttpMethod getOperationType() { @@ -109,6 +116,22 @@ public class NetworkTransaction { this.link = link; } + public String getQueryParameters() { + return queryParameters; + } + + public void setQueryParameters(String queryParameters) { + this.queryParameters = queryParameters; + } + + public long getOpTimeInMs() { + return opTimeInMs; + } + + public void setOpTimeInMs(long opTimeInMs) { + this.opTimeInMs = opTimeInMs; + } + public OxmEntityDescriptor getDescriptor() { return descriptor; } diff --git a/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryAdapter.java b/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryAdapter.java deleted file mode 100644 index 08a6584..0000000 --- a/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryAdapter.java +++ /dev/null @@ -1,407 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.dal.aai; - -import java.io.IOException; -import java.net.URLEncoder; -import java.nio.ByteBuffer; -import java.util.List; -import java.util.NoSuchElementException; - -import org.apache.http.client.utils.URIBuilder; -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryRestConfig; -import org.onap.aai.sparky.dal.aai.enums.RestAuthenticationMode; -import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestClientBuilder; -import org.onap.aai.sparky.dal.rest.RestfulDataAccessor; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.security.SecurityContextFactory; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.WebResource.Builder; - - -/** - * The Class ActiveInventoryAdapter. - */ - -/** - * @author davea - * - */ -public class ActiveInventoryAdapter extends RestfulDataAccessor - implements ActiveInventoryDataProvider { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(ActiveInventoryAdapter.class); - - private static final String HEADER_TRANS_ID = "X-TransactionId"; - private static final String HEADER_FROM_APP_ID = "X-FromAppId"; - private static final String HEADER_AUTHORIZATION = "Authorization"; - - private static final String TRANSACTION_ID_PREFIX = "txnId-"; - private static final String UI_APP_NAME = "AAI-UI"; - - - private ActiveInventoryConfig config; - - /** - * Instantiates a new active inventory adapter. - * - * @param restClientBuilder the rest client builder - * @throws ElasticSearchOperationException the elastic search operation exception - * @throws IOException Signals that an I/O exception has occurred. - */ - public ActiveInventoryAdapter(RestClientBuilder restClientBuilder) - throws ElasticSearchOperationException, IOException { - super(restClientBuilder); - - try { - this.config = ActiveInventoryConfig.getConfig(); - } catch (Exception exc) { - throw new ElasticSearchOperationException("Error getting active inventory configuration", - exc); - } - - clientBuilder.setUseHttps(true); - - clientBuilder.setValidateServerHostname(config.getAaiSslConfig().isValidateServerHostName()); - - SecurityContextFactory sslContextFactory = clientBuilder.getSslContextFactory(); - - sslContextFactory.setServerCertificationChainValidationEnabled( - config.getAaiSslConfig().isValidateServerCertificateChain()); - - if (config.getAaiRestConfig().getAuthenticationMode() == RestAuthenticationMode.SSL_CERT) { - sslContextFactory.setClientCertFileName(config.getAaiSslConfig().getKeystoreFilename()); - sslContextFactory.setClientCertPassword(config.getAaiSslConfig().getKeystorePassword()); - sslContextFactory.setTrustStoreFileName(config.getAaiSslConfig().getTruststoreFilename()); - } - - clientBuilder.setConnectTimeoutInMs(config.getAaiRestConfig().getConnectTimeoutInMs()); - clientBuilder.setReadTimeoutInMs(config.getAaiRestConfig().getReadTimeoutInMs()); - - } - - /* - * (non-Javadoc) - * - * @see - * org.onap.aai.sparky.dal.rest.RestfulDataAccessor#setClientDefaults(com.sun.jersey.api.client. 
- * Client, java.lang.String, java.lang.String, java.lang.String) - */ - @Override - protected Builder setClientDefaults(Client client, String url, String payloadContentType, - String acceptContentType) { - Builder builder = super.setClientDefaults(client, url, payloadContentType, acceptContentType); - - builder = builder.header(HEADER_FROM_APP_ID, UI_APP_NAME); - byte bytes[] = new byte[6]; - txnIdGenerator.nextBytes(bytes); - builder = - builder.header(HEADER_TRANS_ID, TRANSACTION_ID_PREFIX + ByteBuffer.wrap(bytes).getInt()); - - if (config.getAaiRestConfig().getAuthenticationMode() == RestAuthenticationMode.SSL_BASIC) { - builder = builder.header(HEADER_AUTHORIZATION, - config.getAaiSslConfig().getBasicAuthenticationCredentials()); - } - - return builder; - } - - /** - * Gets the full url. - * - * @param resourceUrl the resource url - * @return the full url - * @throws Exception the exception - */ - private String getFullUrl(String resourceUrl) throws Exception { - ActiveInventoryRestConfig aaiRestConfig = ActiveInventoryConfig.getConfig().getAaiRestConfig(); - final String host = aaiRestConfig.getHost(); - final String port = aaiRestConfig.getPort(); - final String basePath = aaiRestConfig.getResourceBasePath(); - return String.format("https://%s:%s%s%s", host, port, basePath, resourceUrl); - } - - public String getGenericQueryForSelfLink(String startNodeType, List queryParams) - throws Exception { - - URIBuilder urlBuilder = new URIBuilder(getFullUrl("/search/generic-query")); - - for (String queryParam : queryParams) { - urlBuilder.addParameter("key", queryParam); - } - - urlBuilder.addParameter("start-node-type", startNodeType); - urlBuilder.addParameter("include", startNodeType); - - final String constructedLink = urlBuilder.toString(); - - // TODO: debug log for constructed link - - return constructedLink; - - } - - - /* - * (non-Javadoc) - * - * @see - * org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider#getSelfLinksByEntityType(java.lang. - * String) - */ - @Override - public OperationResult getSelfLinksByEntityType(String entityType) throws Exception { - - /* - * For this one, I want to dynamically construct the nodes-query for self-link discovery as a - * utility method that will use the OXM model entity data to drive the query as well. 
- */ - - if (entityType == null) { - throw new NullPointerException( - "Failed to getSelfLinksByEntityType() because entityType is null"); - } - - OxmEntityDescriptor entityDescriptor = - OxmModelLoader.getInstance().getEntityDescriptor(entityType); - - if (entityDescriptor == null) { - throw new NoSuchElementException("Failed to getSelfLinksByEntityType() because could" - + " not find entity descriptor from OXM with type = " + entityType); - } - - String link = null; - final String primaryKeyStr = - NodeUtils.concatArray(entityDescriptor.getPrimaryKeyAttributeName(), "/"); - - link = getFullUrl("/search/nodes-query?search-node-type=" + entityType + "&filter=" - + primaryKeyStr + ":EXISTS"); - - - - return doGet(link, "application/json"); - - } - - /* - * (non-Javadoc) - * - * @see - * org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider#getSelfLinkForEntity(java.lang.String, - * java.lang.String, java.lang.String) - */ - @Override - public OperationResult getSelfLinkForEntity(String entityType, String primaryKeyName, - String primaryKeyValue) throws Exception { - - if (entityType == null) { - throw new NullPointerException("Failed to getSelfLinkForEntity() because entityType is null"); - } - - if (primaryKeyName == null) { - throw new NullPointerException( - "Failed to getSelfLinkForEntity() because primaryKeyName is null"); - } - - if (primaryKeyValue == null) { - throw new NullPointerException( - "Failed to getSelfLinkForEntity() because primaryKeyValue is null"); - } - - - /* - * Try to protect ourselves from illegal URI formatting exceptions caused by characters that - * aren't natively supported in a URI, but can be escaped to make them legal. - */ - - String encodedEntityType = URLEncoder.encode(entityType, "UTF-8"); - String encodedPrimaryKeyName = URLEncoder.encode(primaryKeyName, "UTF-8"); - String encodedPrimaryKeyValue = URLEncoder.encode(primaryKeyValue, "UTF-8"); - - String link = null; - - if ("service-instance".equals(entityType)) { - - link = getFullUrl("/search/generic-query?key=" + encodedEntityType + "." - + encodedPrimaryKeyName + ":" + encodedPrimaryKeyValue + "&start-node-type=" - + encodedEntityType + "&include=customer&depth=2"); - - } else { - - link = - getFullUrl("/search/generic-query?key=" + encodedEntityType + "." + encodedPrimaryKeyName - + ":" + encodedPrimaryKeyValue + "&start-node-type=" + encodedEntityType); - - } - - return queryActiveInventoryWithRetries(link, "application/json", - this.config.getAaiRestConfig().getNumRequestRetries()); - - } - - - /** - * Our retry conditions should be very specific. - * - * @param r the r - * @return true, if successful - */ - private boolean shouldRetryRequest(OperationResult r) { - - if (r == null) { - return true; - } - - int rc = r.getResultCode(); - - if (rc == 200) { - return false; - } - - if (rc == 404) { - return false; - } - - return true; - - } - - /** - * Query active inventory. - * - * @param url the url - * @param acceptContentType the accept content type - * @return the operation result - */ - // package protected for test classes instead of private - OperationResult queryActiveInventory(String url, String acceptContentType) { - return doGet(url, acceptContentType); - } - - /* - * (non-Javadoc) - * - * @see - * org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider#queryActiveInventoryWithRetries(java. 
- * lang.String, java.lang.String, int) - */ - @Override - public OperationResult queryActiveInventoryWithRetries(String url, String responseType, - int numRetries) { - - OperationResult result = null; - - for (int x = 0; x < numRetries; x++) { - - LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_SEQ, url, String.valueOf(x + 1)); - - result = queryActiveInventory(url, responseType); - - /** - * Record number of times we have attempted the request to later summarize how many times we - * are generally retrying over thousands of messages in a sync. - * - * If the number of retries is surprisingly high, then we need to understand why that is as - * the number of retries is also causing a heavier load on AAI beyond the throttling controls - * we already have in place in term of the transaction rate controller and number of - * parallelized threads per task processor. - */ - - result.setNumRequestRetries(x); - - if (!shouldRetryRequest(result)) { - - /* - * if (myConfig.getAaiRestConfig().isCacheEnabled()) { - * - * CachedHttpRequest cachedRequest = new CachedHttpRequest(); - * cachedRequest.setHttpRequestMethod("GET"); cachedRequest.setPayload(""); - * cachedRequest.setPayloadMimeType(""); cachedRequest.setUrl(url); - * cachedRequest.setOperationType( TransactionStorageType.ACTIVE_INVENTORY_QUERY.getIndex() - * ); - * - * CachedHttpResponse cachedResponse = new CachedHttpResponse(); - * cachedResponse.setPayload(result.getResult()); - * cachedResponse.setPayloadMimeType("application/json"); - * cachedResponse.setStatusCode(result.getResultCode()); - * - * CachedHttpTransaction txn = new CachedHttpTransaction(cachedRequest, cachedResponse); - * storageProvider.persistTransaction(txn); - * - * } - */ - - - result.setResolvedLinkFromServer(true); - LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_DONE_SEQ, url, String.valueOf(x + 1)); - - return result; - } - - try { - /* - * Sleep between re-tries to be nice to the target system. - */ - Thread.sleep(50); - } catch (InterruptedException exc) { - LOG.error(AaiUiMsgs.QUERY_AAI_WAIT_INTERRUPTION, exc.getLocalizedMessage()); - break; - } - LOG.error(AaiUiMsgs.QUERY_AAI_RETRY_FAILURE_WITH_SEQ, url, String.valueOf(x + 1)); - } - - - result.setResolvedLinkFailure(true); - LOG.info(AaiUiMsgs.QUERY_AAI_RETRY_MAXED_OUT, url); - - return result; - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.rest.RestfulDataAccessor#shutdown() - */ - @Override - public void shutdown() { - // TODO Auto-generated method stub - - if (entityCache != null) { - entityCache.shutdown(); - } - - } - - -} diff --git a/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryDataProvider.java b/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryDataProvider.java index 21fb4e6..75e7a54 100644 --- a/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryDataProvider.java +++ b/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryDataProvider.java @@ -30,6 +30,11 @@ import org.onap.aai.sparky.dal.rest.RestDataProvider; /** * The Interface ActiveInventoryDataProvider. 
*/ + +/* + * TODO: DELETE ME + */ + public interface ActiveInventoryDataProvider extends RestDataProvider { /** @@ -81,7 +86,7 @@ public interface ActiveInventoryDataProvider extends RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#shutdown() + * @see org.openecomp.sparky.dal.rest.RestDataProvider#shutdown() */ @Override void shutdown(); diff --git a/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java b/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java index c1ed906..6ffebef 100644 --- a/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java +++ b/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java @@ -29,10 +29,9 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicInteger; -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.restclient.client.OperationResult; import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.rest.OperationResult; + /** * The Class ActiveInventoryEntityStatistics. @@ -51,9 +50,6 @@ public class ActiveInventoryEntityStatistics { private static final String ERROR = "Error"; - private OxmModelLoader loader; - - private Map> activeInventoryEntityStatistics; /** @@ -76,30 +72,6 @@ public class ActiveInventoryEntityStatistics { } - /* - * private void createSearchableActiveInventoryEntityStatistics() { - * - * Map descriptors = loader.getSearchableEntityDescriptors(); - * - * if(descriptors == null) { return; } - * - * OxmEntityDescriptor d = null; for ( String key : descriptors.keySet() ) { d = - * descriptors.get(key); activeInventoryEntityStatistics.put(d.getEntityName(), - * createEntityOpStats()); } - * - * } - */ - - /* - * private void createCrossEntityReferenceActiveInventoryEntityStatistics() { - * - * Map descriptors = loader.getCrossReferenceEntityDescriptors(); - * - * - * } - */ - - /** * Initializecreate active inventory entity statistics. */ @@ -126,11 +98,8 @@ public class ActiveInventoryEntityStatistics { * * @param loader the loader */ - public ActiveInventoryEntityStatistics(OxmModelLoader loader) { - this.loader = loader; + public ActiveInventoryEntityStatistics() { activeInventoryEntityStatistics = new HashMap>(); - // createSearchableActiveInventoryEntityStatistics(); - // createCrossEntityReferenceActiveInventoryEntityStatistics(); reset(); } @@ -139,21 +108,29 @@ public class ActiveInventoryEntityStatistics { * * @param descriptors the descriptors */ - public void initializeCountersFromOxmEntityDescriptors( - Map descriptors) { + public void intializeEntityCounters(String... entityTypes) { + + if (entityTypes != null && entityTypes.length > 0) { + for (String entityType : entityTypes) { + activeInventoryEntityStatistics.put(entityType, createEntityOpStats()); + } - if (descriptors == null) { - return; } - OxmEntityDescriptor descriptor = null; - for (String key : descriptors.keySet()) { - descriptor = descriptors.get(key); - activeInventoryEntityStatistics.put(descriptor.getEntityName(), createEntityOpStats()); + } + + public void intializeEntityCounters(Set entityTypes) { + + if (entityTypes != null && entityTypes.size() > 0) { + for (String entityType : entityTypes) { + activeInventoryEntityStatistics.put(entityType, createEntityOpStats()); + } } + } + /** * Reset. 
*/ @@ -230,8 +207,8 @@ public class ActiveInventoryEntityStatistics { opStats.get(NO_PAYLOAD).incrementAndGet(); } - if (or.getNumRequestRetries() > 0) { - opStats.get(NUM_RETRIES).addAndGet(or.getNumRequestRetries()); + if (or.getNumRetries() > 0) { + opStats.get(NUM_RETRIES).addAndGet(or.getNumRetries()); } } diff --git a/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java b/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java index eb4eb6c..329d0f0 100644 --- a/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java +++ b/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java @@ -22,12 +22,13 @@ */ package org.onap.aai.sparky.dal.aai; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; import org.onap.aai.sparky.analytics.AbstractStatistics; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.rest.OperationResult; import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; /** * The Class ActiveInventoryProcessingExceptionStatistics. diff --git a/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventoryConfig.java b/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventoryConfig.java index e88ca51..d311993 100644 --- a/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventoryConfig.java +++ b/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventoryConfig.java @@ -26,30 +26,25 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.Properties; -import javax.ws.rs.core.UriBuilder; - +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.config.TaskProcessorConfig; import org.onap.aai.sparky.util.ConfigHelper; import org.onap.aai.sparky.util.Encryptor; import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; /** * The Class ActiveInventoryConfig. */ public class ActiveInventoryConfig { - - public static final String CONFIG_FILE = TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "aai.properties"; private static ActiveInventoryConfig instance; + private static final Logger LOG = LoggerFactory.getInstance().getLogger(ActiveInventoryConfig.class); - private static final String HTTP_SCHEME = "http"; - private static final String HTTPS_SCHEME = "https"; + public static ActiveInventoryConfig getConfig() throws Exception { if (instance == null) { @@ -61,7 +56,6 @@ public class ActiveInventoryConfig { private ActiveInventoryRestConfig aaiRestConfig; private ActiveInventorySslConfig aaiSslConfig; - private TaskProcessorConfig taskProcessorConfig; /** * Instantiates a new active inventory config. 
@@ -71,37 +65,18 @@ public class ActiveInventoryConfig { protected ActiveInventoryConfig() throws Exception { Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE); - aaiRestConfig = new ActiveInventoryRestConfig(props); - aaiSslConfig = new ActiveInventorySslConfig(props, new Encryptor()); - - taskProcessorConfig = new TaskProcessorConfig(); - taskProcessorConfig - .initializeFromProperties(ConfigHelper.getConfigWithPrefix("aai.taskProcessor", props)); - - + initialize(props); } - protected ActiveInventoryConfig(Properties props) throws Exception { + public ActiveInventoryConfig(Properties props) throws Exception { + initialize(props); + } + private void initialize(Properties props) { aaiRestConfig = new ActiveInventoryRestConfig(props); aaiSslConfig = new ActiveInventorySslConfig(props, new Encryptor()); - - taskProcessorConfig = new TaskProcessorConfig(); - taskProcessorConfig - .initializeFromProperties(ConfigHelper.getConfigWithPrefix("aai.taskProcessor", props)); - - } - public TaskProcessorConfig getTaskProcessorConfig() { - return taskProcessorConfig; - } - - public void setTaskProcessorConfig(TaskProcessorConfig taskProcessorConfig) { - this.taskProcessorConfig = taskProcessorConfig; - } - - public ActiveInventoryRestConfig getAaiRestConfig() { return aaiRestConfig; } @@ -118,35 +93,11 @@ public class ActiveInventoryConfig { this.aaiSslConfig = aaiSslConfig; } - public String repairSelfLink(String selflink) { - - if (selflink == null) { - return selflink; - } - - UriBuilder builder = UriBuilder.fromPath(selflink).host(aaiRestConfig.getHost()) - .port(Integer.parseInt(aaiRestConfig.getPort())); - switch (aaiRestConfig.getAuthenticationMode()) { - - case SSL_BASIC: - case SSL_CERT: { - builder.scheme(HTTPS_SCHEME); - break; - } - - default: { - builder.scheme(HTTP_SCHEME); - } - } - - return builder.build().toString(); - - } public static String extractResourcePath(String selflink) { try { - return new URI(selflink).getPath(); + return new URI(selflink).getRawPath(); } catch (URISyntaxException uriSyntaxException) { LOG.error(AaiUiMsgs.ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK, uriSyntaxException.getMessage()); @@ -165,9 +116,6 @@ public class ActiveInventoryConfig { + aaiSslConfig + "]"; } - public URI getBaseUri() { - return UriBuilder.fromUri("https://" + aaiRestConfig.getHost() + ":" + aaiRestConfig.getPort() - + aaiRestConfig.getResourceBasePath()).build(); - } + } diff --git a/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventoryRestConfig.java b/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventoryRestConfig.java index 5ed537b..617a74c 100644 --- a/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventoryRestConfig.java +++ b/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventoryRestConfig.java @@ -36,27 +36,6 @@ public class ActiveInventoryRestConfig { private String host; - /** - * @return the cacheFailures - */ - public boolean isCacheFailures() { - return cacheFailures; - } - - /** - * @param cacheFailures the cacheFailures to set - */ - public void setCacheFailures(boolean cacheFailures) { - this.cacheFailures = cacheFailures; - } - - /** - * @param shallowEntities the shallowEntities to set - */ - public void setShallowEntities(List shallowEntities) { - this.shallowEntities = shallowEntities; - } - private String port; private int connectTimeoutInMs; @@ -67,20 +46,6 @@ public class ActiveInventoryRestConfig { private int numResolverWorkers; - private boolean useCacheOnly; - - private boolean cacheEnabled; - - 
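A minimal, self-contained sketch (illustrative only; not part of this change set, and the self-link below is hypothetical) of the behavioral difference behind ActiveInventoryConfig.extractResourcePath() now calling URI.getRawPath() instead of URI.getPath(): getRawPath() returns the path with its percent-encoding intact, while getPath() decodes it.

import java.net.URI;

public class RawPathSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical AAI self-link whose last path segment contains an encoded '/' (%2F).
    URI selflink =
        new URI("https://aai-host:8443/aai/v11/network/generic-vnfs/generic-vnf/vnf%2F01");

    // getPath() decodes the escape, silently altering the resource path.
    System.out.println(selflink.getPath());    // /aai/v11/network/generic-vnfs/generic-vnf/vnf/01

    // getRawPath() returns the path exactly as encoded, preserving the original link.
    System.out.println(selflink.getRawPath()); // /aai/v11/network/generic-vnfs/generic-vnf/vnf%2F01
  }
}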
private boolean cacheFailures; - - private String storageFolderOverride; - - int numCacheWorkers; - - private long maxTimeToLiveInMs; - - private String resourceBasePath; - private List shallowEntities; private RestAuthenticationMode authenticationMode; @@ -96,13 +61,12 @@ public class ActiveInventoryRestConfig { */ public ActiveInventoryRestConfig(Properties props) { - if (props == null) { + if (props == null || props.isEmpty()) { return; } Properties restProps = ConfigHelper.getConfigWithPrefix("aai.rest", props); - resourceBasePath = restProps.getProperty("resourceBasePath", "/aai/v7"); host = restProps.getProperty("host", "localhost"); port = restProps.getProperty("port", "8443"); numRequestRetries = Integer.parseInt(restProps.getProperty("numRequestRetries", "5")); @@ -114,23 +78,6 @@ public class ActiveInventoryRestConfig { String shallowEntitiesProperty = restProps.getProperty("shallowEntities", ""); shallowEntities = Arrays.asList(shallowEntitiesProperty.split(",")); - Properties cacheProps = ConfigHelper.getConfigWithPrefix("aai.rest.cache", props); - cacheEnabled = Boolean.parseBoolean(cacheProps.getProperty("enabled", "false")); - storageFolderOverride = cacheProps.getProperty("storageFolderOverride", null); - cacheFailures = Boolean.parseBoolean(cacheProps.getProperty("cacheFailures", "false")); - useCacheOnly = Boolean.parseBoolean(cacheProps.getProperty("useCacheOnly", "false")); - numCacheWorkers = Integer.parseInt(cacheProps.getProperty("numWorkers", "5")); - - - if (storageFolderOverride != null && storageFolderOverride.length() == 0) { - storageFolderOverride = null; - } - /* - * The expectation of this parameter is that if the value > 0, then the cached resources will be - * served back instead of dipping AAI/DataLayer as long as the current resource age from the - * cached instance is < maxTimeToLiveInMs. - */ - maxTimeToLiveInMs = Long.parseLong(cacheProps.getProperty("maxTimeToLiveInMs", "-1")); authenticationMode = RestAuthenticationMode.getRestAuthenticationMode(restProps.getProperty("authenticationMode", RestAuthenticationMode.SSL_CERT.getAuthenticationModeLabel())); @@ -154,26 +101,6 @@ public class ActiveInventoryRestConfig { this.authenticationMode = authenticationMode; } - public int getNumCacheWorkers() { - return numCacheWorkers; - } - - public void setNumCacheWorkers(int numCacheWorkers) { - this.numCacheWorkers = numCacheWorkers; - } - - /** - * Should cache failures. - * - * @return true, if successful - */ - public boolean shouldCacheFailures() { - return cacheFailures; - } - - public void setShouldCacheFailures(boolean enabled) { - this.cacheFailures = enabled; - } /** * Checks if is shallow entity. 
@@ -195,14 +122,6 @@ public class ActiveInventoryRestConfig { return false; } - public boolean isUseCacheOnly() { - return useCacheOnly; - } - - public void setUseCacheOnly(boolean useCacheOnly) { - this.useCacheOnly = useCacheOnly; - } - public int getNumResolverWorkers() { return numResolverWorkers; } @@ -211,30 +130,6 @@ public class ActiveInventoryRestConfig { this.numResolverWorkers = numResolverWorkers; } - public long getMaxTimeToLiveInMs() { - return maxTimeToLiveInMs; - } - - public void setMaxTimeToLiveInMs(long maxTimeToLiveInMs) { - this.maxTimeToLiveInMs = maxTimeToLiveInMs; - } - - public boolean isCacheEnabled() { - return cacheEnabled; - } - - public void setCacheEnabled(boolean cacheEnabled) { - this.cacheEnabled = cacheEnabled; - } - - public String getStorageFolderOverride() { - return storageFolderOverride; - } - - public void setStorageFolderOverride(String storageFolderOverride) { - this.storageFolderOverride = storageFolderOverride; - } - public String getHost() { return host; } @@ -243,10 +138,6 @@ public class ActiveInventoryRestConfig { return port; } - public String getResourceBasePath() { - return resourceBasePath; - } - public void setHost(String host) { this.host = host; } @@ -255,29 +146,6 @@ public class ActiveInventoryRestConfig { this.port = port; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - - - public void setResourceBasePath(String resourceBasePath) { - this.resourceBasePath = resourceBasePath; - } - - @Override - public String toString() { - return "ActiveInventoryRestConfig [host=" + host + ", port=" + port + ", connectTimeoutInMs=" - + connectTimeoutInMs + ", readTimeoutInMs=" + readTimeoutInMs + ", numRequestRetries=" - + numRequestRetries + ", numResolverWorkers=" + numResolverWorkers + ", useCacheOnly=" - + useCacheOnly + ", cacheEnabled=" + cacheEnabled + ", cacheFailures=" + cacheFailures - + ", storageFolderOverride=" + storageFolderOverride + ", numCacheWorkers=" - + numCacheWorkers + ", maxTimeToLiveInMs=" + maxTimeToLiveInMs + ", resourceBasePath=" - + resourceBasePath + ", shallowEntities=" + shallowEntities + ", authenticationMode=" - + authenticationMode + "]"; - } - public int getConnectTimeoutInMs() { return connectTimeoutInMs; } diff --git a/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventorySslConfig.java b/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventorySslConfig.java index 080a787..75ce36a 100644 --- a/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventorySslConfig.java +++ b/src/main/java/org/onap/aai/sparky/dal/aai/config/ActiveInventorySslConfig.java @@ -56,7 +56,7 @@ public class ActiveInventorySslConfig { */ public ActiveInventorySslConfig(Properties props, Encryptor encryptor) { - if (props == null) { + if (props == null || props.isEmpty()) { return; } @@ -197,20 +197,6 @@ public class ActiveInventorySslConfig { return "Basic " + java.util.Base64.getEncoder().encodeToString(usernameAndPassword.getBytes()); } - /** - * @return the enableSslDebug - */ - public boolean isEnableSslDebug() { - return enableSslDebug; - } - - /** - * @param enableSslDebug the enableSslDebug to set - */ - public void setEnableSslDebug(boolean enableSslDebug) { - this.enableSslDebug = enableSslDebug; - } - /* * (non-Javadoc) * diff --git a/src/main/java/org/onap/aai/sparky/dal/cache/EntityCache.java b/src/main/java/org/onap/aai/sparky/dal/cache/EntityCache.java deleted file mode 100644 index 04baf7c..0000000 --- a/src/main/java/org/onap/aai/sparky/dal/cache/EntityCache.java +++ /dev/null 
@@ -1,60 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.dal.cache; - -import org.onap.aai.sparky.dal.rest.OperationResult; - -/** - * The Interface EntityCache. - * - * @author davea. - */ -public interface EntityCache { - - /** - * Gets the. - * - * @param entityKey the entity key - * @param link the link - * @return the operation result - */ - public OperationResult get(String entityKey, String link); - - /** - * Put. - * - * @param entityKey the entity key - * @param result the result - */ - public void put(String entityKey, OperationResult result); - - /** - * Shutdown. - */ - public void shutdown(); - - /** - * Clear. - */ - public void clear(); -} diff --git a/src/main/java/org/onap/aai/sparky/dal/cache/InMemoryEntityCache.java b/src/main/java/org/onap/aai/sparky/dal/cache/InMemoryEntityCache.java deleted file mode 100644 index 5245e29..0000000 --- a/src/main/java/org/onap/aai/sparky/dal/cache/InMemoryEntityCache.java +++ /dev/null @@ -1,107 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.dal.cache; - -import java.util.concurrent.ConcurrentHashMap; - -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - -/** - * The Class InMemoryEntityCache. - * - * @author davea. 
- */ -public class InMemoryEntityCache implements EntityCache { - - private ConcurrentHashMap cachedEntityData; - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(InMemoryEntityCache.class); - - /** - * Instantiates a new in memory entity cache. - */ - public InMemoryEntityCache() { - cachedEntityData = new ConcurrentHashMap(); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.cache.EntityCache#put(java.lang.String, - * org.onap.aai.sparky.dal.rest.OperationResult) - */ - @Override - public void put(String key, OperationResult data) { - if (data == null) { - return; - } - - if (cachedEntityData.putIfAbsent(key, data) != null) { - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DATA_CACHE_SUCCESS, key); - } - } - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.cache.EntityCache#get(java.lang.String, java.lang.String) - */ - @Override - public OperationResult get(String entityKey, String link) { - - if (link != null) { - return cachedEntityData.get(link); - } - - return null; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.cache.EntityCache#shutdown() - */ - @Override - public void shutdown() { - // TODO Auto-generated method stub - // nothing to do - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.cache.EntityCache#clear() - */ - @Override - public void clear() { - cachedEntityData.clear(); - } - -} diff --git a/src/main/java/org/onap/aai/sparky/dal/cache/PersistentEntityCache.java b/src/main/java/org/onap/aai/sparky/dal/cache/PersistentEntityCache.java deleted file mode 100644 index f64b3c7..0000000 --- a/src/main/java/org/onap/aai/sparky/dal/cache/PersistentEntityCache.java +++ /dev/null @@ -1,256 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.dal.cache; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.nio.file.LinkOption; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; - -import org.onap.aai.sparky.dal.aai.ActiveInventoryAdapter; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.task.PersistOperationResultToDisk; -import org.onap.aai.sparky.synchronizer.task.RetrieveOperationResultFromDisk; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * The Class PersistentEntityCache. - */ -public class PersistentEntityCache implements EntityCache { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(ActiveInventoryAdapter.class); - - /* - * TODO:
  • implement time-to-live on the cache, maybe pull in one of Guava's eviction caches? - *
  • implement abstract-base-cache to hold common cach-y things (like ttl) - */ - - private static final String DEFAULT_OUTPUT_PATH = "offlineEntityCache"; - private ExecutorService persistentExecutor; - private ObjectMapper mapper; - private String storagePath; - - /** - * Instantiates a new persistent entity cache. - */ - public PersistentEntityCache() { - this(null, 10); - } - - /** - * Instantiates a new persistent entity cache. - * - * @param numWorkers the num workers - */ - public PersistentEntityCache(int numWorkers) { - this(null, numWorkers); - } - - /** - * Instantiates a new persistent entity cache. - * - * @param storageFolderOverride the storage folder override - * @param numWorkers the num workers - */ - public PersistentEntityCache(String storageFolderOverride, int numWorkers) { - persistentExecutor = NodeUtils.createNamedExecutor("PEC", numWorkers, LOG); - mapper = new ObjectMapper(); - - if (storageFolderOverride != null && storageFolderOverride.length() > 0) { - this.storagePath = storageFolderOverride; - } else { - this.storagePath = DEFAULT_OUTPUT_PATH; - } - } - - /** - * Generate offline storage path from uri. - * - * @param link the link - * @return the string - */ - private String generateOfflineStoragePathFromUri(String link) { - - try { - URI uri = new URI(link); - - String modHost = uri.getHost().replace(".", "_"); - - String[] tokens = uri.getPath().split("\\/"); - List resourcePathAndDomain = new ArrayList(); - - if (tokens.length >= 4) { - - int numElements = 0; - for (String w : tokens) { - - if (numElements > 3) { - break; - } - - if (w.length() > 0) { - resourcePathAndDomain.add(w); - numElements++; - } - - } - } else { - return this.storagePath + "\\"; - } - - return this.storagePath + "\\" + modHost + "\\" - + NodeUtils.concatArray(resourcePathAndDomain, "_") + "\\"; - - } catch (Exception exc) { - LOG.error(AaiUiMsgs.OFFLINE_STORAGE_PATH_ERROR, link, exc.getMessage()); - } - - return this.storagePath + "\\"; - - } - - /** - * Creates the dirs. - * - * @param directoryPath the directory path - */ - private void createDirs(String directoryPath) { - if (directoryPath == null) { - return; - } - - Path path = Paths.get(directoryPath); - // if directory exists? - if (!Files.exists(path)) { - try { - Files.createDirectories(path); - } catch (IOException exc) { - LOG.error(AaiUiMsgs.DISK_CREATE_DIR_IO_ERROR, exc.getMessage()); - } - } - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.cache.EntityCache#get(java.lang.String, java.lang.String) - */ - @Override - public OperationResult get(String key, String link) { - - final String storagePath = generateOfflineStoragePathFromUri(link); - createDirs(storagePath); - final String persistentFileName = storagePath + "\\" + key + ".json"; - - CompletableFuture task = supplyAsync( - new RetrieveOperationResultFromDisk(persistentFileName, mapper, LOG), persistentExecutor); - - try { - /* - * this will do a blocking get, but it will be blocking only on the thread that executed this - * method which should be one of the persistentWorker threads from the executor. 
- */ - return task.get(); - } catch (InterruptedException | ExecutionException exc) { - // TODO Auto-generated catch block - LOG.error(AaiUiMsgs.DISK_NAMED_DATA_READ_IO_ERROR, "txn", exc.getMessage()); - } - - return null; - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.cache.EntityCache#put(java.lang.String, - * org.onap.aai.sparky.dal.rest.OperationResult) - */ - @Override - public void put(String key, OperationResult data) { - - final String storagePath = generateOfflineStoragePathFromUri(data.getRequestLink()); - createDirs(storagePath); - final String persistentFileName = storagePath + "\\" + key + ".json"; - - Path persistentFilePath = Paths.get(persistentFileName); - - if (!Files.exists(persistentFilePath, LinkOption.NOFOLLOW_LINKS)) { - - supplyAsync(new PersistOperationResultToDisk(persistentFileName, data, mapper, LOG), - persistentExecutor).whenComplete((opResult, error) -> { - - if (error != null) { - LOG.error(AaiUiMsgs.DISK_DATA_WRITE_IO_ERROR, "entity", error.getMessage()); - } - - }); - } - - } - - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.cache.EntityCache#shutdown() - */ - @Override - public void shutdown() { - if (persistentExecutor != null) { - persistentExecutor.shutdown(); - } - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.cache.EntityCache#clear() - */ - @Override - public void clear() { - /* - * do nothing for this one, as it is not clear if we we really want to clear on the on-disk - * cache or not - */ - } - -} diff --git a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchAdapter.java b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchAdapter.java deleted file mode 100644 index 9962bcb..0000000 --- a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchAdapter.java +++ /dev/null @@ -1,213 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.dal.elasticsearch; - -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestDataProvider; -import org.onap.aai.sparky.dal.rest.RestfulDataAccessor; - -/** - * The Class ElasticSearchAdapter. - * - * @author davea. 
- */ -public class ElasticSearchAdapter implements ElasticSearchDataProvider { - - private static final String BULK_IMPORT_INDEX_TEMPLATE = - "{\"index\":{\"_index\":\"%s\",\"_type\":\"%s\",\"_id\":\"%s\", \"_version\":\"%s\"}}\n"; - - private final RestDataProvider restDataProvider; - private final ElasticSearchConfig esConfig; - - /** - * Instantiates a new elastic search adapter. - * - * @param provider the provider - */ - public ElasticSearchAdapter(RestDataProvider provider, ElasticSearchConfig esConfig) { - this.restDataProvider = provider; - this.esConfig = esConfig; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doGet(java.lang.String, java.lang.String) - */ - @Override - public OperationResult doGet(String url, String acceptContentType) { - return restDataProvider.doGet(url, acceptContentType); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doDelete(java.lang.String, java.lang.String) - */ - @Override - public OperationResult doDelete(String url, String acceptContentType) { - return restDataProvider.doDelete(url, acceptContentType); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doPost(java.lang.String, java.lang.String, - * java.lang.String) - */ - @Override - public OperationResult doPost(String url, String jsonPayload, String acceptContentType) { - return restDataProvider.doPost(url, jsonPayload, acceptContentType); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doPut(java.lang.String, java.lang.String, - * java.lang.String) - */ - @Override - public OperationResult doPut(String url, String jsonPayload, String acceptContentType) { - return restDataProvider.doPut(url, jsonPayload, acceptContentType); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doPatch(java.lang.String, java.lang.String, - * java.lang.String) - */ - @Override - public OperationResult doPatch(String url, String jsonPayload, String acceptContentType) { - return restDataProvider.doPatch(url, jsonPayload, acceptContentType); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doHead(java.lang.String, java.lang.String) - */ - @Override - public OperationResult doHead(String url, String acceptContentType) { - return restDataProvider.doHead(url, acceptContentType); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#clearCache() - */ - @Override - public void clearCache() { - restDataProvider.clearCache(); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.elasticsearch.ElasticSearchDataProvider#doBulkOperation(java.lang. - * String, java.lang.String) - */ - @Override - public OperationResult doBulkOperation(String url, String payload) { - - return doRestfulOperation(HttpMethod.PUT, url, payload, - RestfulDataAccessor.APPLICATION_X_WWW_FORM_URL_ENCODED, - RestfulDataAccessor.APPLICATION_JSON); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.dal.elasticsearch.ElasticSearchDataProvider#shutdown() - */ - @Override - public void shutdown() { - restDataProvider.shutdown(); - } - - /* - * (non-Javadoc) - * - * @see - * org.onap.aai.sparky.dal.rest.RestDataProvider#doRestfulOperation(org.onap.aai.sparky.dal.rest. 
- * HttpMethod, java.lang.String, java.lang.String, java.lang.String, java.lang.String) - */ - @Override - public OperationResult doRestfulOperation(HttpMethod method, String url, String payload, - String payloadType, String acceptContentType) { - return restDataProvider.doRestfulOperation(method, url, payload, payloadType, - acceptContentType); - } - - /* - * (non-Javadoc) - * - * @see - * org.onap.aai.sparky.dal.elasticsearch.ElasticSearchDataProvider#buildBulkImportOperationRequest - * (java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String) - */ - @Override - public String buildBulkImportOperationRequest(String index, String type, String id, - String version, String payload) { - - StringBuilder requestPayload = new StringBuilder(128); - - requestPayload.append(String.format(BULK_IMPORT_INDEX_TEMPLATE, index, type, id, version)); - requestPayload.append(payload).append("\n"); - - return requestPayload.toString(); - - } - - @Override - public OperationResult retrieveEntityById(String entityId) throws Exception { - - String url = esConfig.getElasticFullUrl("/" + entityId); - return doGet(url, "application/json"); - } - - /** - * @return the bulkImportIndexTemplate - */ - public static String getBulkImportIndexTemplate() { - return BULK_IMPORT_INDEX_TEMPLATE; - } - - /** - * @return the restDataProvider - */ - public RestDataProvider getRestDataProvider() { - return restDataProvider; - } - - /** - * @return the esConfig - */ - public ElasticSearchConfig getEsConfig() { - return esConfig; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchDataProvider.java b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchDataProvider.java index 416e251..90075fe 100644 --- a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchDataProvider.java +++ b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchDataProvider.java @@ -25,6 +25,10 @@ package org.onap.aai.sparky.dal.elasticsearch; import org.onap.aai.sparky.dal.rest.OperationResult; import org.onap.aai.sparky.dal.rest.RestDataProvider; +/* + * TODO: DELETE ME + */ + /** * The Interface ElasticSearchDataProvider. */ @@ -57,7 +61,7 @@ public interface ElasticSearchDataProvider extends RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#shutdown() + * @see org.openecomp.sparky.dal.rest.RestDataProvider#shutdown() */ @Override void shutdown(); diff --git a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java index 50d318b..ba012bd 100644 --- a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java +++ b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java @@ -29,11 +29,10 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicInteger; -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.restclient.client.OperationResult; import org.onap.aai.sparky.dal.NetworkTransaction; import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; + /** * The Class ElasticSearchEntityStatistics. 
@@ -48,7 +47,6 @@ public class ElasticSearchEntityStatistics { private static final String ERROR = "ERROR"; private Map> entityStatistics; - private OxmModelLoader loader; /** * Creates the entity op stats. @@ -70,19 +68,6 @@ public class ElasticSearchEntityStatistics { } - /* - * private void createActiveInventoryEntityStatistics() { - * - * Map descriptors = loader.getSearchableEntityDescriptors(); - * - * if(descriptors == null) { return; } - * - * OxmEntityDescriptor d = null; for ( String key : descriptors.keySet() ) { d = - * descriptors.get(key); entityStatistics.put(d.getEntityName(), createEntityOpStats()); } - * - * } - */ - /** * Initializecreate active inventory entity statistics. */ @@ -109,10 +94,8 @@ public class ElasticSearchEntityStatistics { * * @param loader the loader */ - public ElasticSearchEntityStatistics(OxmModelLoader loader) { - this.loader = loader; + public ElasticSearchEntityStatistics() { entityStatistics = new HashMap>(); - // createActiveInventoryEntityStatistics(); reset(); } @@ -121,18 +104,25 @@ public class ElasticSearchEntityStatistics { * * @param descriptors the descriptors */ - public void initializeCountersFromOxmEntityDescriptors( - Map descriptors) { + public void intializeEntityCounters(String... entityTypes) { + + if (entityTypes != null && entityTypes.length > 0) { + for (String entityType : entityTypes) { + entityStatistics.put(entityType, createEntityOpStats()); + } - if (descriptors == null) { - return; } - OxmEntityDescriptor descriptor = null; - for (String key : descriptors.keySet()) { - descriptor = descriptors.get(key); - entityStatistics.put(descriptor.getEntityName(), createEntityOpStats()); + } + + public void intializeEntityCounters(Set entityTypes) { + + if (entityTypes != null && entityTypes.size() > 0) { + for (String entityType : entityTypes) { + entityStatistics.put(entityType, createEntityOpStats()); + } } + } /** diff --git a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/HashQueryResponse.java b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/HashQueryResponse.java index 646916b..8abf20f 100644 --- a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/HashQueryResponse.java +++ b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/HashQueryResponse.java @@ -22,8 +22,7 @@ */ package org.onap.aai.sparky.dal.elasticsearch; -import org.json.JSONObject; -import org.onap.aai.sparky.dal.rest.OperationResult; +import org.onap.aai.restclient.client.OperationResult; public class HashQueryResponse { private String jsonPayload = null; diff --git a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/SearchAdapter.java b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/SearchAdapter.java index 200f405..c4e81b7 100644 --- a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/SearchAdapter.java +++ b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/SearchAdapter.java @@ -29,20 +29,17 @@ import java.util.Map; import javax.ws.rs.core.MediaType; -import org.onap.aai.sparky.dal.rest.OperationResult; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.Headers; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; import org.onap.aai.sparky.dal.sas.config.SearchServiceConfig; import org.onap.aai.sparky.util.Encryptor; import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; 
import org.slf4j.MDC; -import org.onap.aai.restclient.client.RestClient; -import org.onap.aai.restclient.enums.RestAuthenticationMode; -import org.onap.aai.restclient.client.Headers; -import org.onap.aai.cl.mdc.MdcContext; - -import org.onap.aai.cl.mdc.MdcContext; /** * The Class SearchAdapter. @@ -53,41 +50,6 @@ public class SearchAdapter { private RestClient client; - /** - * @return the client - */ - public RestClient getClient() { - return client; - } - - /** - * @param client the client to set - */ - public void setClient(RestClient client) { - this.client = client; - } - - /** - * @return the commonHeaders - */ - public Map> getCommonHeaders() { - return commonHeaders; - } - - /** - * @param commonHeaders the commonHeaders to set - */ - public void setCommonHeaders(Map> commonHeaders) { - this.commonHeaders = commonHeaders; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - private Map> commonHeaders; private SearchServiceConfig sasConfig; @@ -99,8 +61,8 @@ public class SearchAdapter { public SearchAdapter() throws Exception { sasConfig = SearchServiceConfig.getConfig(); Encryptor encryptor = new Encryptor(); - client = new RestClient().authenticationMode(RestAuthenticationMode.SSL_CERT) - .validateServerHostname(false).validateServerCertChain(false) + + client = new RestClient().validateServerHostname(false).validateServerCertChain(false) .clientCertFile(TierSupportUiConstants.CONFIG_AUTH_LOCATION + sasConfig.getCertName()) .clientCertPassword(encryptor.decryptValue(sasConfig.getKeystorePassword())) .trustStore(TierSupportUiConstants.CONFIG_AUTH_LOCATION + sasConfig.getKeystore()); @@ -119,27 +81,25 @@ public class SearchAdapter { } public OperationResult doPost(String url, String jsonPayload, String acceptContentType) { - org.onap.aai.restclient.client.OperationResult or = client.post(url, jsonPayload, - getTxnHeader(), MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE); + OperationResult or = client.post(url, jsonPayload, getTxnHeader(), + MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE); return new OperationResult(or.getResultCode(), or.getResult()); } public OperationResult doGet(String url, String acceptContentType) { - org.onap.aai.restclient.client.OperationResult or = - client.get(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE); + OperationResult or = client.get(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE); return new OperationResult(or.getResultCode(), or.getResult()); } public OperationResult doPut(String url, String payload, String acceptContentType) { - org.onap.aai.restclient.client.OperationResult or = client.put(url, payload, getTxnHeader(), - MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE); + OperationResult or = client.put(url, payload, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE, + MediaType.APPLICATION_JSON_TYPE); return new OperationResult(or.getResultCode(), or.getResult()); } public OperationResult doDelete(String url, String acceptContentType) { - org.onap.aai.restclient.client.OperationResult or = - client.delete(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE); + OperationResult or = client.delete(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE); return new OperationResult(or.getResultCode(), or.getResult()); } diff --git a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/config/ElasticSearchConfig.java b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/config/ElasticSearchConfig.java index 68e4151..c3c27f8 100644 --- 
a/src/main/java/org/onap/aai/sparky/dal/elasticsearch/config/ElasticSearchConfig.java +++ b/src/main/java/org/onap/aai/sparky/dal/elasticsearch/config/ElasticSearchConfig.java @@ -22,21 +22,11 @@ */ package org.onap.aai.sparky.dal.elasticsearch.config; -import java.io.BufferedReader; -import java.io.FileReader; -import java.io.IOException; import java.util.Properties; -import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException; -import org.onap.aai.sparky.synchronizer.config.TaskProcessorConfig; import org.onap.aai.sparky.util.ConfigHelper; import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - /** * The Class ElasticSearchConfig. @@ -64,9 +54,7 @@ public class ElasticSearchConfig { private String settingsFileName; - private int syncAdapterMaxConcurrentWorkers; - - private String auditIndexName; + private String topographicalSearchIndex; private String entityCountHistoryIndex; @@ -112,16 +100,6 @@ public class ElasticSearchConfig { private static final String BULK_API = "_bulk"; - private TaskProcessorConfig processorConfig; - - public TaskProcessorConfig getProcessorConfig() { - return processorConfig; - } - - public void setProcessorConfig(TaskProcessorConfig processorConfig) { - this.processorConfig = processorConfig; - } - public static ElasticSearchConfig getConfig() throws Exception { if (instance == null) { @@ -174,6 +152,10 @@ public class ElasticSearchConfig { private void initializeProperties() { Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE); + if (props == null || props.isEmpty()) { + return; + } + ipAddress = props.getProperty("elasticsearch.ipAddress", IP_ADDRESS_DEFAULT); httpPort = props.getProperty("elasticsearch.httpPort", "" + HTTP_PORT_DEFAULT); javaApiPort = props.getProperty("elasticsearch.javaApiPort", "" + JAVA_API_PORT_DEFAULT); @@ -182,9 +164,11 @@ public class ElasticSearchConfig { indexName = props.getProperty("elasticsearch.indexName", INDEX_NAME_DEFAULT); mappingsFileName = props.getProperty("elasticsearch.mappingsFileName"); settingsFileName = props.getProperty("elasticsearch.settingsFileName"); - auditIndexName = props.getProperty("elasticsearch.auditIndexName", AUDIT_INDEX_NAME_DEFAULT); + topographicalSearchIndex = + props.getProperty("elasticsearch.topographicalIndexName", TOPOGRAPHICAL_INDEX_NAME_DEFAULT); entityCountHistoryIndex = props.getProperty("elasticsearch.entityCountHistoryIndexName", ENTITY_COUNT_HISTORY_INDEX_NAME_DEFAULT); + entityCountHistoryMappingsFileName = props.getProperty("elasticsearch.entityCountHistoryMappingsFileName"); @@ -197,13 +181,6 @@ public class ElasticSearchConfig { dynamicMappingsFileName = props.getProperty("elasticsearch.dynamicMappingsFileName", ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT); - syncAdapterMaxConcurrentWorkers = - Integer.parseInt(props.getProperty("elasticsearch.syncAdapter.maxConcurrentWorkers", "5")); - - processorConfig = new TaskProcessorConfig(); - processorConfig.initializeFromProperties( - ConfigHelper.getConfigWithPrefix("elasticsearch.taskProcessor", props)); - } public String getIpAddress() { @@ -266,24 +243,16 @@ public class ElasticSearchConfig { return settingsFileName; } - public int getSyncAdapterMaxConcurrentWorkers() { - return syncAdapterMaxConcurrentWorkers; - } - - public void setSyncAdapterMaxConcurrentWorkers(int 
syncAdapterMaxConcurrentWorkers) { - this.syncAdapterMaxConcurrentWorkers = syncAdapterMaxConcurrentWorkers; - } - public void setSettingsFileName(String settingsFileName) { this.settingsFileName = settingsFileName; } - public String getAuditIndexName() { - return auditIndexName; + public String getTopographicalSearchIndex() { + return topographicalSearchIndex; } - public void setAuditIndexName(String auditIndexName) { - this.auditIndexName = auditIndexName; + public void setTopographicalSearchIndex(String topographicalSearchIndex) { + this.topographicalSearchIndex = topographicalSearchIndex; } public String getEntityCountHistoryIndex() { @@ -309,55 +278,6 @@ public class ElasticSearchConfig { return String.format("http://%s:%s/%s", url, port, BULK_API); } - public String getConfigAsString(String configItem, String configFileName) - throws ElasticSearchOperationException { - String indexConfig = null; - - try { - indexConfig = ConfigHelper.getFileContents(configFileName); - } catch (IOException exc) { - throw new ElasticSearchOperationException( - "Failed to read index " + configItem + " from file = " + configFileName + ".", exc); - } - - if (indexConfig == null) { - throw new ElasticSearchOperationException( - "Failed to load index " + configItem + " with filename = " + configFileName + "."); - } - return indexConfig; - } - - public String getElasticSearchSettings() throws ElasticSearchOperationException { - return getConfigAsString("settings", - TierSupportUiConstants.getConfigPath(this.getSettingsFileName())); - } - - public String getDynamicMappings() throws ElasticSearchOperationException { - return getConfigAsString("mapping", - TierSupportUiConstants.getConfigPath(this.getDynamicMappingsFileName())); - } - - public String getElasticSearchMappings() throws ElasticSearchOperationException { - return getConfigAsString("mapping", - TierSupportUiConstants.getConfigPath(this.getMappingsFileName())); - } - - public String getElasticSearchEntityCountHistoryMappings() - throws ElasticSearchOperationException { - return getConfigAsString("mapping", - TierSupportUiConstants.getConfigPath(this.getEntityCountHistoryMappingsFileName())); - } - - public String getAutosuggestIndexSettings() throws ElasticSearchOperationException { - return getConfigAsString("setting", - TierSupportUiConstants.getConfigPath(this.getAutoSuggestSettingsFileName())); - } - - public String getAutosuggestIndexMappings() throws ElasticSearchOperationException { - return getConfigAsString("mapping", - TierSupportUiConstants.getConfigPath(this.getAutoSuggestMappingsFileName())); - } - public String getAutosuggestIndexname() { return autosuggestIndexname; } @@ -382,266 +302,4 @@ public class ElasticSearchConfig { this.autoSuggestMappingsFileName = autoSuggestMappingsFileName; } - public String getDynamicMappingsFileName() { - return dynamicMappingsFileName; - } - - public void setDynamicMappingsFileName(String dynamicMappingsFileName) { - this.dynamicMappingsFileName = dynamicMappingsFileName; - } - - /** - * Builds the elastic search table config. 
- * - * @return the string - * @throws ElasticSearchOperationException the elastic search operation exception - */ - public String buildElasticSearchTableConfig() throws ElasticSearchOperationException { - - JsonNode esSettingsNode; - JsonNode esMappingsNodes; - ObjectMapper mapper = new ObjectMapper(); - - try { - esSettingsNode = mapper.readTree(getElasticSearchSettings()); - esMappingsNodes = mapper.readTree(getElasticSearchMappings()); - } catch (IOException e1) { - throw new ElasticSearchOperationException("Caught an exception building initial ES index"); - } - - ObjectNode esConfig = (ObjectNode) mapper.createObjectNode().set("settings", esSettingsNode); - ObjectNode mappings = (ObjectNode) mapper.createObjectNode().set(getType(), esMappingsNodes); - - esConfig.set("mappings", mappings); - - try { - return mapper.writeValueAsString(esConfig); - } catch (JsonProcessingException exc) { - throw new ElasticSearchOperationException("Error getting object node as string", exc); - } - - } - - /** - * Builds the elastic search entity count history table config. - * - * @return the string - * @throws ElasticSearchOperationException the elastic search operation exception - */ - public String buildElasticSearchEntityCountHistoryTableConfig() - throws ElasticSearchOperationException { - - JsonNode esSettingsNode; - JsonNode esMappingsNodes; - ObjectMapper mapper = new ObjectMapper(); - - try { - esSettingsNode = mapper.readTree(getElasticSearchSettings()); - esMappingsNodes = mapper.readTree(getElasticSearchEntityCountHistoryMappings()); - } catch (IOException e1) { - throw new ElasticSearchOperationException("Caught an exception building initial ES index"); - } - - ObjectNode esConfig = (ObjectNode) mapper.createObjectNode().set("settings", esSettingsNode); - ObjectNode mappings = (ObjectNode) mapper.createObjectNode().set(getType(), esMappingsNodes); - - esConfig.set("mappings", mappings); - - try { - return mapper.writeValueAsString(esConfig); - } catch (JsonProcessingException exc) { - throw new ElasticSearchOperationException("Error getting object node as string", exc); - } - - } - - public String buildAggregationTableConfig() throws ElasticSearchOperationException { - - JsonNode esMappingsNodes; - ObjectMapper mapper = new ObjectMapper(); - - try { - esMappingsNodes = mapper.readTree(this.getDynamicMappings()); - } catch (IOException e1) { - throw new ElasticSearchOperationException( - "Caught an exception building Aggreagation ES index"); - } - - ObjectNode mappings = (ObjectNode) mapper.createObjectNode().set(getType(), esMappingsNodes); - - ObjectNode indexConfig = (ObjectNode) mapper.createObjectNode().set("mappings", mappings); - - try { - return mapper.writeValueAsString(indexConfig); - } catch (JsonProcessingException exc) { - throw new ElasticSearchOperationException("Error getting object node as string", exc); - } - - } - - public String buildAutosuggestionTableConfig() throws ElasticSearchOperationException { - - JsonNode esSettingsNode; - JsonNode esMappingsNodes; - ObjectMapper mapper = new ObjectMapper(); - - try { - esSettingsNode = mapper.readTree(this.getAutosuggestIndexSettings()); - esMappingsNodes = mapper.readTree(this.getAutosuggestIndexMappings()); - } catch (IOException e1) { - throw new ElasticSearchOperationException( - "Caught an exception building Autosuggestion ES index"); - } - - ObjectNode indexConfig = (ObjectNode) mapper.createObjectNode().set("settings", esSettingsNode); - ObjectNode mappings = (ObjectNode) mapper.createObjectNode().set(getType(), 
esMappingsNodes); - - indexConfig.set("mappings", mappings); - - try { - return mapper.writeValueAsString(indexConfig); - } catch (JsonProcessingException exc) { - throw new ElasticSearchOperationException("Error getting object node as string", exc); - } - - } - - /** - * @return the instance - */ - public static ElasticSearchConfig getInstance() { - return instance; - } - - /** - * @param instance the instance to set - */ - public static void setInstance(ElasticSearchConfig instance) { - ElasticSearchConfig.instance = instance; - } - - /** - * @return the configFile - */ - public static String getConfigFile() { - return CONFIG_FILE; - } - - /** - * @return the ipAddressDefault - */ - public static String getIpAddressDefault() { - return IP_ADDRESS_DEFAULT; - } - - /** - * @return the httpPortDefault - */ - public static String getHttpPortDefault() { - return HTTP_PORT_DEFAULT; - } - - /** - * @return the javaApiPortDefault - */ - public static String getJavaApiPortDefault() { - return JAVA_API_PORT_DEFAULT; - } - - /** - * @return the typeDefault - */ - public static String getTypeDefault() { - return TYPE_DEFAULT; - } - - /** - * @return the clusterNameDefault - */ - public static String getClusterNameDefault() { - return CLUSTER_NAME_DEFAULT; - } - - /** - * @return the indexNameDefault - */ - public static String getIndexNameDefault() { - return INDEX_NAME_DEFAULT; - } - - /** - * @return the auditIndexNameDefault - */ - public static String getAuditIndexNameDefault() { - return AUDIT_INDEX_NAME_DEFAULT; - } - - /** - * @return the topographicalIndexNameDefault - */ - public static String getTopographicalIndexNameDefault() { - return TOPOGRAPHICAL_INDEX_NAME_DEFAULT; - } - - /** - * @return the entityCountHistoryIndexNameDefault - */ - public static String getEntityCountHistoryIndexNameDefault() { - return ENTITY_COUNT_HISTORY_INDEX_NAME_DEFAULT; - } - - /** - * @return the entityAutoSuggestIndexNameDefault - */ - public static String getEntityAutoSuggestIndexNameDefault() { - return ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT; - } - - /** - * @return the entityAutoSuggestSettingsFileDefault - */ - public static String getEntityAutoSuggestSettingsFileDefault() { - return ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT; - } - - /** - * @return the entityAutoSuggestMappingsFileDefault - */ - public static String getEntityAutoSuggestMappingsFileDefault() { - return ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT; - } - - /** - * @return the entityDynamicMappingsFileDefault - */ - public static String getEntityDynamicMappingsFileDefault() { - return ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT; - } - - /** - * @return the bulkApi - */ - public static String getBulkApi() { - return BULK_API; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "ElasticSearchConfig [ipAddress=" + ipAddress + ", httpPort=" + httpPort - + ", javaApiPort=" + javaApiPort + ", indexName=" + indexName + ", type=" + type - + ", clusterName=" + clusterName + ", mappingsFileName=" + mappingsFileName - + ", settingsFileName=" + settingsFileName + ", syncAdapterMaxConcurrentWorkers=" - + syncAdapterMaxConcurrentWorkers + ", auditIndexName=" + auditIndexName - + ", entityCountHistoryIndex=" + entityCountHistoryIndex + ", autosuggestIndexname=" - + autosuggestIndexname + ", entityCountHistoryMappingsFileName=" - + entityCountHistoryMappingsFileName + ", autoSuggestSettingsFileName=" - + autoSuggestSettingsFileName + ", autoSuggestMappingsFileName=" - + 
autoSuggestMappingsFileName + ", dynamicMappingsFileName=" + dynamicMappingsFileName - + ", processorConfig=" + processorConfig + "]"; - } } diff --git a/src/main/java/org/onap/aai/sparky/dal/proxy/config/DataRouterConfig.java b/src/main/java/org/onap/aai/sparky/dal/proxy/config/DataRouterConfig.java new file mode 100644 index 0000000..df2ae13 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/dal/proxy/config/DataRouterConfig.java @@ -0,0 +1,132 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.proxy.config; + +import java.util.Properties; + +import org.onap.aai.sparky.util.ConfigHelper; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; + +public class DataRouterConfig { + private String host; + private String port; + private String drUriSuffix; + private String certName; + private String keystorePassword; + private String keystore; + private int connectTimeout; + private int readTimeout; + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public String getPort() { + return port; + } + + public void setPort(String port) { + this.port = port; + } + + public String getCertName() { + return certName; + } + + public void setCertName(String certName) { + this.certName = certName; + } + + public String getKeystorePassword() { + return keystorePassword; + } + + public void setKeystorePassword(String keystorePassword) { + this.keystorePassword = keystorePassword; + } + + public String getKeystore() { + return keystore; + } + + public void setKeystore(String keystore) { + this.keystore = keystore; + } + + public int getConnectTimeout() { + return connectTimeout; + } + + public void setConnectTimeout(int connectTimeout) { + this.connectTimeout = connectTimeout; + } + + public int getReadTimeout() { + return readTimeout; + } + + public void setReadTimeout(int readTimeout) { + this.readTimeout = readTimeout; + } + + public String getDrUriSuffix() { + return drUriSuffix; + } + + public void setDrUriSuffix(String drUriSuffix) { + this.drUriSuffix = drUriSuffix; + } + + public DataRouterConfig(Properties props) { + + if (props == null) { + return; + } + + Properties restProps = ConfigHelper.getConfigWithPrefix("data-router.rest", props); + host = restProps.getProperty(TierSupportUiConstants.IP_ADDRESS, "localhost"); + port = restProps.getProperty(TierSupportUiConstants.PORT, "9502"); + drUriSuffix = 
restProps.getProperty(TierSupportUiConstants.DR_URI_SUFFIX, "ui-request"); + connectTimeout = + Integer.parseInt(restProps.getProperty(TierSupportUiConstants.DR_CONNECT_TIMEOUT, "5000")); + readTimeout = + Integer.parseInt(restProps.getProperty(TierSupportUiConstants.DR_READ_TIMEOUT, "1000")); + + Properties sslProps = ConfigHelper.getConfigWithPrefix("data-router.ssl", props); + certName = sslProps.getProperty(TierSupportUiConstants.DR_CERT_NAME, "aai-client-cert.p12"); + keystorePassword = sslProps.getProperty(TierSupportUiConstants.DR_KEYSTORE_PASSWORD, ""); + keystore = sslProps.getProperty(TierSupportUiConstants.DR_KEYSTORE, "tomcat_keystore"); + } + + @Override + public String toString() { + return "DataRouterConfig [host=" + host + ", port=" + port + ", drUriSuffix=" + drUriSuffix + + ", certName=" + certName + ", keystorePassword=" + keystorePassword + ", keystore=" + + keystore + ", connectTimeout=" + connectTimeout + ", readTimeout=" + readTimeout + "]"; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java b/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java new file mode 100644 index 0000000..444a34b --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java @@ -0,0 +1,227 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.proxy.processor; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import javax.json.Json; +import javax.json.JsonObjectBuilder; +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.Exchange; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.restclient.rest.HttpUtil; +import org.onap.aai.sparky.dal.proxy.config.DataRouterConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.ConfigHelper; +import org.onap.aai.sparky.util.Encryptor; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; +import org.slf4j.MDC; + +/** + * The Class AaiUiProxyProcessor. 
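+ *
+ * <p>In outline, as implemented below: the processor loads {@link DataRouterConfig} from
+ * data-router.properties, builds the data-router base URL as
+ * "https://" + host + ":" + port + "/" + drUriSuffix (defaulting to localhost:9502/ui-request),
+ * wraps each incoming request in a JSON body of the form
+ * {"origin-uri": "...", "origin-payload": "..."}, POSTs it to the data router, and then copies the
+ * X-TransactionId and X-FromAppId headers plus the proxied result onto the outgoing exchange.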
+ */ +public class AaiUiProxyProcessor { + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AaiUiProxyProcessor.class); + private static Logger auditLogger = + LoggerFactory.getInstance().getAuditLogger(AaiUiProxyProcessor.class.getName()); + public String configFile = + TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "data-router.properties"; + + private RestClient client; + private DataRouterConfig config; + private String drBaseUrl; + private OperationResult operationResult = null; + + private String xTransactionId; + private String xFromAppId; + + private static final String ROUTER_SERVICE = "routerService"; + + public String getDrBaseUrl() { + return drBaseUrl; + } + + public void setDrBaseUrl(String drBaseUrl) { + this.drBaseUrl = drBaseUrl; + } + + /** + * Instantiates a new AaiUiProxyProcessor. + */ + + public AaiUiProxyProcessor() { + Properties props = ConfigHelper.loadConfigFromExplicitPath(configFile); + config = new DataRouterConfig(props); + initializeProxyProcessor(config); + } + + public AaiUiProxyProcessor(DataRouterConfig config) { + initializeProxyProcessor(config); + } + + private void initializeProxyProcessor(DataRouterConfig config) { + Encryptor encryptor = new Encryptor(); + client = new RestClient().validateServerHostname(false).validateServerCertChain(false) + .clientCertFile(TierSupportUiConstants.CONFIG_AUTH_LOCATION + config.getCertName()) + .clientCertPassword(encryptor.decryptValue(config.getKeystorePassword())) + .trustStore(TierSupportUiConstants.CONFIG_AUTH_LOCATION + config.getKeystore()) + .connectTimeoutMs(config.getConnectTimeout()).readTimeoutMs(config.getReadTimeout()); + + drBaseUrl = + "https://" + config.getHost() + ":" + config.getPort() + "/" + config.getDrUriSuffix(); + } + + void setUpMdcContext(final Exchange exchange, final HttpServletRequest request) { + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + this.xTransactionId = NodeUtils.getRandomTxnId(); + } else { + this.xTransactionId = (String) xTransactionId; + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + xFromAppId = "Browser"; + } else { + xFromAppId = (String) partnerName; + } + + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", xFromAppId, + request.getRequestURI() + ":" + request.getLocalPort()); + } + + private Map> getHeaders() { + Map> headers = new HashMap<>(); + headers.put("X-FromAppId", Arrays.asList(TierSupportUiConstants.APP_NAME)); + headers.put("X-TransactionId", Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID))); + headers.put("X-FromAppId", Arrays.asList(MDC.get(MdcContext.MDC_PARTNER_NAME))); + return headers; + } + + private String getProxyPayloadAsString(final Exchange exchange) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + String srcUri = ""; + try { + srcUri = (String) exchange.getIn().getHeader(Exchange.HTTP_URI); + jsonBuilder.add("origin-uri", srcUri); + + String body = exchange.getIn().getBody(String.class); + + if (body != null && body.length() != 0) { + jsonBuilder.add("origin-payload", body); + } + + } catch (Exception e) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Failed to extract payload for proxying.\n" + "Requestor URL: " + srcUri); + } + + return jsonBuilder.build().toString(); + } + + private String getDrUrl(String requestUri) { + String url = ""; + int pos = requestUri.indexOf(ROUTER_SERVICE); + if (pos != -1) { + url = drBaseUrl + requestUri.substring(pos + 
ROUTER_SERVICE.length()); + } else { + LOG.error(AaiUiMsgs.DR_REQUEST_URI_FOR_PROXY_UNKNOWN, requestUri); + } + return url; + } + + public void proxyMessage(Exchange exchange) { + HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class); + + setUpMdcContext(exchange, request); + + try { + Map> headers = getHeaders(); + String proxyPayload = getProxyPayloadAsString(exchange); + String fromUrl = (String) exchange.getIn().getHeader(Exchange.HTTP_URI); + String toUrl = getDrUrl(fromUrl); + auditLogger.info(AaiUiMsgs.DR_PROXY_FROM_TO, fromUrl, toUrl); + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Proxying request:\n" + proxyPayload + "\n" + "Target URL:\n" + toUrl); + + long startTimeInMs = System.currentTimeMillis(); + + operationResult = client.post(toUrl, proxyPayload, headers, + javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE, + javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE); + + long drOpTime = (System.currentTimeMillis() - startTimeInMs); + int rc = operationResult.getResultCode(); + String result = ""; + + if (HttpUtil.isHttpResponseClassSuccess(rc)) { + result = operationResult.getResult(); + } else { + result = operationResult.getFailureCause(); + LOG.info(AaiUiMsgs.DR_PROCESSING_FAILURE, String.valueOf(rc), proxyPayload); + } + + auditLogger.info(AaiUiMsgs.DR_PROCESSING_TIME, String.valueOf(drOpTime)); + + exchange.getOut().setHeader("X-TransactionId", xTransactionId); + exchange.getOut().setHeader("X-FromAppId", xFromAppId); + exchange.getOut().setHeader("RequestUrl", request.getRequestURI()); + exchange.getOut().setHeader("RequestPort", request.getLocalPort()); + exchange.getOut().setBody(result); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_PROCESSING_REQUEST, exc); + } + } + + public RestClient getClient() { + return client; + } + + public void setClient(RestClient client) { + this.client = client; + } + + public DataRouterConfig getConfig() { + return config; + } + + public void setConfig(DataRouterConfig config) { + this.config = config; + } + + protected OperationResult getOperationResult() { + return operationResult; + } +} diff --git a/src/main/java/org/onap/aai/sparky/dal/rest/RestClientBuilder.java b/src/main/java/org/onap/aai/sparky/dal/rest/RestClientBuilder.java index 77f04e0..5977a03 100644 --- a/src/main/java/org/onap/aai/sparky/dal/rest/RestClientBuilder.java +++ b/src/main/java/org/onap/aai/sparky/dal/rest/RestClientBuilder.java @@ -22,11 +22,6 @@ */ package org.onap.aai.sparky.dal.rest; -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.config.ClientConfig; -import com.sun.jersey.api.client.config.DefaultClientConfig; -import com.sun.jersey.client.urlconnection.HTTPSProperties; - import javax.net.ssl.HostnameVerifier; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLSession; @@ -34,6 +29,11 @@ import javax.net.ssl.SSLSession; import org.onap.aai.sparky.security.SecurityContextFactory; import org.onap.aai.sparky.security.SecurityContextFactoryImpl; +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.config.ClientConfig; +import com.sun.jersey.api.client.config.DefaultClientConfig; +import com.sun.jersey.client.urlconnection.HTTPSProperties; + /** * This is a generic REST Client builder with flexible security validation. 
Sometimes it's nice to * be able to disable server chain cert validation and hostname validation to work-around lab diff --git a/src/main/java/org/onap/aai/sparky/dal/rest/RestfulDataAccessor.java b/src/main/java/org/onap/aai/sparky/dal/rest/RestfulDataAccessor.java index c229de1..9f07aff 100644 --- a/src/main/java/org/onap/aai/sparky/dal/rest/RestfulDataAccessor.java +++ b/src/main/java/org/onap/aai/sparky/dal/rest/RestfulDataAccessor.java @@ -24,11 +24,9 @@ package org.onap.aai.sparky.dal.rest; import java.security.SecureRandom; -import org.onap.aai.sparky.dal.cache.EntityCache; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.util.NodeUtils; import org.onap.aai.cl.api.Logger; import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; @@ -37,6 +35,9 @@ import com.sun.jersey.api.client.WebResource.Builder; /** * The Class RestfulDataAccessor. + * + * TODO: DELETE ME + * */ public class RestfulDataAccessor implements RestDataProvider { @@ -44,8 +45,6 @@ public class RestfulDataAccessor implements RestDataProvider { protected RestClientBuilder clientBuilder; - protected EntityCache entityCache; - private boolean cacheEnabled; private static final Logger LOG = LoggerFactory.getInstance().getLogger(RestfulDataAccessor.class); @@ -66,37 +65,6 @@ public class RestfulDataAccessor implements RestDataProvider { this.clientBuilder = clientBuilder; txnIdGenerator = new SecureRandom(); resourceNotFoundErrorsSurpressed = false; - cacheEnabled = false; - entityCache = null; - } - - protected boolean isCacheEnabled() { - return cacheEnabled; - } - - public void setCacheEnabled(boolean cacheEnabled) { - this.cacheEnabled = cacheEnabled; - } - - protected EntityCache getEntityCache() { - return entityCache; - } - - public void setEntityCache(EntityCache entityCache) { - this.entityCache = entityCache; - } - - /** - * Cache result. - * - * @param result the result - */ - private void cacheResult(OperationResult result) { - if (cacheEnabled && entityCache != null) { - final String id = - NodeUtils.generateUniqueShaDigest(result.getRequestLink(), result.getRequestPayload()); - entityCache.put(id, result); - } } /** @@ -119,27 +87,12 @@ public class RestfulDataAccessor implements RestDataProvider { } - /** - * Gets the cached data. - * - * @param link the link - * @param payload the payload - * @return the cached data - */ - private OperationResult getCachedData(String link, String payload) { - if (cacheEnabled && entityCache != null) { - final String id = NodeUtils.generateUniqueShaDigest(link, payload); - return entityCache.get(id, link); - } - return null; - } - /* * (non-Javadoc) * * @see - * org.onap.aai.sparky.dal.rest.RestDataProvider#doRestfulOperation(org.onap.aai.sparky.dal.rest. - * HttpMethod, java.lang.String, java.lang.String, java.lang.String, java.lang.String) + * org.openecomp.sparky.dal.rest.RestDataProvider#doRestfulOperation(org.openecomp.sparky.dal.rest + * .HttpMethod, java.lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public OperationResult doRestfulOperation(HttpMethod method, String url, String payload, @@ -151,31 +104,11 @@ public class RestfulDataAccessor implements RestDataProvider { Client client = null; Builder builder = null; - OperationResult operationResult = null; - - /* - * Attempt to get cached data for the requested URL. We don't currently cache the other - * operations. 
- */ - - operationResult = getCachedData(url, payload); - - if (operationResult != null) { - - /* - * cache-hit, return what we found - */ - - // System.out.println("operationResult = " + operationResult.getResultCode()); - // System.out.println("opresult = " + operationResult.getResult()); - return operationResult; - } - /* * else cache miss / cache disabled (default operation) */ - operationResult = new OperationResult(); + OperationResult operationResult = new OperationResult(); operationResult.setRequestLink(url); try { @@ -245,8 +178,6 @@ public class RestfulDataAccessor implements RestDataProvider { String.valueOf(operationResult.getResultCode())); } - cacheResult(operationResult); - return operationResult; } @@ -262,7 +193,7 @@ public class RestfulDataAccessor implements RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doGet(java.lang.String, java.lang.String) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doGet(java.lang.String, java.lang.String) */ @Override public OperationResult doGet(String url, String acceptContentType) { @@ -272,7 +203,8 @@ public class RestfulDataAccessor implements RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doDelete(java.lang.String, java.lang.String) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doDelete(java.lang.String, + * java.lang.String) */ @Override public OperationResult doDelete(String url, String acceptContentType) { @@ -282,7 +214,7 @@ public class RestfulDataAccessor implements RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doPost(java.lang.String, java.lang.String, + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPost(java.lang.String, java.lang.String, * java.lang.String) */ @Override @@ -294,7 +226,7 @@ public class RestfulDataAccessor implements RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doPut(java.lang.String, java.lang.String, + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPut(java.lang.String, java.lang.String, * java.lang.String) */ @Override @@ -306,7 +238,7 @@ public class RestfulDataAccessor implements RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doPatch(java.lang.String, java.lang.String, + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPatch(java.lang.String, java.lang.String, * java.lang.String) */ @Override @@ -318,7 +250,7 @@ public class RestfulDataAccessor implements RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#doHead(java.lang.String, java.lang.String) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doHead(java.lang.String, java.lang.String) */ @Override public OperationResult doHead(String url, String acceptContentType) { @@ -350,27 +282,20 @@ public class RestfulDataAccessor implements RestDataProvider { /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#shutdown() + * @see org.openecomp.sparky.dal.rest.RestDataProvider#shutdown() */ @Override public void shutdown() { - if (entityCache != null) { - entityCache.shutdown(); - } - } /* * (non-Javadoc) * - * @see org.onap.aai.sparky.dal.rest.RestDataProvider#clearCache() + * @see org.openecomp.sparky.dal.rest.RestDataProvider#clearCache() */ @Override public void clearCache() { - if (cacheEnabled) { - entityCache.clear(); - } } diff --git 
a/src/main/java/org/onap/aai/sparky/dal/sas/config/SearchServiceConfig.java b/src/main/java/org/onap/aai/sparky/dal/sas/config/SearchServiceConfig.java index 0925d71..cb6f933 100644 --- a/src/main/java/org/onap/aai/sparky/dal/sas/config/SearchServiceConfig.java +++ b/src/main/java/org/onap/aai/sparky/dal/sas/config/SearchServiceConfig.java @@ -70,6 +70,9 @@ public class SearchServiceConfig { private static final String TOPOGRAPHICAL_INDEX_NAME_DEFAULT = "topographicalsearchindex-localhost"; + private static final String ENTITY_COUNT_HISTORY_INDEX_NAME_DEFAULT = + "entitycounthistoryindex-localhost"; + private static final String VERSION_DEFAULT = "v1"; public static SearchServiceConfig getConfig() throws Exception { @@ -108,6 +111,8 @@ public class SearchServiceConfig { auditIndexName = sasProps.getProperty("auditIndexName", AUDIT_INDEX_NAME_DEFAULT); topographicalSearchIndex = sasProps.getProperty("topographicalIndexName", TOPOGRAPHICAL_INDEX_NAME_DEFAULT); + entityCountHistoryIndex = sasProps.getProperty("entityCountHistoryIndexName", + ENTITY_COUNT_HISTORY_INDEX_NAME_DEFAULT); certName = sasProps.getProperty("ssl.cert-name", "aai-client-cert.p12"); keystorePassword = sasProps.getProperty("ssl.keystore-password", "OBF:1i9a1u2a1unz1lr61wn51wn11lss1unz1u301i6o"); diff --git a/src/main/java/org/onap/aai/sparky/dal/servlet/ResettableStreamHttpServletRequest.java b/src/main/java/org/onap/aai/sparky/dal/servlet/ResettableStreamHttpServletRequest.java deleted file mode 100644 index 4713222..0000000 --- a/src/main/java/org/onap/aai/sparky/dal/servlet/ResettableStreamHttpServletRequest.java +++ /dev/null @@ -1,128 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.dal.servlet; - -import com.google.common.primitives.Bytes; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; - -import javax.servlet.ReadListener; -import javax.servlet.ServletInputStream; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletRequestWrapper; - -/** - * The Class ResettableStreamHttpServletRequest. - */ -public class ResettableStreamHttpServletRequest extends HttpServletRequestWrapper { - - private byte[] requestBody = new byte[0]; - private boolean bufferFilled = false; - - /** - * Constructs a request object wrapping the given request. 
- * - * @param request The request to wrap - * @throws IllegalArgumentException if the request is null - */ - public ResettableStreamHttpServletRequest(HttpServletRequest request) { - super(request); - } - - /** - * Get request body. - * - * @return Bytes with the request body contents. - * @throws IOException In case stream reqding fails. - */ - public byte[] getRequestBody() throws IOException { - if (bufferFilled) { - return Arrays.copyOf(requestBody, requestBody.length); - } - - InputStream inputStream = super.getInputStream(); - - byte[] buffer = new byte[102400]; - - int bytesRead; - while ((bytesRead = inputStream.read(buffer)) != -1) { - requestBody = Bytes.concat(this.requestBody, Arrays.copyOfRange(buffer, 0, bytesRead)); - } - - bufferFilled = true; - - return requestBody; - } - - @Override - public ServletInputStream getInputStream() throws IOException { - return new CustomServletInputStream(getRequestBody()); - } - - /** - * The Class CustomServletInputStream. - */ - private static class CustomServletInputStream extends ServletInputStream { - - private ByteArrayInputStream buffer; - - /** - * Instantiates a new custom servlet input stream. - * - * @param contents the contents - */ - public CustomServletInputStream(byte[] contents) { - this.buffer = new ByteArrayInputStream(contents); - } - - /* - * (non-Javadoc) - * - * @see java.io.InputStream#read() - */ - @Override - public int read() throws IOException { - return buffer.read(); - } - - @Override - public boolean isFinished() { - return buffer.available() == 0; - } - - @Override - public boolean isReady() { - return true; - } - - @Override - public void setReadListener(ReadListener arg0) { - throw new RuntimeException("Not implemented"); - } - - } - -} diff --git a/src/main/java/org/onap/aai/sparky/dataintegrity/config/DiUiConstants.java b/src/main/java/org/onap/aai/sparky/dataintegrity/config/DiUiConstants.java new file mode 100644 index 0000000..c449931 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/dataintegrity/config/DiUiConstants.java @@ -0,0 +1,77 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dataintegrity.config; + +/** + * The Class DiUiConstants. 
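+ *
+ * <p>Plain string constants grouped for the data-integrity view: JSON field names used when
+ * building and parsing search-service queries (aggregations, buckets, filters, violation details)
+ * and the widget-type identifiers (severity, category, entityType, pagination, dateHistogram)
+ * referenced by the UI. No behaviour is attached to these values.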
+ */ +public class DiUiConstants { + + public static final String APP_JSON = "application/json"; + public static final String CATEGORY = "category"; + + public static final String ENTITY_TYPE = "entityType"; + public static final String KEY_AGG = "aggregations"; + public static final String KEY_AGG_RESULT = "aggregationResult"; + public static final String KEY_AGG_RESULT_COUNT = "count"; + public static final String KEY_AGG_RESULT_ID = "key_as_string"; + public static final String KEY_BUCKETS = "buckets"; + public static final String KEY_ROUTE = "route"; + public static final String KEY_FILTERS = "filters"; + public static final String KEY_FILTER_VALUE = "filterValue"; + public static final String KEY_FILTER_ID = "filterId"; + public static final String KEY_START_DATE = "startDate"; + public static final String KEY_END_DATE = "endDate"; + public static final String KEY_TIME_ZONE = "time_zone"; + public static final String DEFAULT_TIME_ZONE = "+00:00"; + + public static final String WIDGET_TYPE_SEVERITY = "severity"; + public static final String WIDGET_TYPE_CATEGORY = "category"; + public static final String WIDGET_TYPE_ENTITY_TYPE = "entityType"; + public static final String WIDGET_TYPE_PAGINATED_TABLE = "pagination"; + public static final String WIDGET_TYPE_DATE_HISTOGRAM = "dateHistogram"; + + + public static final String KEY_BY_ITEM = "by_item"; + public static final String KEY_ENTITY_ID = "entityId"; + public static final String KEY_HITS = "hits"; + public static final String KEY_SEARCH_RESULT = "searchResult"; + public static final String KEY_INNER_HITS = "inner_hits"; + public static final String KEY_ITEM = "item"; + public static final String KEY_ITEM_AGG = "item_aggregation"; + public static final String KEY_TIMESTAMP = "violationTimestamp"; + public static final String KEY_TOTAL_HITS = "totalHits"; + public static final String KEY_VIOLATION_DETAILS = "violationDetails"; + public static final String SEARCH_API = "query"; + + public static final String SEVERITY = "severity"; + public static final String UI_KEY_BY_CATEGORY = "group_by_status"; + public static final String UI_KEY_BY_DATE = "group_by_date"; + public static final String UI_KEY_BY_ENTITY_TYPE = "group_by_entityType"; + public static final String UI_KEY_BY_SEVERITY = "group_by_severity"; + + public static final String UI_KEY_ORDER_BY_DATE = "order_by_date"; + public static final String VIOLATIONS = "violations"; + public static final String KEY_VIEW_NAME = "Data Integrity"; + +} diff --git a/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java b/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java new file mode 100644 index 0000000..42b439e --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java @@ -0,0 +1,182 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes; + +import java.io.UnsupportedEncodingException; +import java.util.Map; + +import org.apache.camel.Exchange; +import org.apache.camel.component.restlet.RestletConstants; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.editattributes.entity.EditRequest; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.Cookie; +import org.restlet.data.MediaType; +import org.restlet.data.Status; +import org.restlet.util.Series; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class AttributeEditProcessor. + */ +public class AttributeEditProcessor { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AttributeEditProcessor.class); + + private ObjectMapper mapper; + private AttributeUpdater attrUpdater; + + public AttributeEditProcessor(AttributeUpdater attributeUpdater) { + this.attrUpdater = attributeUpdater; + + this.mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + } + + public void editAttribute(Exchange exchange) { + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + + if (xTransactionId == null) { + xTransactionId = NodeUtils.getRandomTxnId(); + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + + Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + + /* + * Disables automatic Apache Camel Restlet component logging which prints out an undesirable log + * entry which includes client (e.g. 
browser) information + */ + request.setLoggable(false); + + ClientInfo clientInfo = request.getClientInfo(); + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, + clientInfo.getAddress() + ":" + clientInfo.getPort()); + + String payload = exchange.getIn().getBody(String.class); + EditRequest editRequest = null; + OperationResult operationResult = new OperationResult(); + + Response response = + exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + response.setStatus(Status.SUCCESS_OK); // 200 is assumed unless an actual exception occurs (a + // failure is still a valid response) + + boolean wasErrorDuringProcessing = false; + String errorMessage = null; + + + try { + + if (payload != null && !payload.isEmpty()) { + editRequest = mapper.readValue(payload, EditRequest.class); + + if (editRequest != null) { + + String attUid = getAttUid(request.getCookies()); + String objectUri = editRequest.getEntityUri(); + Map attributeValues = editRequest.getAttributes(); + + if (attUid != null && !attUid.isEmpty() && objectUri != null && !objectUri.isEmpty() + && attributeValues != null && !attributeValues.isEmpty()) { + + LOG.info(AaiUiMsgs.ATTRIBUTES_HANDLING_EDIT, objectUri, editRequest.toString()); + + operationResult = attrUpdater.updateObjectAttribute(objectUri, attributeValues, attUid); + + boolean wasSuccess = (operationResult.getResultCode() == 200); + String message = String.format("Edit Attributes completed with Result Code : %s (%s).", + operationResult.getResultCode(), wasSuccess ? "success" : "failed"); + + LOG.info(AaiUiMsgs.INFO_GENERIC, message); + } + } + } else { + wasErrorDuringProcessing = true; + errorMessage = "Empty payload provided, need details to complete request"; + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage()); + operationResult.setResult(500, "Error encountered while trying to update attributes."); + response.setStatus(Status.SERVER_ERROR_INTERNAL); + } + + if (wasErrorDuringProcessing) { + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_MESSAGE, errorMessage); + } + + response.setEntity(operationResult.getResult(), MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + } + + /** + * Gets the att uid. + * + * @param request the request + * @return the att uid + * @throws UnsupportedEncodingException the unsupported encoding exception + */ + public String getAttUid(Series cookies) throws UnsupportedEncodingException { + String attId = ""; + if (cookies == null) { + LOG.error(AaiUiMsgs.COOKIE_NOT_FOUND); + return attId; + } + for (Cookie cookie : cookies) { + if (cookie.getName().equals("attESHr")) { + // This cookie is of the form : + // "FIRSTNAME|LASTNAME|emailname@domain.com|||ab1234||fl6789,RBFMSKQ," + // + "Z9V2298,9762186|YNNNNNNNNNNNNNYNNYYNNNNN|FIRSTNAME|EY6SC9000|" + // we are to extract fl6789 from this which would be the attuid for the user. 
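+ // Walking the sample value above: after URL-decoding, splitting on "|" leaves
+ // "fl6789,RBFMSKQ,Z9V2298,9762186" at values[7]; its first comma-separated token, "fl6789",
+ // is the candidate attuid. It is returned immediately only when it starts with the user's
+ // lower-cased initials (values[0] "FIRSTNAME" and values[1] "LASTNAME" give "fl" here);
+ // otherwise the loop continues and whatever attuid was parsed last is returned at the end.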
+ String value = cookie.getValue(); + value = java.net.URLDecoder.decode(value, "UTF-8"); + LOG.info(AaiUiMsgs.COOKIE_FOUND, value); + String[] values = value.split("\\|"); + if (values.length > 7) { + attId = (values[7].split(","))[0]; + + String initials = (values[0].substring(0, 1) + values[1].substring(0, 1)).toLowerCase(); + if (attId.startsWith(initials)) { + return attId; + } + } + } + } + return attId; + } +} diff --git a/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java b/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java new file mode 100644 index 0000000..5e6d652 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java @@ -0,0 +1,366 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes; + +import java.net.URI; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.ws.rs.core.UriBuilder; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; +import org.onap.aai.sparky.editattributes.exception.AttributeUpdateException; +import org.onap.aai.sparky.logging.AaiUiMsgs; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; +import com.fasterxml.jackson.databind.PropertyNamingStrategy; + +/** + * Class to process attribute updates on AAI objects. + * + * + */ +public class AttributeUpdater { + + /** + * The Class AaiEditObject. + */ + public class AaiEditObject { + String objectType; + String rootElement; + String keyName; + String keyValue; + String schemaVersion; + + /** + * Instantiates a new aai edit object. + */ + public AaiEditObject() { + + } + + /** + * Instantiates a new aai edit object. 
+ * + * @param objectType the object type + * @param idName the id name + * @param schemaVersion the schema version + */ + public AaiEditObject(String objectType, String idName, String schemaVersion) { + super(); + this.objectType = objectType; + this.keyName = idName; + this.schemaVersion = schemaVersion; + } + + public String getObjectType() { + return objectType; + } + + public void setObjectType(String objectType) { + this.objectType = objectType; + } + + public String getKeyName() { + return keyName; + } + + public void setKeyName(String idName) { + this.keyName = idName; + } + + public String getSchemaVersion() { + return schemaVersion; + } + + public void setSchemaVersion(String schemaVersion) { + this.schemaVersion = schemaVersion; + } + + public void setKeyValue(String keyValue) { + this.keyValue = keyValue; + } + + public String getKeyValue() { + return keyValue; + } + + public String getRootElement() { + return rootElement; + } + + public void setRootElement(String rootElement) { + this.rootElement = rootElement; + } + + } + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(AttributeUpdater.class); + private static final String MESSAGE_VERSION_EXTRACTION_REGEX = "\\/(v[0-9]+)"; + private static final String ATTRIBUTES_UPDATED_SUCCESSFULLY = "Attributes updated successfully"; + private static final String ATTRIBUTES_NOT_UPDATED = "Attributes not updated. "; + private ActiveInventoryConfig aaiConfig; + private ActiveInventoryAdapter aaiAdapter; + private UserValidator validator; + private OxmModelLoader oxmModelLoader; + private OxmEntityLookup oxmEntityLookup; + + /** + * Instantiates a new attribute updater. + * + * @throws AttributeUpdateException + */ + public AttributeUpdater(OxmModelLoader oxmModelLoader, OxmEntityLookup oxmEntityLookup, + ActiveInventoryAdapter activeInventoryAdapter) throws AttributeUpdateException { + super(); + this.oxmModelLoader = oxmModelLoader; + this.oxmEntityLookup = oxmEntityLookup; + this.aaiAdapter = activeInventoryAdapter; + + try { + this.aaiConfig = ActiveInventoryConfig.getConfig(); // TODO -> Config to become a bean + this.validator = new UserValidator(); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ATTRIBUTES_ERROR_GETTING_AAI_CONFIG_OR_ADAPTER, + exc.getLocalizedMessage()); + throw new AttributeUpdateException(exc); + } + } + + protected String getResourceBasePath() { + + String versionStr = null; + if (oxmModelLoader != null) { + versionStr = String.valueOf(oxmModelLoader.getLatestVersionNum()); + } + + return "/aai/v" + versionStr; + + } + + protected URI getBaseUri() { + return UriBuilder.fromUri("https://" + aaiConfig.getAaiRestConfig().getHost() + ":" + + aaiConfig.getAaiRestConfig().getPort() + getResourceBasePath()).build(); + } + + /** + * Update object attribute. + * + * @param objectUri - Valid URI of the object as per OXM model. + * @param attributeValues - Map of (attribute-name & attribute-value) for any attributes to be + * updated to the value. + * @param attUid - ATTUID of the user requesting the update. + * @return - OperationResult with success or failure reason. 
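+ *
+ * <p>Illustrative call (the entity URI, attribute name, and generic types are examples only,
+ * not taken from this patch):
+ *
+ * <pre>
+ * Map&lt;String, Object&gt; attrs = new HashMap&lt;&gt;();
+ * attrs.put("prov-status", "PROV");
+ * OperationResult res = attributeUpdater.updateObjectAttribute(
+ *     "/aai/v11/cloud-infrastructure/pservers/pserver/server-123", attrs, "fl6789");
+ * </pre>
+ *
+ * <p>Note that in this revision the actual PATCH to AAI is commented out (see the FIX ME below),
+ * so the method currently reports a 404 result code even for well-formed requests.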
+ */ + public OperationResult updateObjectAttribute(String objectUri, + Map attributeValues, String attUid) { + OperationResult result = new OperationResult(); + LOG.info(AaiUiMsgs.ATTRIBUTES_UPDATE_METHOD_CALLED, objectUri, attUid, + String.valueOf(attributeValues)); + if (!validator.isAuthorizedUser(attUid)) { + result.setResultCode(403); + result.setResult(String.format("User %s is not authorized for Attributes update ", attUid)); + LOG.error(AaiUiMsgs.ATTRIBUTES_USER_NOT_AUTHORIZED_TO_UPDATE, attUid); + return result; + } + + AaiEditObject object = null; + + try { + object = getEditObjectFromUri(objectUri); + } catch (AttributeUpdateException exc) { + result.setResultCode(400); + result.setResult(ATTRIBUTES_NOT_UPDATED); + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage()); + return result; + } + try { + String jsonPayload = convertEditRequestToJson(object, attributeValues); + String patchUri = getBaseUri().toString() + getRelativeUri(objectUri); + + + /* + * FIX ME: Dave Adams, 8-Nov-2017 + */ + + // result = aaiAdapter.doPatch(patchUri, jsonPayload, MediaType.APPLICATION_JSON); + + result = new OperationResult(); + result.setResultCode(404); + + if (result.getResultCode() == 200) { + result.setResult(ATTRIBUTES_UPDATED_SUCCESSFULLY); + String message = result.getResult() + " for " + objectUri; + LOG.info(AaiUiMsgs.INFO_GENERIC, message); + } else { + String message = + ATTRIBUTES_NOT_UPDATED + " For: " + objectUri + ". AAI PATCH Status Code : " + + result.getResultCode() + ". Error : " + result.getResult(); + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_MESSAGE, message); + } + } catch (AttributeUpdateException exc) { + result.setResultCode(500); + result.setResult(ATTRIBUTES_NOT_UPDATED + exc.getLocalizedMessage()); + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage()); + } + return result; + + } + + /** + * Gets the relative uri. + * + * @param objectUri the object uri + * @return the relative uri + */ + public String getRelativeUri(String objectUri) { + String tempUri = objectUri; + final Pattern pattern = Pattern.compile(MESSAGE_VERSION_EXTRACTION_REGEX, Pattern.DOTALL); + Matcher matcher = pattern.matcher(objectUri); + while (matcher.find()) { + tempUri = objectUri.substring(matcher.end()); + } + if (!tempUri.startsWith("/")) { + tempUri = "/" + tempUri; + } + return tempUri; + } + + /** + * Gets the edits the object from uri. 
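+ *
+ * <p>For example (URI is illustrative), "/aai/v11/cloud-infrastructure/pservers/pserver/server-123"
+ * yields schemaVersion "v11" (matched by the /v[0-9]+/ pattern), keyValue "server-123" (last path
+ * segment), rootElement "pserver" (second-to-last segment), the object type resolved for that root
+ * element from the OXM entity lookup, and the entity's first primary-key field as the key name.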
+ * + * @param objectUri the object uri + * @return the edits the object from uri + * @throws AttributeUpdateException the attribute update exception + */ + public AaiEditObject getEditObjectFromUri(String objectUri) throws AttributeUpdateException { + + AaiEditObject object = new AaiEditObject(); + String version = getVersionFromUri(objectUri); + + if (null == version) { + version = "v" + String.valueOf(oxmModelLoader.getLatestVersionNum()); + } + object.setSchemaVersion(version); + + String[] values = objectUri.split("/"); + if (values.length < 2) { + throw new AttributeUpdateException("Invalid or malformed object URI : " + objectUri); + } + String keyValue = values[values.length - 1]; + String rootElement = values[values.length - 2]; + + object.setKeyValue(keyValue); + object.setRootElement(rootElement); + + String objectJavaType = null; + Map entityTypeLookup = oxmEntityLookup.getEntityTypeLookup(); + DynamicType entity = entityTypeLookup.get(rootElement); + if (null != entity) { + objectJavaType = entity.getName(); + String message = + "Descriptor: Alias: " + objectJavaType + " : DefaultRootElement: " + rootElement; + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, message); + } + + + if (objectJavaType == null) { + throw new AttributeUpdateException( + "Object type could not be determined from the URI : " + objectUri); + } + object.setObjectType(objectJavaType); + + // Set key attribute name + final List primaryKeys = entity.getDescriptor().getPrimaryKeyFieldNames(); + + if (primaryKeys.isEmpty()) { + throw new AttributeUpdateException("Object primary key not found in OXM version " + version); + } + + for (int i = 0; i < primaryKeys.size(); i++) { + final String primaryKey = primaryKeys.get(i); + if (primaryKey.indexOf("/text()") != -1) { + primaryKeys.set(i, primaryKey.replace("/text()", "")); + } + } + object.setKeyName(primaryKeys.iterator().next()); + + return object; + } + + /** + * Gets the version from uri. + * + * @param objectUri the object uri + * @return the version from uri + * @throws AttributeUpdateException the attribute update exception + */ + private String getVersionFromUri(String objectUri) throws AttributeUpdateException { + final Pattern pattern = Pattern.compile(MESSAGE_VERSION_EXTRACTION_REGEX, Pattern.DOTALL); + Matcher matcher = pattern.matcher(objectUri); + String messageSchemaVersion = null; + while (matcher.find()) { + messageSchemaVersion = matcher.group(1); + break; + } + return messageSchemaVersion; + } + + /** + * Convert edit request to json. 
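+ *
+ * <p>The resulting PATCH body is a flat JSON object holding the entity's key attribute plus the
+ * requested attribute values, for example (names and values illustrative):
+ * {"hostname": "server-123", "prov-status": "PROV"}.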
+ * + * @param object the object + * @param attributeValues the attribute values + * @return the string + * @throws AttributeUpdateException the attribute update exception + */ + private static String convertEditRequestToJson(AaiEditObject object, + Map attributeValues) throws AttributeUpdateException { + + ObjectMapper mapper = new ObjectMapper(); + mapper.setPropertyNamingStrategy(new PropertyNamingStrategy.KebabCaseStrategy()); + ObjectWriter ow = mapper.writer(); + + Map patchAttributes = new HashMap<>(); + patchAttributes.put(object.getKeyName(), object.getKeyValue()); + patchAttributes.putAll(attributeValues); + + try { + return ow.writeValueAsString(patchAttributes); + } catch (JsonProcessingException exc) { + throw new AttributeUpdateException("Caught a JPE while creating PATCH request body = ", exc); + } + } +} diff --git a/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java b/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java new file mode 100644 index 0000000..65467a2 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java @@ -0,0 +1,77 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Reads user IDs from a file. Each line in the user authorization file should contain a single user + * ID. For example, + * + *
    + * <pre>
    + * user1
    + * user2
    + * </pre>
    + */ +public class UserAuthorizationReader { + + private File userAuthorizationFile; + + /** + * Set the user authorization file. + * + * @param file a user authorization file + */ + public UserAuthorizationReader(File file) { + this.userAuthorizationFile = file; + } + + /** + * Gets user IDs from a file. + * + * @return a list of user IDs + * @throws IOException if there is a problem reading the user configuration file + */ + public List getUsers() throws IOException { + List userList = new ArrayList<>(); + try (Stream stream = Files.lines(getUserAuthorizationFile().toPath())) { + userList.addAll(stream.map(String::trim).collect(Collectors.toList())); + } + return userList; + } + + // Getters and setters + public File getUserAuthorizationFile() { + return userAuthorizationFile; + } + + public void setUserAuthorizationFile(File file) { + this.userAuthorizationFile = file; + } +} diff --git a/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java b/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java new file mode 100644 index 0000000..cccd815 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes; + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; + +/** + * Validates users against a user authorization file. + */ +public class UserValidator { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(UserValidator.class); + private static final String USER_AUTH_FILE = + TierSupportUiConstants.AUTHORIZED_USERS_FILE_LOCATION; + + private UserAuthorizationReader userAuthorizationReader = + new UserAuthorizationReader(new File(USER_AUTH_FILE)); + + /** + * Returns true if the user is authorized. 
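A minimal usage sketch, assuming the file configured at TierSupportUiConstants.AUTHORIZED_USERS_FILE_LOCATION lists one user ID per line:

    UserValidator validator = new UserValidator();
    boolean allowed = validator.isAuthorizedUser("user1");
    // true only when "user1" appears (trimmed) as a line in the authorized-users file;
    // false for a null or empty ID, or when the file cannot be read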
+ * + * @param userId a user identifier + * @return true if the user ID is present in the user authorization file + */ + public boolean isAuthorizedUser(String userId) { + if (userId != null && !userId.isEmpty()) { + try { + List users = userAuthorizationReader.getUsers(); + return users.contains(userId); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.USER_AUTHORIZATION_FILE_UNAVAILABLE, userId); + return false; + } + } else { + return false; + } + } +} diff --git a/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java b/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java new file mode 100644 index 0000000..df4c685 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java @@ -0,0 +1,67 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes.entity; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class EditRequest. + */ +public class EditRequest { + + @JsonProperty("entity-uri") + private String entityUri; + + @JsonProperty("entity-type") + private String entityType; + + @JsonProperty("attributes") + private Map attributes = new HashMap<>(); + + public String getEntityUri() { + return entityUri; + } + + public void setEntityUri(String entityUri) { + this.entityUri = entityUri; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public Map getAttributes() { + return attributes; + } + + public void setAttributes(Map attributes) { + this.attributes = attributes; + } +} diff --git a/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java b/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java new file mode 100644 index 0000000..4612785 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes.exception; + +/** + * The Class AttributeUpdateException. + */ +public class AttributeUpdateException extends Exception { + + private static final long serialVersionUID = 1L; + + /** + * Attribute Edit specific Exception Class. + * + * @param exc the exc + */ + + public AttributeUpdateException(Exception exc) { + super(exc); + } + + /** + * Instantiates a new attribute update exception. + * + * @param message the message + */ + public AttributeUpdateException(String message) { + super(message); + } + + /** + * Instantiates a new attribute update exception. + * + * @param message the message + * @param exc the exc + */ + public AttributeUpdateException(String message, Exception exc) { + super(message, exc); + } +} diff --git a/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java b/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java new file mode 100644 index 0000000..b765dc8 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java @@ -0,0 +1,143 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.inventory; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +/** + * The Class EntityHistoryQueryBuilder. + */ +public class EntityHistoryQueryBuilder { + + private static final String TABLE = "table"; + private static final String GRAPH = "graph"; + + /** + * Gets the query. 
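As a usage sketch, callers select the aggregation shape by view type; any other value yields null, as shown below:

    JsonObject tableQuery = EntityHistoryQueryBuilder.getQuery("table"); // latest count per entity type
    JsonObject graphQuery = EntityHistoryQueryBuilder.getQuery("graph"); // per-day counts via date_histogram
    JsonObject unknown    = EntityHistoryQueryBuilder.getQuery("other"); // null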
+ * + * @param type the type + * @return the query + */ + public static JsonObject getQuery(String type) { + if (type.equalsIgnoreCase(TABLE)) { + return createTableQuery(); + } else if (type.equalsIgnoreCase(GRAPH)) { + return createGraphQuery(); + } else { + return null; + } + } + + /** + * Creates the graph query. + * + * @return the json object + */ + public static JsonObject createGraphQuery() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder.add("aggs", + Json.createObjectBuilder().add("group_by_entityType", + Json.createObjectBuilder() + .add("terms", Json.createObjectBuilder().add("field", "entityType").add("size", 0)) + .add("aggs", Json.createObjectBuilder().add("group_by_date", + Json.createObjectBuilder().add("date_histogram", createDateHistogram()) + .add("aggs", Json.createObjectBuilder().add("sort_by_date", + Json.createObjectBuilder().add("top_hits", createTopHitsBlob()))))))); + jsonBuilder.add("size", 0); + + return jsonBuilder.build(); + } + + /** + * Creates the table query. + * + * @return the json object + */ + public static JsonObject createTableQuery() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder + .add("aggs", + Json.createObjectBuilder().add("group_by_entityType", + Json.createObjectBuilder() + .add("terms", + Json.createObjectBuilder().add("field", "entityType").add("size", 0)) + .add("aggs", Json.createObjectBuilder().add("sort_by_date", + Json.createObjectBuilder().add("top_hits", createTopHitsBlob()))))); + jsonBuilder.add("size", 0); + + return jsonBuilder.build(); + } + + /** + * Creates the date histogram. + * + * @return the json object + */ + private static JsonObject createDateHistogram() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder.add("field", "timestamp"); + jsonBuilder.add("min_doc_count", 1); + jsonBuilder.add("interval", "day"); + jsonBuilder.add("format", "epoch_millis"); + + return jsonBuilder.build(); + } + + /** + * Creates the top hits blob. + * + * @return the json object + */ + private static JsonObject createTopHitsBlob() { + JsonObjectBuilder builder = Json.createObjectBuilder(); + builder.add("size", 1); + builder.add("sort", getSortCriteria()); + return builder.build(); + } + + public static JsonArray getSortCriteria() { + JsonArrayBuilder jsonBuilder = Json.createArrayBuilder(); + jsonBuilder.add(Json.createObjectBuilder().add("timestamp", + Json.createObjectBuilder().add("order", "desc"))); + + return jsonBuilder.build(); + } + + /** + * The main method. + * + * @param args the arguments + */ + public static void main(String[] args) { + System.out.println("TABLE-QUERY: " + createTableQuery().toString()); + System.out.println("GRAPH_QUERY: " + createGraphQuery().toString()); + } + +} diff --git a/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java b/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java new file mode 100644 index 0000000..c356191 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java @@ -0,0 +1,202 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
+ * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.inventory; + +import java.io.IOException; + +import org.apache.camel.Exchange; +import org.apache.camel.component.restlet.RestletConstants; +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; +import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.logging.util.ServletUtils; +import org.onap.aai.sparky.util.NodeUtils; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.Form; +import org.restlet.data.MediaType; +import org.restlet.data.Parameter; +import org.restlet.data.Status; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class GeoVisualizationServlet. + */ +public class GeoVisualizationProcessor { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(GeoVisualizationProcessor.class); + + private ObjectMapper mapper; + private SearchAdapter search = null; + private ElasticSearchConfig elasticConfig = null; + + private static final String SEARCH_STRING = "_search"; + private static final String SEARCH_PARAMETER = + "?filter_path=hits.hits._source&_source=location&size=5000&q=entityType:"; + private static final String PARAMETER_KEY = "entity"; + + /** + * Instantiates a new geo visualization processor + */ + public GeoVisualizationProcessor() { + this.mapper = new ObjectMapper(); + + try { + if (elasticConfig == null) { + elasticConfig = ElasticSearchConfig.getConfig(); + } + if (search == null) { + search = new SearchAdapter(); + } + this.mapper = new ObjectMapper(); + } catch (Exception exc) { + + } + } + + public void setSearch(SearchAdapter search) { + this.search = search; + } + + public void setElasticConfig(ElasticSearchConfig elasticConfig) { + this.elasticConfig = elasticConfig; + } + + /** + * Gets the geo visualization results. 
+ * + * @param response the response + * @param entityType the entity type + * @return the geo visualization results + * @throws Exception the exception + */ + protected OperationResult getGeoVisualizationResults(Exchange exchange) throws Exception { + OperationResult operationResult = new OperationResult(); + + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + xTransactionId = NodeUtils.getRandomTxnId(); + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + + Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + + /* + * Disables automatic Apache Camel Restlet component logging which prints out an undesirable log + * entry which includes client (e.g. browser) information + */ + request.setLoggable(false); + + ClientInfo clientInfo = request.getClientInfo(); + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, + clientInfo.getAddress() + ":" + clientInfo.getPort()); + + String entityType = ""; + + Form form = request.getResourceRef().getQueryAsForm(); + for (Parameter parameter : form) { + if (PARAMETER_KEY.equals(parameter.getName())) { + entityType = parameter.getName(); + } + } + + String parameters = SEARCH_PARAMETER + entityType; + String requestString = String.format("/%s/%s/%s", elasticConfig.getTopographicalSearchIndex(), + SEARCH_STRING, parameters); + + try { + final String fullUrlStr = ServletUtils.getFullUrl(elasticConfig, requestString); + OperationResult opResult = search.doGet(fullUrlStr, "application/json"); + + JSONObject finalOutputJson = formatOutput(opResult.getResult()); + + Response response = + exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + response.setStatus(Status.SUCCESS_OK); + response.setEntity(String.valueOf(finalOutputJson), MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Error processing Geo Visualization request"); + } + + return operationResult; + } + + /** + * Format output. 
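For illustration (the index name is hypothetical), the Elasticsearch request assembled above resolves to a path of the form:

    // /<topographicalSearchIndex>/_search?filter_path=hits.hits._source&_source=location&size=5000&q=entityType:<entity>
    // e.g. /topographicalsearchindex/_search?filter_path=hits.hits._source&_source=location&size=5000&q=entityType:complex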
+ * + * @param results the results + * @return the JSON object + */ + private JSONObject formatOutput(String results) { + JsonNode resultNode = null; + JSONObject finalResult = new JSONObject(); + JSONArray entitiesArr = new JSONArray(); + + try { + resultNode = mapper.readTree(results); + + final JsonNode hitsNode = resultNode.get("hits").get("hits"); + if (hitsNode.isArray()) { + + for (final JsonNode arrayNode : hitsNode) { + JsonNode sourceNode = arrayNode.get("_source"); + if (sourceNode.get("location") != null) { + JsonNode locationNode = sourceNode.get("location"); + if (NodeUtils.isNumeric(locationNode.get("lon").asText()) + && NodeUtils.isNumeric(locationNode.get("lat").asText())) { + JSONObject location = new JSONObject(); + location.put("longitude", locationNode.get("lon").asText()); + location.put("latitude", locationNode.get("lat").asText()); + + entitiesArr.put(location); + } + + } + } + } + finalResult.put("plotPoints", entitiesArr); + + } catch (IOException exc) { + LOG.warn(AaiUiMsgs.ERROR_BUILDING_SEARCH_RESPONSE, exc.getLocalizedMessage()); + } + + return finalResult; + } +} diff --git a/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java b/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java new file mode 100644 index 0000000..3596c54 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java @@ -0,0 +1,292 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.inventory.entity; + +import java.io.Serializable; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.List; + +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.sync.entity.IndexDocument; +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class GeoIndexDocument. 
+ */ +public class GeoIndexDocument implements Serializable, IndexDocument { + + @JsonIgnore + private static final long serialVersionUID = -5188479658230319058L; + + protected String entityType; + protected String entityPrimaryKeyValue; + protected String entityPrimaryKeyName; + protected String latitude; + protected String longitude; + protected String selfLink; + + @JsonIgnore + protected OxmEntityLookup oxmEntityLookup; + + @JsonIgnore + protected ObjectMapper mapper = new ObjectMapper(); + // generated, SHA-256 digest + @JsonIgnore + protected String id; + + /** + * Convert bytes to hex string. + * + * @param bytesToConvert the bytes to convert + * @return the string + */ + private static String convertBytesToHexString(byte[] bytesToConvert) { + StringBuffer hexString = new StringBuffer(); + for (int i = 0; i < bytesToConvert.length; i++) { + hexString.append(Integer.toHexString(0xFF & bytesToConvert[i])); + } + return hexString.toString(); + } + + + @JsonIgnore + public boolean isValidGeoDocument() { + + boolean isValid = true; + + isValid &= (this.getEntityType() != null); + isValid &= (this.getLatitude() != null); + isValid &= (this.getLongitude() != null); + isValid &= (this.getId() != null); + isValid &= (this.getSelfLink() != null); + + isValid &= NodeUtils.isNumeric(this.getLatitude()); + isValid &= NodeUtils.isNumeric(this.getLongitude()); + + return isValid; + } + + /** + * Concat array. + * + * @param list the list + * @param delimiter the delimiter + * @return the string + */ + private static String concatArray(List list, char delimiter) { + + if (list == null || list.size() == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + int listSize = list.size(); + boolean firstValue = true; + + for (String item : list) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(delimiter).append(item); + } + + } + + return result.toString(); + + } + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. The + * best we can hope for is identification of resources by generated Id until the Identity-Service + * UUID is tagged against all resources, then we can use that instead. + */ + + /** + * Generate unique sha digest. + * + * @param entityType the entity type + * @param fieldName the field name + * @param fieldValue the field value + * @return the string + * @throws NoSuchAlgorithmException the no such algorithm exception + */ + public static String generateUniqueShaDigest(String entityType, String fieldName, + String fieldValue) throws NoSuchAlgorithmException { + + /* + * Basically SHA-256 will result in an identity with a guaranteed uniqueness compared to just a + * java hashcode value. + */ + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes()); + return convertBytesToHexString(digest.digest()); + } + + /** + * Instantiates a new geo index document. + */ + public GeoIndexDocument() {} + + /* + * (non-Javadoc) + * + * @see com.att.queryrouter.dao.DocumentStoreDataEntity#getAsJson() + */ + + @Override + @JsonIgnore + public String getAsJson() throws JsonProcessingException { + + if (latitude != null && longitude != null) { + + /** + * A valid entry from this class is one that has both lat and long. If one or both is missing + * we shouldn't be indexing anything. 
+ */ + + return NodeUtils.convertObjectToJson(this, true); + + } + + return null; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. + * The best we can hope for is identification of resources by generated Id until the + * Identity-Service UUID is tagged against all resources, then we can use that instead. + */ + + OxmEntityDescriptor descriptor = oxmEntityLookup.getEntityDescriptors().get(entityType); + String entityPrimaryKeyName = + NodeUtils.concatArray(descriptor.getPrimaryKeyAttributeNames(), "/"); + + this.id = + NodeUtils.generateUniqueShaDigest(entityType, entityPrimaryKeyName, entityPrimaryKeyValue); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "TopographicalEntity [" + ("entityType=" + entityType + ", ") + + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ") + + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ") + ("ID=" + id + ", ") + + ("selfLink=" + selfLink) + "]"; + } + + @Override + @JsonIgnore + public String getId() { + return this.id; + } + + @JsonProperty("entityType") + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + @JsonProperty("entityPrimaryKeyValue") + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) { + this.entityPrimaryKeyValue = entityPrimaryKeyValue; + } + + @JsonProperty("entityPrimaryKeyName") + public String getEntityPrimaryKeyName() { + return entityPrimaryKeyName; + } + + public void setEntityPrimaryKeyName(String entityPrimaryKeyName) { + this.entityPrimaryKeyName = entityPrimaryKeyName; + } + + @JsonProperty("lat") + public String getLatitude() { + return latitude; + } + + public void setLatitude(String latitude) { + this.latitude = latitude; + } + + @JsonProperty("long") + public String getLongitude() { + return longitude; + } + + public void setLongitude(String longitude) { + this.longitude = longitude; + } + + @JsonProperty("link") + public String getSelfLink() { + return selfLink; + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + @JsonIgnore + public static long getSerialversionuid() { + return serialVersionUID; + } + + public void setId(String id) { + this.id = id; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java b/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java new file mode 100644 index 0000000..7736255 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java @@ -0,0 +1,220 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.inventory.entity; + +import java.io.IOException; +import java.io.Serializable; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.List; + +import javax.json.Json; +import javax.json.JsonObject; + +/** + * The Class TopographicalEntity. + */ +public class TopographicalEntity implements Serializable { + + private static final long serialVersionUID = -5188479658230319058L; + + protected String entityType; + protected String entityPrimaryKeyValue; + protected String entityPrimaryKeyName; + protected String latitude; + protected String longitude; + protected String selfLink; + + // generated, SHA-256 digest + protected String id; + + /** + * Convert bytes to hex string. + * + * @param bytesToConvert the bytes to convert + * @return the string + */ + private static String convertBytesToHexString(byte[] bytesToConvert) { + StringBuffer hexString = new StringBuffer(); + for (int i = 0; i < bytesToConvert.length; i++) { + hexString.append(Integer.toHexString(0xFF & bytesToConvert[i])); + } + return hexString.toString(); + } + + /** + * Concat array. + * + * @param list the list + * @param delimiter the delimiter + * @return the string + */ + private static String concatArray(List list, char delimiter) { + + if (list == null || list.size() == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + int listSize = list.size(); + boolean firstValue = true; + + for (String item : list) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(delimiter).append(item); + } + + } + + return result.toString(); + + } + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. The + * best we can hope for is identification of resources by generated Id until the Identity-Service + * UUID is tagged against all resources, then we can use that instead. + */ + + /** + * Generate unique sha digest. + * + * @param entityType the entity type + * @param fieldName the field name + * @param fieldValue the field value + * @return the string + * @throws NoSuchAlgorithmException the no such algorithm exception + */ + public static String generateUniqueShaDigest(String entityType, String fieldName, + String fieldValue) throws NoSuchAlgorithmException { + + /* + * Basically SHA-256 will result in an identity with a guaranteed uniqueness compared to just a + * java hashcode value. + */ + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes()); + return convertBytesToHexString(digest.digest()); + } + + /** + * Instantiates a new topographical entity. 
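A small sketch of the identity derivation above (the entity type, key name, and key value are assumed examples):

    // SHA-256 digest over "<entityType>.<fieldName>.<fieldValue>", hex-encoded
    String id = TopographicalEntity.generateUniqueShaDigest("complex", "physical-location-id", "clli-example-1");
    // throws NoSuchAlgorithmException if the SHA-256 algorithm is unavailable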
+ */ + public TopographicalEntity() {} + + /* + * (non-Javadoc) + * + * @see com.att.queryrouter.dao.DocumentStoreDataEntity#getAsJson() + */ + public String getAsJson() throws IOException { + + JsonObject obj = + Json.createObjectBuilder().add("entityType", entityType).add("pkey", entityPrimaryKeyValue) + .add("location", Json.createObjectBuilder().add("lat", latitude).add("lon", longitude)) + .add("selfLink", selfLink).build(); + + return obj.toString(); + } + + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "TopographicalEntity [" + ("entityType=" + entityType + ", ") + + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ") + + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ") + ("ID=" + id + ", ") + + ("selfLink=" + selfLink) + "]"; + } + + public String getId() { + return this.id; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) { + this.entityPrimaryKeyValue = entityPrimaryKeyValue; + } + + public String getEntityPrimaryKeyName() { + return entityPrimaryKeyName; + } + + public void setEntityPrimaryKeyName(String entityPrimaryKeyName) { + this.entityPrimaryKeyName = entityPrimaryKeyName; + } + + public String getLatitude() { + return latitude; + } + + public void setLatitude(String latitude) { + this.latitude = latitude; + } + + public String getLongitude() { + return longitude; + } + + public void setLongitude(String longitude) { + this.longitude = longitude; + } + + public String getSelfLink() { + return selfLink; + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + public static long getSerialversionuid() { + return serialVersionUID; + } + + public void setId(String id) { + this.id = id; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java b/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java index c89f83c..5854bc7 100644 --- a/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java +++ b/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java @@ -22,10 +22,10 @@ */ package org.onap.aai.sparky.logging; -import com.att.eelf.i18n.EELFResourceManager; - import org.onap.aai.cl.eelf.LogMessageEnum; +import com.att.eelf.i18n.EELFResourceManager; + /** * The Enum AaiUiMsgs. */ @@ -52,6 +52,14 @@ public enum AaiUiMsgs implements LogMessageEnum { UNEXPECTED_TOKEN_COUNT, /** Arguments: {0} = Error/exception message. */ ADD_SEARCH_TARGET_ATTRIBUTES_FAILED, + /** Arguments: {0} = Error/exception message. */ + NODE_INTEGRITY_OVERLAY_ERROR, + /** Arguments: {0} = Node ID. */ + NODE_INTEGRITY_ALREADY_PROCESSED, + /** Arguments: {0} = Node ID. */ + SKIPPING_PROCESS_NODE_INTEGRITY, + /** Arguments: {0} = Error/exception message. */ + FAILED_TO_PROCESS_NODE_INTEGRITY, /** No argument */ MAX_EVALUATION_ATTEMPTS_EXCEEDED, /** Arguments: {0} = Error/exception message. */ @@ -244,10 +252,10 @@ public enum AaiUiMsgs implements LogMessageEnum { INTERRUPTED, /** Arguments: {0} = Entity Type {1} Entity */ GEO_SYNC_IGNORING_ENTITY, + /** Arguments: {0} = reason */ + OXM_LOADING_ERROR, /** Arguments: {0} = type */ - OXM_FAILED_RETRIEVAL, - /** Arguments: {0} = Directory. 
*/ - OXM_FILE_NOT_FOUND, + OXM_FAILED_RETRIEVAL, OXM_FILE_NOT_FOUND, /** No argument */ OXM_READ_ERROR_NONVERBOSE, /** Arguments: {0} = OXM File name */ @@ -256,7 +264,7 @@ public enum AaiUiMsgs implements LogMessageEnum { OXM_PARSE_ERROR_NONVERBOSE, /** Arguments: {0} = OXM File name {1} = Exception */ OXM_PARSE_ERROR_VERBOSE, - /** No argument */ + /** Arguments: {0} = Numerical value for loaded OXM version */ OXM_LOAD_SUCCESS, /** Arguments: {0} = Entity {1} = Found property-value */ OXM_PROP_DEF_ERR_CROSS_ENTITY_REF, @@ -302,6 +310,20 @@ public enum AaiUiMsgs implements LogMessageEnum { AAI_RETRIEVAL_FAILED_GENERIC, /** Arguments: {0} = Self Link */ AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + /** Arguments: {0} = Exception */ + ATTRIBUTES_NOT_UPDATED_EXCEPTION, + /** Arguments: {0} = Message */ + ATTRIBUTES_NOT_UPDATED_MESSAGE, + /** Arguments: {0} = Exception */ + ATTRIBUTES_ERROR_GETTING_AAI_CONFIG_OR_ADAPTER, + /** Arguments: {0} = Schema File URI */ + ATTRIBUTES_ERROR_LOADING_MODEL_VERSION, + /** Arguments: {0} = Request URI {1} = Edit Request Body */ + ATTRIBUTES_HANDLING_EDIT, + /** Arguments: {0} = Object URI {1} = Attribute ID {2} Attribute Values */ + ATTRIBUTES_UPDATE_METHOD_CALLED, + /** Arguments: {0} = Attribute ID */ + ATTRIBUTES_USER_NOT_AUTHORIZED_TO_UPDATE, /** Arguments: {0} = Cookie */ COOKIE_FOUND, /** No argument */ @@ -404,7 +426,7 @@ public enum AaiUiMsgs implements LogMessageEnum { /** Arguments: {0} = URL to extract parameter from */ ERROR_REMOVING_URL_PARAM, /** Arguments: {0} = Hash value */ - ERROR_INVALID_HASH, ERROR_HASH_NOT_FOUND, ERROR_READING_HTTP_REQ_PARAMS, + ERROR_INVALID_HASH, ERROR_HASH_NOT_FOUND, ERROR_FILTERS_NOT_FOUND, ERROR_READING_HTTP_REQ_PARAMS, /** Arguments: {0} = Exception */ ERROR_D3_GRAPH_VISUALIZATION, /** Arguments: {0} = Exception */ @@ -417,8 +439,25 @@ public enum AaiUiMsgs implements LogMessageEnum { VIEW_NAME_NOT_SUPPORTED, /** Arguments: {0} = response code, {1} = filter name */ ERROR_FETCHING_FILTER_VALUES, + /** Arguments: {0} = query type, {1} = view name */ + ERROR_PROCESSING_WIDGET_REQUEST, + /** Arguments: {0} = Time in ms */ + DR_PROCESSING_TIME, + /** Arguments: {0} = Response code {1} = payload */ + DR_PROCESSING_FAILURE, + /** Arguments: {0} = request uri */ + DR_REQUEST_URI_FOR_PROXY_UNKNOWN, + /** Arguments: {0} = origin-url {1} = dr-url */ + DR_PROXY_FROM_TO, + /** Arguments: {0} = Exception */ + URI_DECODING_EXCEPTION, + /** Arguments: {0} = Value {1} = Error */ + ENCRYPTION_ERROR, + /** Arguments: {0} = Encrypted value {1} = Error */ + DECRYPTION_ERROR, /** Arguments: {0} = URI */ RESOURCE_NOT_FOUND; + /** * Static initializer to ensure the resource bundles for this class are loaded... */ diff --git a/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java b/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java new file mode 100644 index 0000000..dd040a2 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java @@ -0,0 +1,161 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.logging.util; + +import java.io.IOException; +import java.io.PrintWriter; + +import javax.servlet.http.HttpServletResponse; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; +import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; + +/** + * The Class ServletUtils. + */ +public class ServletUtils { + + /** + * Execute get query. + * + * @param logger the logger + * @param search the search + * @param response the response + * @param requestUrl the request url + * @return the operation result + * @throws Exception the exception + */ + public static OperationResult executeGetQuery(Logger logger, SearchAdapter search, + HttpServletResponse response, String requestUrl) throws Exception { + + OperationResult opResult = search.doGet(requestUrl, "application/json"); + + if (opResult.getResultCode() > 300) { + setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); + } else { + response.setStatus(opResult.getResultCode()); + } + + return opResult; + + } + + /** + * Execute post query. + * + * @param logger the logger + * @param search the search + * @param response the response + * @param requestUrl the request url + * @param requestJsonPayload the request json payload + * @return the operation result + * @throws Exception the exception + */ + public static OperationResult executePostQuery(Logger logger, SearchAdapter search, + HttpServletResponse response, String requestUrl, String requestJsonPayload) throws Exception { + + OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json"); + + if (opResult.getResultCode() > 300) { + setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); + + } else { + response.setStatus(opResult.getResultCode()); + } + + return opResult; + } + + /** + * Handle search servlet errors. + * + * @param logger the logger + * @param errorMsg the error msg + * @param exc the exc + * @param response the response + * @throws IOException Signals that an I/O exception has occurred. + */ + public static void handleSearchServletErrors(Logger logger, String errorMsg, Exception exc, + HttpServletResponse response) throws IOException { + String errorLogMsg = + (exc == null ? errorMsg : errorMsg + ". Error:" + exc.getLocalizedMessage()); + logger.error(AaiUiMsgs.ERROR_GENERIC, errorLogMsg); + response.setContentType("application/json"); + PrintWriter out = response.getWriter(); + out.println(generateJsonErrorResponse(errorMsg)); + out.close(); + } + + /** + * Generate json error response. 
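A brief usage sketch (the logger, SearchAdapter, servlet response, and request URL are assumed to be supplied by the caller):

    OperationResult result = ServletUtils.executeGetQuery(logger, search, response, requestUrl);
    // result codes above 300 are logged and the payload is written back on the servlet response;
    // otherwise only the HTTP status is propagated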
+ * + * @param message the message + * @return the string + */ + public static String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : %s }", message); + } + + /** + * Sets the servlet response. + * + * @param logger the logger + * @param isError the is error + * @param responseCode the response code + * @param response the response + * @param postPayload the post payload + * @throws IOException Signals that an I/O exception has occurred. + */ + public static void setServletResponse(Logger logger, boolean isError, int responseCode, + HttpServletResponse response, String postPayload) throws IOException { + + if (isError) { + logger.error(AaiUiMsgs.ERROR_GENERIC, postPayload); + } + + response.setStatus(responseCode); + + if (postPayload != null) { + response.setContentType("application/json"); + PrintWriter out = response.getWriter(); + out.println(postPayload); + out.close(); + } + } + + /** + * Gets the full url. + * + * @param elasticConfig the elastic config + * @param resourceUrl the resource url + * @return the full url + */ + public static String getFullUrl(ElasticSearchConfig elasticConfig, String resourceUrl) { + final String host = elasticConfig.getIpAddress(); + final String port = elasticConfig.getHttpPort(); + return String.format("http://%s:%s%s", host, port, resourceUrl); + } +} diff --git a/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java b/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java new file mode 100644 index 0000000..e2eef7a --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java @@ -0,0 +1,417 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.TreeMap; + +import org.apache.camel.Exchange; +import org.apache.camel.Processor; +import org.apache.camel.component.restlet.RestletConstants; +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; +import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.inventory.EntityHistoryQueryBuilder; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.logging.util.ServletUtils; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.RestletUtils; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.MediaType; +import org.restlet.data.Status; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + +/** + * Receives and processes Entity Count History requests + */ +public class EntityCountHistoryProcessor implements Processor { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(EntityCountHistoryProcessor.class); + + private static final long serialVersionUID = 1L; + + private SearchAdapter search = null; + private ElasticSearchConfig elasticConfig = null; + private VisualizationConfigs visualConfigs = null; + private ObjectMapper mapper; + + private static final String SEARCH_STRING = "_search"; + private static final String TYPE = "type"; + private static final String TABLE = "table"; + private static final String GRAPH = "graph"; + + private List vnfEntityTypesToSummarize; + private boolean summarizevnf = false; + + private RestletUtils restletUtils = new RestletUtils(); + + /** + * Instantiates a new Entity Count History + */ + + public EntityCountHistoryProcessor(VisualizationConfigs visualizationConfigs) { + + this.visualConfigs = visualizationConfigs; + vnfEntityTypesToSummarize = + Arrays.asList(visualConfigs.getVnfEntityTypes().toLowerCase().split("[\\s,]+")); + summarizevnf = visualConfigs.getEntityTypesToSummarize().toLowerCase().contains("vnf"); + try { + if (elasticConfig == null) { + elasticConfig = ElasticSearchConfig.getConfig(); + } + + if (search == null) { + search = new SearchAdapter(); + } + this.mapper = new ObjectMapper(); + this.mapper.configure(SerializationFeature.INDENT_OUTPUT, true); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, exc.getLocalizedMessage()); + } + } + + /** + * Processes a entity count history search request + * + * @param exchange The Exchange object generated by Apache Camel for the incoming request + */ + + @Override + public void process(Exchange exchange) throws Exception { + + Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + Response restletResponse = + exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + 
xTransactionId = NodeUtils.getRandomTxnId(); + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + + /* + * Disables automatic Apache Camel Restlet component logging which prints out an undesirable log + * entry which includes client (e.g. browser) information + */ + request.setLoggable(false); + + ClientInfo clientInfo = request.getClientInfo(); + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, + clientInfo.getAddress() + ":" + clientInfo.getPort()); + + String typeParameter = getTypeParameter(exchange); + + if (null != typeParameter && !typeParameter.isEmpty()) { + OperationResult operationResult = null; + + try { + operationResult = getResults(restletResponse, typeParameter); + restletResponse.setEntity(operationResult.getResult(), MediaType.APPLICATION_JSON); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, exc.getLocalizedMessage()); + } + } else { + LOG.error(AaiUiMsgs.RESOURCE_NOT_FOUND, request.getOriginalRef().toString()); + String errorMessage = + restletUtils.generateJsonErrorResponse("Unsupported request. Resource not found."); + restletResponse.setEntity(errorMessage, MediaType.APPLICATION_JSON); + restletResponse.setStatus(Status.CLIENT_ERROR_NOT_FOUND); + } + + exchange.getOut().setBody(restletResponse); + } + + + /** + * Format line graph output + * + * @param results The results + * @return The JSON object + * @throws JsonProcessingException The JSON processing exception + */ + public JSONObject formatLineGraphOutput(String results) throws JsonProcessingException { + Map countByDateMap = new HashMap(); + + JsonNode resultNode = null; + + JSONObject finalResult = new JSONObject(); + JSONArray finalResultArr = new JSONArray(); + + try { + resultNode = mapper.readTree(results); + + final JsonNode bucketsNode = getBucketsNode(resultNode); + + if (bucketsNode.isArray()) { + + for (final JsonNode entityNode : bucketsNode) { + final JsonNode dateBucketNode = entityNode.get("group_by_date").get("buckets"); + if (dateBucketNode.isArray()) { + for (final JsonNode dateBucket : dateBucketNode) { + Long date = dateBucket.get("key").asLong(); + final JsonNode countBucketNode = + dateBucket.get("sort_by_date").get("hits").get("hits"); + + if (countBucketNode.isArray()) { + final JsonNode latestEntityNode = countBucketNode.get(0); + + long currentCount = latestEntityNode.get("_source").get("count").asLong(); + if (countByDateMap.containsKey(date)) { + // add to the value if map already contains this date + currentCount += countByDateMap.get(date); + } + + countByDateMap.put(date, currentCount); + } + } + + } + } + } + + /* + * Sort the map by epoch timestamp + */ + Map sortedMap = new TreeMap(countByDateMap); + for (Entry entry : sortedMap.entrySet()) { + JSONObject dateEntry = new JSONObject(); + dateEntry.put("date", entry.getKey()); + dateEntry.put("count", entry.getValue()); + finalResultArr.put(dateEntry); + } + + } catch (Exception exc) { + LOG.warn(AaiUiMsgs.ERROR_BUILDING_SEARCH_RESPONSE, exc.getLocalizedMessage()); + } + + return finalResult.put("result", finalResultArr); + } + + /** + * Format table output + * + * @param results The results + * @return The JSON object + * @throws JsonProcessingException The JSON processing exception + */ + public JSONObject formatTableOutput(String results) throws JsonProcessingException { + JsonNode resultNode = null; + + JSONObject finalResult = new JSONObject(); + JSONArray entitiesArr = new JSONArray(); + + 
Map entityCountInTable = initializeEntityMap(); + + long vnfCount = 0; + + try { + resultNode = mapper.readTree(results); + + final JsonNode bucketsNode = getBucketsNode(resultNode); + if (bucketsNode.isArray()) { + + for (final JsonNode entityNode : bucketsNode) { + String entityType = entityNode.get("key").asText(); + boolean isAVnf = vnfEntityTypesToSummarize.contains(entityType); + long countValue = 0; + + if (isAVnf || entityCountInTable.get(entityType) != null) { + final JsonNode hitsBucketNode = entityNode.get("sort_by_date").get("hits").get("hits"); + if (hitsBucketNode.isArray()) { + // the first bucket will be the latest + final JsonNode hitNode = hitsBucketNode.get(0); + + countValue = hitNode.get("_source").get("count").asLong(); + + /* + * Special case: Add all the VNF types together to get aggregate count + */ + if (summarizevnf && isAVnf) { + vnfCount += countValue; + countValue = vnfCount; + entityType = "vnf"; + } + + entityCountInTable.replace(entityType, countValue); + } + } + + } + } + for (Entry entry : entityCountInTable.entrySet()) { + JSONObject entityType = new JSONObject(); + entityType.put("key", entry.getKey()); + entityType.put("doc_count", entry.getValue()); + entitiesArr.put(entityType); + } + + finalResult.put("result", entitiesArr); + + } catch (Exception exc) { + LOG.warn(AaiUiMsgs.ERROR_BUILDING_RESPONSE_FOR_TABLE_QUERY, exc.getLocalizedMessage()); + } + + return finalResult; + } + + /** + * Gets the results + * + * @param response The response + * @param type The type + * @return The results + */ + public OperationResult getResults(Response response, String type) { + OperationResult operationResult = new OperationResult(); + + String requestString = + String.format("/%s/%s?pretty", elasticConfig.getEntityCountHistoryIndex(), SEARCH_STRING); + + String reqPayload = EntityHistoryQueryBuilder.getQuery(type).toString(); + + try { + final String fullUrlStr = ServletUtils.getFullUrl(elasticConfig, requestString); + OperationResult opResult = + restletUtils.executePostQuery(LOG, search, response, fullUrlStr, reqPayload); + + JSONObject finalOutput = null; + if (type.equalsIgnoreCase(TABLE)) { + finalOutput = formatTableOutput(opResult.getResult()); + } else if (type.equalsIgnoreCase(GRAPH)) { + finalOutput = formatLineGraphOutput(opResult.getResult()); + } + + if (finalOutput != null) { + response.setEntity(finalOutput.toString(), MediaType.APPLICATION_JSON); + operationResult.setResult(finalOutput.toString()); + } + } catch (JsonProcessingException exc) { + restletUtils.handleRestletErrors(LOG, "Unable to map JSONpayload", exc, response); + } + + return operationResult; + } + + /** + * Gets the buckets node + * + * @param node The node + * @return The buckets node + * @throws Exception The exception + */ + public JsonNode getBucketsNode(JsonNode node) throws Exception { + if (node.get("aggregations").get("group_by_entityType").get("buckets") != null) { + return node.get("aggregations").get("group_by_entityType").get("buckets"); + } else { + throw new Exception("Failed to map JSON response"); + } + } + + /** + * Initialize entity map + * + * @return the map + */ + private Map initializeEntityMap() { + Map entityMap = new HashMap(); + String[] entityTypes = visualConfigs.getEntityTypesToSummarize().split(","); + for (String entity : entityTypes) { + entityMap.put(entity, (long) 0); + } + + return entityMap; + } + + /** + * Extracts the "type" query parameter from the request URI + * + * @param exchange + * @return String containing the value of the "type" query 
parameter of the request. Returns null + * if no "type" parameter found + */ + public String getTypeParameter(Exchange exchange) { + String typeParameter = null; + + String requestUriParameterString = exchange.getIn().getHeader("CamelHttpQuery", String.class); + + if (null != requestUriParameterString) { + String[] requestParameterParts = requestUriParameterString.split("&"); + + String[] parameter = requestParameterParts[0].split("="); + String currentParameterKey = parameter[0]; + + if (null != currentParameterKey && !currentParameterKey.isEmpty()) { + // Check if we're looking at the "type" parameter key + if (currentParameterKey.equals(TYPE)) { + boolean uriIncludesTypeParameterValue = + (parameter.length >= 2) && !parameter[1].isEmpty(); + + if (uriIncludesTypeParameterValue) { + String typeParameterValue = parameter[1]; + + // Is the parameter value one that we return data for? + if (typeParameterValue.equalsIgnoreCase(TABLE) + || typeParameterValue.equalsIgnoreCase(GRAPH)) { + typeParameter = typeParameterValue; + } + } + } + } + } + + return typeParameter; + } + + public void setElasticConfig(ElasticSearchConfig elasticConfig) { + this.elasticConfig = elasticConfig; + } + + public void setRestletUtils(RestletUtils restletUtils) { + this.restletUtils = restletUtils; + } + + public void setSearch(SearchAdapter search) { + this.search = search; + } +} diff --git a/src/main/java/org/onap/aai/sparky/search/SearchResponse.java b/src/main/java/org/onap/aai/sparky/search/SearchResponse.java new file mode 100644 index 0000000..cddce49 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/search/SearchResponse.java @@ -0,0 +1,99 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.search.entity.SearchSuggestion; + +/** + * The Class SearchResponse. + */ +public class SearchResponse { + + private long processingTimeInMs; + private int totalFound; + + private List suggestions; + + /** + * Instantiates a new search response. 
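An illustrative population flow for the class below (the suggestion instance and start time are assumed):

    SearchResponse searchResponse = new SearchResponse();
    searchResponse.addSuggestion(suggestion);   // any SearchSuggestion implementation
    searchResponse.addToTotalFound(1);
    searchResponse.setProcessingTimeInMs(System.currentTimeMillis() - startTimeInMs);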
+ */ + public SearchResponse() { + this.suggestions = new ArrayList(); + this.processingTimeInMs = 0; + this.totalFound = 0; + } + + public long getProcessingTimeInMs() { + return processingTimeInMs; + } + + public void setProcessingTimeInMs(long processingTimeInMs) { + this.processingTimeInMs = processingTimeInMs; + } + + public int getTotalFound() { + return totalFound; + } + + public void setTotalFound(int totalFound) { + this.totalFound = totalFound; + } + + public List getSuggestions() { + return suggestions; + } + + public void setSuggestions(List suggestions) { + this.suggestions = suggestions; + } + + /** + * Adds the entity entry. + * + * @param suggestionEntry that will be converted to JSON + */ + public void addSuggestion(SearchSuggestion suggestionEntity) { + suggestions.add(suggestionEntity); + } + + /** + * Increments the total number of hits for this SearchResponse by the value passed in. + * + * @param additionalCount - Count to increment the total found + */ + public void addToTotalFound(int additionalCount) { + totalFound += additionalCount; + } + + @Override + public String toString() { + return "SearchResponse [processingTimeInMs=" + processingTimeInMs + ", totalFound=" + totalFound + + ", " + (suggestions != null ? "suggestions=" + suggestions : "") + "]"; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/search/Suggestion.java b/src/main/java/org/onap/aai/sparky/search/Suggestion.java deleted file mode 100644 index 72530ef..0000000 --- a/src/main/java/org/onap/aai/sparky/search/Suggestion.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.search; - -public class Suggestion { - private String entityType; - private String searchTags; - private SearchEntityProperties properties; - - public Suggestion(SearchEntityProperties properties) { - this.properties = properties; - } - - public String getEntityType() { - return entityType; - } - - public String getSearchTags() { - return searchTags; - } - - public SearchEntityProperties getProperties() { - return properties; - } - - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - public void setSearchTags(String searchTags) { - this.searchTags = searchTags; - } - - public void setProperties(SearchEntityProperties properties) { - this.properties = properties; - } -} diff --git a/src/main/java/org/onap/aai/sparky/search/SuggestionList.java b/src/main/java/org/onap/aai/sparky/search/SuggestionList.java deleted file mode 100644 index 5548ffb..0000000 --- a/src/main/java/org/onap/aai/sparky/search/SuggestionList.java +++ /dev/null @@ -1,70 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.search; - -import java.util.LinkedList; -import java.util.List; - -public class SuggestionList { - // TODO: verify which data type these fields should be - private Long processingTimeInMs; - private Long totalFound; - private Long numReturned; - private List suggestions = new LinkedList<>(); - - public void addSuggestion(Suggestion suggestion) { - suggestions.add(suggestion); - } - - public List getSuggestions() { - return suggestions; - } - - public void setSuggestions(List suggestions) { - this.suggestions = suggestions; - } - - public Long getProcessingTimeInMs() { - return processingTimeInMs; - } - - public Long getTotalFound() { - return totalFound; - } - - public Long getNumReturned() { - return numReturned; - } - - public void setProcessingTimeInMs(Long processingTimeInMs) { - this.processingTimeInMs = processingTimeInMs; - } - - public void setTotalFound(Long totalFound) { - this.totalFound = totalFound; - } - - public void setNumReturned(Long numReturned) { - this.numReturned = numReturned; - } -} diff --git a/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java b/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java new file mode 100644 index 0000000..2983163 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java @@ -0,0 +1,212 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeMap; + +import org.apache.camel.Exchange; +import org.apache.camel.component.restlet.RestletConstants; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.api.SearchProvider; +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.search.registry.SearchProviderRegistry; +import org.onap.aai.sparky.util.NodeUtils; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.MediaType; +import org.restlet.data.Status; + +import com.fasterxml.jackson.databind.ObjectMapper; + +public class UnifiedSearchProcessor { + + protected static final String HASH_ID_KEY = "hashId"; + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(UnifiedSearchProcessor.class); + + protected SearchProviderRegistry searchProviderRegistry; + protected ObjectMapper mapper; + protected boolean useOrderedSearchProviderKeys; + + public UnifiedSearchProcessor() { + mapper = new ObjectMapper(); + this.useOrderedSearchProviderKeys = false; + } + + public boolean isUseOrderedSearchProviderKeys() { + return useOrderedSearchProviderKeys; + } + + public void setUseOrderedSearchProviderKeys(boolean useOrderedSearchProviderKeys) { + this.useOrderedSearchProviderKeys = useOrderedSearchProviderKeys; + } + + public void search(Exchange exchange) { + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + xTransactionId = NodeUtils.getRandomTxnId(); + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + + Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + + /* + * Disables automatic Apache Camel Restlet component logging which prints out an undesirable log + * entry which includes client (e.g. 
browser) information + */ + request.setLoggable(false); + + ClientInfo clientInfo = request.getClientInfo(); + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, + clientInfo.getAddress() + ":" + clientInfo.getPort()); + + SearchResponse searchResponse = new SearchResponse(); + long processTime = System.currentTimeMillis(); + int totalAdded = 0; + + try { + String payload = exchange.getIn().getBody(String.class); + + if (payload == null || payload.isEmpty()) { + + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, "Request Payload is empty"); + + /* + * Don't throw back an error, just return an empty set + */ + + } else { + + QuerySearchEntity searchRequest = mapper.readValue(payload, QuerySearchEntity.class); + int maxResultsPerSearch = Integer.valueOf(searchRequest.getMaxResults()); + + Map> searchProviderSuggestions = + new HashMap>(); + + int totalSuggestionsFromProviders = 0; + List suggestions = null; + for (SearchProvider searchProvider : searchProviderRegistry.getSearchProviders()) { + suggestions = searchProvider.search(searchRequest); + totalSuggestionsFromProviders += suggestions.size(); + searchProviderSuggestions.put(searchProvider.getClass().getCanonicalName(), suggestions); + } + + /* + * Using ordered search provider keys allows us to deterministically calculate how many + * results from each provider should be returned. At the moment, this behavior is primarily + * only beneficial to test classes. As there is a cost to sorted-collections in the call + * processing path, this behavior has been made optional. + */ + + if (useOrderedSearchProviderKeys) { + searchProviderSuggestions = + new TreeMap>(searchProviderSuggestions); + } + + if (totalSuggestionsFromProviders > 0) { + + int suggestionIndex = 0; + + Set>> searchProviderResults = + searchProviderSuggestions.entrySet(); + + while (totalAdded < maxResultsPerSearch && (totalAdded < totalSuggestionsFromProviders)) { + + for (Entry> searchProviderResultList : searchProviderResults) { + + if ((suggestionIndex <= (searchProviderResultList.getValue().size() - 1))) { + + if (totalAdded < maxResultsPerSearch) { + searchResponse + .addSuggestion(searchProviderResultList.getValue().get(suggestionIndex)); + totalAdded++; + } + } + + } + + suggestionIndex++; + + } + + } + + } + + searchResponse.addToTotalFound(totalAdded); + String searchResponseJson = NodeUtils.convertObjectToJson(searchResponse, true); + + processTime = System.currentTimeMillis() - processTime; + searchResponse.setProcessingTimeInMs(processTime); + + Response response = + exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + response.setStatus(Status.SUCCESS_OK); + response.setEntity(searchResponseJson, MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, + "Query search failed with error = " + exc.getMessage()); + exchange.getOut().setBody( + generateJsonErrorResponse("Error while building response. Error = " + exc.getMessage()), + String.class); + } + } + + public SearchProviderRegistry getSearchProviderRegistry() { + return searchProviderRegistry; + } + + public void setSearchProviderRegistry(SearchProviderRegistry searchProviderRegistry) { + this.searchProviderRegistry = searchProviderRegistry; + } + + + /* + * This is the manual approach, however we could also create an object container for the error + * then use the Jackson ObjectWrite to dump the object to json instead. 
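For a concrete sense of the merge behaviour implemented in the loop above: with maxResults set to 10 and three registered providers returning 7, 5 and 2 suggestions, the loop takes one suggestion from each provider per pass until the cap is reached, so the response ends up with 4, 4 and 2 of their suggestions rather than all 10 coming from the first provider (the counts here are an illustrative example, not output from this patch).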
If it gets any more + * complicated we could do that approach so we don't have to manually trip over the JSON + * formatting. + */ + protected String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : %s }", message); + } + +} diff --git a/src/main/java/org/onap/aai/sparky/search/VnfSearchQueryBuilder.java b/src/main/java/org/onap/aai/sparky/search/VnfSearchQueryBuilder.java deleted file mode 100644 index 9e206b3..0000000 --- a/src/main/java/org/onap/aai/sparky/search/VnfSearchQueryBuilder.java +++ /dev/null @@ -1,174 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.search; - -import java.util.Date; -import java.util.Map; - -import javax.json.Json; -import javax.json.JsonArray; -import javax.json.JsonArrayBuilder; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; - -/** - * Build a JSON payload to send to elastic search to get vnf search data. - */ - -public class VnfSearchQueryBuilder { - - /** - * Creates the suggestions query. 
- * - * @param maxResults maximum number of suggestions to fetch - * @param queryStr query string - * @return the json object - */ - - /* - * { "vnfs" : { "text" : "VNFs", "completion" : { "field" : "entity_suggest", "size": 1 } } } - */ - public static JsonObject createSuggestionsQuery(String maxResults, String queryStr) { - JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); - - JsonObjectBuilder completionBlob = Json.createObjectBuilder(); - completionBlob.add("field", "entity_suggest"); - completionBlob.add("size", maxResults); - - JsonObjectBuilder jsonAllBuilder = Json.createObjectBuilder(); - jsonAllBuilder.add("text", queryStr); - jsonAllBuilder.add("completion", completionBlob); - - jsonBuilder.add("vnfs", jsonAllBuilder.build()); - return jsonBuilder.build(); - } - - public static JsonObject getTermBlob(String key, String value) { - JsonObjectBuilder termBlobBuilder = Json.createObjectBuilder(); - JsonObjectBuilder jsonBuilder = Json.createObjectBuilder().add(key, value); - return termBlobBuilder.add("term", jsonBuilder.build()).build(); - } - - public static void getSummaryAggsBlob(JsonObjectBuilder aggsBlobBuilder, String aggsKey, - int resultSize) { - JsonObjectBuilder fieldBuilder = - Json.createObjectBuilder().add("field", aggsKey).add("size", resultSize); - JsonObject aggsFieldBlob = fieldBuilder.build(); - JsonObjectBuilder defaultBlobBuilder = Json.createObjectBuilder().add("terms", aggsFieldBlob); - JsonObject defaultBlob = defaultBlobBuilder.build(); - aggsBlobBuilder.add("default", defaultBlob); - } - - public static void buildSingleTermCountQuery(JsonObjectBuilder jsonBuilder, String key, - String value) { - jsonBuilder.add("query", getTermBlob(key, value)); - } - - public static void buildSingleTermSummaryQuery(JsonObjectBuilder jsonBuilder, String key, - String value, String groupByKey) { - JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder(); - JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); - - queryBlobBuilder.add("constant_score", - Json.createObjectBuilder().add("filter", getTermBlob(key, value))); - - getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); - - jsonBuilder.add("query", queryBlobBuilder.build()); - jsonBuilder.add("aggs", aggsBlobBuilder.build()); - } - - public static void buildMultiTermSummaryQuery(JsonObjectBuilder jsonBuilder, - Map attributes, String groupByKey) { - JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder(); - JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); - JsonArrayBuilder mustBlobBuilder = Json.createArrayBuilder(); - for (String key : attributes.keySet()) { - mustBlobBuilder.add(getTermBlob(key, attributes.get(key))); - } - JsonArray mustBlob = mustBlobBuilder.build(); - - queryBlobBuilder.add("constant_score", Json.createObjectBuilder().add("filter", - Json.createObjectBuilder().add("bool", Json.createObjectBuilder().add("must", mustBlob)))); - - getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); - - jsonBuilder.add("query", queryBlobBuilder.build()); - jsonBuilder.add("aggs", aggsBlobBuilder.build()); - } - - public static void buildZeroTermSummaryQuery(JsonObjectBuilder jsonBuilder, String groupByKey) { - JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); - - getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); - - jsonBuilder.add("aggs", aggsBlobBuilder.build()); - } - - public static void buildMultiTermCountQuery(JsonObjectBuilder jsonBuilder, - Map attributes) { - JsonArrayBuilder mustBlobBuilder = Json.createArrayBuilder(); - for (String key : 
attributes.keySet()) { - mustBlobBuilder.add(getTermBlob(key, attributes.get(key))); - } - jsonBuilder.add("query", Json.createObjectBuilder().add("bool", - Json.createObjectBuilder().add("must", mustBlobBuilder))); - } - - - - public static JsonObject createSummaryByEntityTypeQuery(Map attributes, - String groupByKey) { - JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); - jsonBuilder.add("size", "0"); // avoid source data - if (attributes.size() == 0) { - buildZeroTermSummaryQuery(jsonBuilder, groupByKey); - } else if (attributes.size() == 1) { - Map.Entry entry = attributes.entrySet().iterator().next(); - buildSingleTermSummaryQuery(jsonBuilder, entry.getKey(), entry.getValue(), groupByKey); - } else { - buildMultiTermSummaryQuery(jsonBuilder, attributes, groupByKey); - } - return jsonBuilder.build(); - } - - public static JsonObject createEntityCountsQuery(Map attributes) { - JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); - if (attributes.size() == 1) { - Map.Entry entry = attributes.entrySet().iterator().next(); - buildSingleTermCountQuery(jsonBuilder, entry.getKey(), entry.getValue()); - } else { - buildMultiTermCountQuery(jsonBuilder, attributes); - } - return jsonBuilder.build(); - } - - public static JsonArray getSortCriteria(String sortFieldName, String sortOrder) { - JsonArrayBuilder jsonBuilder = Json.createArrayBuilder(); - jsonBuilder.add(Json.createObjectBuilder().add(sortFieldName, - Json.createObjectBuilder().add("order", sortOrder))); - - return jsonBuilder.build(); - } - -} diff --git a/src/main/java/org/onap/aai/sparky/search/VnfSearchService.java b/src/main/java/org/onap/aai/sparky/search/VnfSearchService.java deleted file mode 100644 index 654aad0..0000000 --- a/src/main/java/org/onap/aai/sparky/search/VnfSearchService.java +++ /dev/null @@ -1,348 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.search; - -import java.io.IOException; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; - -import javax.json.Json; -import javax.json.JsonObject; -import javax.json.JsonObjectBuilder; -import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.core.MediaType; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.onap.aai.sparky.dal.elasticsearch.HashQueryResponse; -import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.suggestivesearch.SuggestionEntity; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; -import org.onap.aai.sparky.viewandinspect.entity.QuerySearchEntity; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - - -/** - * From the given HTTP request, create vnf-search query for document store, and process document - * store response. - */ - -public class VnfSearchService { - - private static final String APP_JSON = MediaType.APPLICATION_JSON; - - private static ElasticSearchConfig esConfig = null; - - private static final Logger LOG = LoggerFactory.getInstance().getLogger(VnfSearchService.class); - - private static SearchAdapter search = null; - private static final String ES_SUGGEST_API = TierSupportUiConstants.ES_SUGGEST_API; - private static final String ES_COUNT_API = TierSupportUiConstants.ES_COUNT_API; - private static final String ES_SEARCH_API = TierSupportUiConstants.ES_SEARCH_API; - - private static final String ENTITY_TYPE = "generic-vnf"; - - /** - * Get Full URL for search using elastic search configuration. - * - * @param api the api - * @return the full url - */ - private static String getFullUrl(String indexName, String api) { - - final String host = esConfig.getIpAddress(); - final String port = esConfig.getHttpPort(); - return String.format("http://%s:%s/%s/%s", host, port, indexName, api); - } - - /** - * Process operation result. - * - * @param api the api - * @param response the response - * @param opResult the op result - * @throws IOException Signals that an I/O exception has occurred. 
- */ - private static void buildVnfQuerySearchResponse(String apiKey, HttpServletResponse response, - OperationResult opResult) throws IOException { - int resonseCode = opResult.getResultCode(); - String result = opResult.getResult(); - - if (resonseCode > 300) { - setServletResponse(true, resonseCode, response, result); - return; - } - - if (result != null) { - JSONObject finalOutputToFe = new JSONObject(); - JSONObject responseJson = new JSONObject(result); - - if (apiKey.equalsIgnoreCase(ES_SUGGEST_API)) { // process suggestion results - try { - String suggestionsKey = "vnfs"; - int total = 0; - JSONArray suggestionsArray = new JSONArray(); - JSONArray suggestions = responseJson.getJSONArray(suggestionsKey); - if (suggestions.length() > 0) { - suggestionsArray = suggestions.getJSONObject(0).getJSONArray("options"); - for (int i = 0; i < suggestionsArray.length(); i++) { - suggestionsArray.getJSONObject(i).remove("score"); // FE doesn't like this noise: - // 'score' - } - - total = suggestionsArray.length(); - } - finalOutputToFe.put("totalFound", total); - finalOutputToFe.put("suggestions", suggestionsArray); - } catch (Exception e) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "Error parsing response from suggestions index. Response: " + result); - } - } else if (apiKey.equalsIgnoreCase(ES_COUNT_API)) { - try { - String shardsKey = "_shards"; - responseJson.remove(shardsKey); - finalOutputToFe = responseJson; - } catch (Exception e) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "Error fetching total count response from aggregation index. Response: " + result); - } - } else if (apiKey.equalsIgnoreCase(ES_SEARCH_API)) { - try { - JSONArray bucketsArray = (responseJson.getJSONObject("aggregations") - .getJSONObject("default").getJSONArray("buckets")); - int count = 0; - for (int i = 0; i < bucketsArray.length(); i++) { - count += bucketsArray.getJSONObject(i).getInt("doc_count"); - } - JSONObject content = new JSONObject(); - content.put("totalChartHits", count); - content.put("buckets", bucketsArray); - finalOutputToFe.put("groupby_aggregation", content); - } catch (Exception e) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "Error fetching group-by query response from aggregation index. Response: " + result); - } - } - - setServletResponse(false, resonseCode, response, finalOutputToFe.toString()); - } - } - - /** - * Sets the servlet response. - * - * @param isError the is error - * @param responseCode the response code - * @param response the response - * @param postPayload the post payload - * @throws IOException Signals that an I/O exception has occurred. - */ - public static void setServletResponse(boolean isError, int responseCode, - HttpServletResponse response, String postPayload) throws IOException { - - if (isError) { - LOG.error(AaiUiMsgs.ERROR_PARSING_JSON_PAYLOAD_VERBOSE, postPayload); - } - - response.setStatus(responseCode); - - if (postPayload != null) { - response.setContentType(APP_JSON); - PrintWriter out = response.getWriter(); - out.println(postPayload); - out.close(); - } - } - - /** - * Instantiates a new vnf search service. - */ - public VnfSearchService() { - try { - if (esConfig == null) { - esConfig = ElasticSearchConfig.getConfig(); - } - - if (search == null) { - search = new SearchAdapter(); - } - } catch (Exception exc) { - LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, "Search"); - } - } - - - /** - * Gets the suggestions results. 
- * - * @param response the response - * @param maxResults maximum number of suggestions - * @param queryStr query string - * @return the suggestions results - * @throws IOException Signals that an I/O exception has occurred. - */ - public List getSuggestionsResults(QuerySearchEntity querySearchEntity, - int resultCountLimit) throws IOException { - List returnList = new ArrayList(); - - /* Create suggestions query */ - JsonObject vnfSearch = VnfSearchQueryBuilder - .createSuggestionsQuery(String.valueOf(resultCountLimit), querySearchEntity.getQueryStr()); - - /* Parse suggestions response */ - OperationResult opResult = - search.doPost(getFullUrl(esConfig.getAutosuggestIndexname(), ES_SUGGEST_API), - vnfSearch.toString(), APP_JSON); - - String result = opResult.getResult(); - - if (!opResult.wasSuccessful()) { - LOG.error(AaiUiMsgs.ERROR_PARSING_JSON_PAYLOAD_VERBOSE, result); - return returnList; - } - - JSONObject responseJson = new JSONObject(result); - String suggestionsKey = "vnfs"; - JSONArray suggestionsArray = new JSONArray(); - JSONArray suggestions = responseJson.getJSONArray(suggestionsKey); - if (suggestions.length() > 0) { - suggestionsArray = suggestions.getJSONObject(0).getJSONArray("options"); - for (int i = 0; i < suggestionsArray.length(); i++) { - JSONObject querySuggestion = suggestionsArray.getJSONObject(i); - if (querySuggestion != null) { - SuggestionEntity responseSuggestion = new SuggestionEntity(); - responseSuggestion.setText(querySuggestion.getString("text")); - responseSuggestion.setRoute("vnfSearch"); // TODO -> Read route from - // suggestive-search.properties instead of hard - // coding - responseSuggestion - .setHashId(NodeUtils.generateUniqueShaDigest(querySuggestion.getString("text"))); - returnList.add(responseSuggestion); - } - } - } - return returnList; - } - - - /** - * This method sets server response if lookup in ES has 0 count TODO: Change the response code to - * appropriate when FE-BE contract is finalized - * - * @param response - */ - public void setZeroCountResponse(HttpServletResponse response) throws IOException { - JSONObject payload = new JSONObject(); - payload.put("count", 0); - setServletResponse(false, 200, response, payload.toString()); - } - - /** - * This method sets server response if lookup in ES for an aggregation has 0 results TODO: Change - * the response code to appropriate when FE-BE contract is finalized - * - * @param response - */ - public void setEmptyAggResponse(HttpServletResponse response) throws IOException { - JSONObject aggPayload = new JSONObject(); - aggPayload.put("totalChartHits", 0); - aggPayload.put("buckets", new JSONArray()); - JSONObject payload = new JSONObject(); - payload.append("groupby_aggregation", aggPayload); - setServletResponse(false, 200, response, payload.toString()); - } - - public HashQueryResponse getJSONPayloadFromHash(String hashId) { - - HashQueryResponse hashQueryResponse = new HashQueryResponse(); - JsonObjectBuilder hashSearch = Json.createObjectBuilder(); - VnfSearchQueryBuilder.buildSingleTermCountQuery(hashSearch, "_id", hashId); - String hashSearchQuery = hashSearch.build().toString(); - OperationResult opResult = search.doPost( - getFullUrl(esConfig.getAutosuggestIndexname(), ES_SEARCH_API), hashSearchQuery, APP_JSON); - hashQueryResponse.setOpResult(opResult); - - if (opResult != null && opResult.wasSuccessful()) { - String result = opResult.getResult(); - if (result != null) { - JSONObject responseJson = new JSONObject(result); - JSONArray hits = 
responseJson.getJSONObject("hits").getJSONArray("hits"); - if (hits != null && hits.length() > 0) { - hashQueryResponse.setJsonPayload(hits.getJSONObject(0).getJSONObject("_source") - .getJSONObject("entity_suggest").toString()); - } - } - } - return hashQueryResponse; - } - - public void getEntityCountResults(HttpServletResponse response, Map attributes) - throws IOException { - // Create entity counts query - JsonObject vnfSearch = VnfSearchQueryBuilder.createEntityCountsQuery(attributes); - - // Parse response for entity counts query - OperationResult opResult = search.doPost( - getFullUrl(TierSupportUiConstants.getAggregationIndexName(ENTITY_TYPE), ES_COUNT_API), - vnfSearch.toString(), APP_JSON); - buildVnfQuerySearchResponse(ES_COUNT_API, response, opResult); - } - - public void getSummaryByEntityType(HttpServletResponse response, Map attributes, - String groupByKey) throws IOException { - // Create query for summary by entity type - JsonObject vnfSearch = - VnfSearchQueryBuilder.createSummaryByEntityTypeQuery(attributes, groupByKey); - - // Parse response for summary by entity type query - OperationResult opResult = search.doPost( - getFullUrl(TierSupportUiConstants.getAggregationIndexName(ENTITY_TYPE), ES_SEARCH_API), - vnfSearch.toString(), APP_JSON); - buildVnfQuerySearchResponse(ES_SEARCH_API, response, opResult); - } - - public SearchAdapter getSearch() { - return search; - } - - public void setSearch(SearchAdapter search) { - VnfSearchService.search = search; - } - - public static ElasticSearchConfig getEsConfig() { - return esConfig; - } - - public static void setEsConfig(ElasticSearchConfig esConfig) { - VnfSearchService.esConfig = esConfig; - } -} diff --git a/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java b/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java new file mode 100644 index 0000000..e593c3e --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java @@ -0,0 +1,34 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.api; + +import java.util.List; + +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; + +public interface SearchProvider { + + List search(QuerySearchEntity queryRequest); + +} diff --git a/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java b/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java index 5ce4d3c..9208354 100644 --- a/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java +++ b/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java @@ -54,7 +54,7 @@ public class SuggestionConfig { private String defaultPairingValue; - private SuggestionConfig() {} + public SuggestionConfig() {} /** * Returns initialized instance as per singleton pattern. @@ -69,6 +69,10 @@ public class SuggestionConfig { return config; } + public static void setConfig(SuggestionConfig config) { + SuggestionConfig.config = config; + } + public void initializeConfigProperties() { Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE); diff --git a/src/main/java/org/onap/aai/sparky/search/entity/ExternalSearchRequestEntity.java b/src/main/java/org/onap/aai/sparky/search/entity/ExternalSearchRequestEntity.java new file mode 100644 index 0000000..465eadc --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/search/entity/ExternalSearchRequestEntity.java @@ -0,0 +1,69 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
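The SearchProvider interface above is the extension point behind this change: a provider maps a QuerySearchEntity (the parsed query string and result limit) to a list of SearchSuggestion objects, and UnifiedSearchProcessor merges the lists from every provider registered in the SearchProviderRegistry. A minimal sketch of a custom provider follows; the class name, the echoed suggestion, and the use of CommonSearchSuggestion as the concrete suggestion type are illustrative assumptions rather than part of this patch.

    import java.util.ArrayList;
    import java.util.List;

    import org.onap.aai.sparky.common.search.CommonSearchSuggestion;
    import org.onap.aai.sparky.search.api.SearchProvider;
    import org.onap.aai.sparky.search.entity.QuerySearchEntity;
    import org.onap.aai.sparky.search.entity.SearchSuggestion;
    import org.onap.aai.sparky.util.NodeUtils;

    public class CustomViewSearchProvider implements SearchProvider {

      @Override
      public List<SearchSuggestion> search(QuerySearchEntity queryRequest) {
        List<SearchSuggestion> results = new ArrayList<>();

        // Hypothetical body: a real provider would query its own index or service.
        // Here the query string is simply echoed back as a single suggestion,
        // assuming CommonSearchSuggestion exposes the SearchSuggestion setters.
        CommonSearchSuggestion suggestion = new CommonSearchSuggestion();
        suggestion.setText(queryRequest.getQueryStr());
        suggestion.setRoute("customView");
        suggestion.setHashId(NodeUtils.generateUniqueShaDigest(queryRequest.getQueryStr()));
        results.add(suggestion);

        return results;
      }
    }

Registering the provider, for example from a bean initialization method, is then a single call such as searchProviderRegistry.addSearchProvider(new CustomViewSearchProvider()), after which the unified search endpoint will include its suggestions alongside those of the built-in providers.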
+ */ +package org.onap.aai.sparky.search.entity; + +public class ExternalSearchRequestEntity { + private String view; + private String entityId; + private String entityType; + + public ExternalSearchRequestEntity() { + this.view = ""; + this.entityId = ""; + this.entityType = ""; + } + + public String getView() { + return view; + } + + public void setView(String view) { + this.view = view; + } + + public String getEntityId() { + return entityId; + } + + public void setEntityId(String entityId) { + this.entityId = entityId; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String createQueryString() { + return entityId + " " + entityType; + } + + @Override + public String toString() { + return "ExternalRequestEntitySearchEntity [view=" + view + ", entityId=" + entityId + + ", entityType=" + entityType + "]"; + } +} diff --git a/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java b/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java new file mode 100644 index 0000000..d90e329 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java @@ -0,0 +1,71 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.entity; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +/** + * The Class ViewAndInspectSearchRequest. + */ +public class QuerySearchEntity { + + private static final String DEFAULT_MAX_RESULTS = "10"; + public String maxResults; + public String queryStr; + + /** + * Instantiates a new view and inspect search request. 
+ */ + public QuerySearchEntity() { + maxResults = DEFAULT_MAX_RESULTS; + queryStr = null; + } + + public String getMaxResults() { + return maxResults; + } + + public void setMaxResults(String maxResults) { + this.maxResults = maxResults; + } + + public String getQueryStr() { + return queryStr; + } + + public void setQueryStr(String queryStr) { + this.queryStr = queryStr; + } + + @JsonIgnore + public String[] getSearchTerms() { + + if (queryStr == null) { + return null; + } + + return queryStr.split(" "); + + } + +} diff --git a/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java b/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java new file mode 100644 index 0000000..823cf5a --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java @@ -0,0 +1,37 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.entity; + +public interface SearchSuggestion { + public String getHashId(); + + public void setHashId(String hashId); + + public String getRoute(); + + public void setRoute(String route); + + public String getText(); + + public void setText(String searchText); +} diff --git a/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java b/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java index a846e88..5f5dc74 100644 --- a/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java +++ b/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java @@ -34,6 +34,7 @@ import org.onap.aai.cl.eelf.LoggerFactory; import org.onap.aai.restclient.client.OperationResult; import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.dataintegrity.config.DiUiConstants; import org.onap.aai.sparky.logging.AaiUiMsgs; import org.onap.aai.sparky.search.filters.config.UiFilterDataSourceConfig; import org.onap.aai.sparky.search.filters.entity.UiFilterEntity; @@ -42,13 +43,15 @@ import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; /** * Performs all Elasticsearch related queries for filters related to the Sparky-FE. 
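QuerySearchEntity above is the request body that UnifiedSearchProcessor.search() deserializes, so a unified-search call carries a small JSON document along the lines of {"queryStr": "vserver", "maxResults": "10"}; both values are strings, maxResults defaults to "10" when omitted, and the query value shown here is only an example.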
+ * + * @author RICHARV */ public class FilterElasticSearchAdapter { private static ElasticSearchConfig esConfig = null; private static SearchAdapter search = null; private static final String ES_SEARCH_API = TierSupportUiConstants.ES_SEARCH_API; - private static final String APP_JSON = "application/json"; + private static final String APP_JSON = DiUiConstants.APP_JSON; private static final Logger LOG = LoggerFactory.getInstance().getLogger(FilterElasticSearchAdapter.class); private static final String AGGS = "aggregations"; @@ -104,7 +107,7 @@ public class FilterElasticSearchAdapter { FilterQueryBuilder.createFilterValueQueryObject(dataSourceConfig.getFieldName()); } - org.onap.aai.sparky.dal.rest.OperationResult opResult = + OperationResult opResult = search.doPost(getFullUrl(dataSourceConfig.getIndexName(), ES_SEARCH_API), filterValueQuery.toString(), APP_JSON); diff --git a/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java b/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java index fdcf6b2..b22db96 100644 --- a/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java +++ b/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java @@ -32,7 +32,6 @@ import org.apache.camel.component.restlet.RestletConstants; import org.onap.aai.cl.api.Logger; import org.onap.aai.cl.eelf.LoggerFactory; import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.search.filters.FilteredSearchHelper; import org.onap.aai.sparky.search.filters.entity.UiFilterEntity; import org.onap.aai.sparky.search.filters.entity.UiFiltersEntity; import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; diff --git a/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java b/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java index 3853913..b202684 100644 --- a/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java +++ b/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java @@ -27,7 +27,7 @@ import java.io.File; import org.onap.aai.cl.api.Logger; import org.onap.aai.cl.eelf.LoggerFactory; import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; import com.fasterxml.jackson.databind.ObjectMapper; diff --git a/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java b/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java new file mode 100644 index 0000000..d3cca45 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java @@ -0,0 +1,74 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.registry; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.search.api.SearchProvider; + +/** + * Make this a java-scoped singleton to resolve the contextual issue spanning a Spring Context and + * accessing the SPR in other parts of the code that are not directly instantiated by a Spring Bean. + * Eventually the SPR doesn’t have to be a real singleton, it could simply be a Spring bean scoped + * as a singleton and then wired in via dependency injection to the classes that need it. But I’m + * not there yet. This will get a demonstrable extension mechanism in place quickly at practically + * no cost, beyond what’s already in the email plus some testing. + */ + +public class SearchProviderRegistry { + + private List searchProviders; + + public SearchProviderRegistry() { + searchProviders = new ArrayList(); + } + + public List getSearchProviders() { + return searchProviders; + } + + public final void addSearchProvider(SearchProvider searchProvider) { + + if (searchProvider == null) { + return; + } + + if (!searchProviders.contains(searchProvider)) { + searchProviders.add(searchProvider); + } + } + + public final void addSearchProviders(List searchProviders) { + + if (searchProviders == null) { + return; + } + + for (SearchProvider searchProvider : searchProviders) { + addSearchProvider(searchProvider); + } + + } + +} diff --git a/src/main/java/org/onap/aai/sparky/security/EcompSso.java b/src/main/java/org/onap/aai/sparky/security/EcompSso.java index 16e01c0..de74a5a 100644 --- a/src/main/java/org/onap/aai/sparky/security/EcompSso.java +++ b/src/main/java/org/onap/aai/sparky/security/EcompSso.java @@ -25,13 +25,12 @@ package org.onap.aai.sparky.security; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig; import org.onap.aai.cl.api.Logger; import org.onap.aai.cl.eelf.LoggerFactory; -import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig; import org.openecomp.portalsdk.core.onboarding.util.CipherUtil; - +import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties; /** * Provides authentication services for onboarded ECOMP applications. @@ -80,7 +79,7 @@ public class EcompSso { * then searches for a CSP cookie; if not found, for a WebJunction header. * * @param request - * @return User ID if the ECOMP cookie is present and the sign-on process established an User ID; + * @return ATT UID if the ECOMP cookie is present and the sign-on process established an ATT UID; * else null. 
*/ public static String validateEcompSso(HttpServletRequest request) { @@ -100,23 +99,23 @@ public class EcompSso { } /** - * Searches the specified request for the CSP cookie, decodes it and gets the User ID. + * Searches the specified request for the CSP cookie, decodes it and gets the ATT UID. * * @param request - * @return User ID if the cookie is present in the request and can be decoded successfully - * (expired cookies do not decode); else null. + * @return ATTUID if the cookie is present in the request and can be decoded successfully (expired + * cookies do not decode); else null. */ private static String getLoginIdFromCookie(HttpServletRequest request) { - String userid = null; + String attuid = null; try { String[] cspFields = getCspData(request); if (cspFields != null && cspFields.length > 5) - userid = cspFields[5]; + attuid = cspFields[5]; } catch (Throwable t) { LOG.info(AaiUiMsgs.LOGIN_FILTER_INFO, "getLoginIdFromCookie failed " + t.getLocalizedMessage()); } - return userid; + return attuid; } /** diff --git a/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java b/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java new file mode 100644 index 0000000..51e77bb --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java @@ -0,0 +1,274 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.security.filter; + +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; + +// import esGateKeeper.esGateKeeper; + +/** + * Redirects to the AT&T global login page if the user is not authenticated.
    + * Filter properties need to be configured in: csp-cookie-filter.properties + */ +public class CspCookieFilter implements Filter { + + /** Redirect URL for the login page. */ + private String globalLoginUrl; + + /** Application identifier. */ + private String applicationId; + + /** Gatekeeper environment setting (development or production). */ + private String gateKeeperEnvironment; + + private static final String FILTER_PARAMETER_CONFIG = "config"; + private static final String PROPERTY_GLOBAL_LOGIN_URL = "global.login.url"; + private static final String PROPERTY_APPLICATION_ID = "application.id"; + private static final String PROPERTY_GATEKEEPER_ENVIRONMENT = "gatekeeper.environment"; + // valid open redirect domains + private List redirectDomains = new ArrayList<>(); + private static final String PROPERTY_REDIRECT_DOMAINS = "redirect-domain"; + + /** Needed by esGateKeeper, does not accept any other value. */ + private static final String GATEKEEPER_ACCOUNT_NAME = "CSP"; + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(CspCookieFilter.class); + + + /* + * (non-Javadoc) + * + * @see javax.servlet.Filter#init(javax.servlet.FilterConfig) + */ + @Override + public void init(FilterConfig filterConfig) throws ServletException { + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "CspCookieFilter", "", "Init", ""); + + try { + setConfigurationProperties(filterConfig); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_CSP_CONFIG_FILE); + throw new ServletException(exc); + } + } + + + /* + * (non-Javadoc) + * + * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, + * javax.servlet.FilterChain) + */ + @Override + public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) + throws IOException, ServletException { + HttpServletRequest request = (HttpServletRequest) req; + HttpServletResponse response = (HttpServletResponse) res; + + Cookie[] cookies = request.getCookies(); + if ((cookies == null) || (cookies.length == 0)) { + doLogin(request, response); + return; + } + + /* + * String attEsSec = getSecurityCookie(cookies); + * + * if (attESSec == null || attESSec.length() == 0) { doLogin(request, response); return; } + * + * String attESSecUnEncrypted = esGateKeeper.esGateKeeper(attESSec, GATEKEEPER_ACCOUNT_NAME, + * gateKeeperEnvironment); if (attESSecUnEncrypted == null) { doLogin(request, response); } else + * { + */ + // LOG.info("User has valid cookie"); + chain.doFilter(request, response); + // } + } + + + /* + * (non-Javadoc) + * + * @see javax.servlet.Filter#destroy() + */ + @Override + public void destroy() {} + + /** + * Sets all required properties needed by this filter. + * + * @param filterConfig the filter configuration defined in the application web.xml + * @throws IOException if the properties failed to load. 
+ */ + private void setConfigurationProperties(FilterConfig filterConfig) throws IOException { + InputStream inputStream = new FileInputStream(TierSupportUiConstants.STATIC_CONFIG_APP_LOCATION + + filterConfig.getInitParameter(FILTER_PARAMETER_CONFIG)); + Properties cspProperties = new Properties(); + cspProperties.load(inputStream); + globalLoginUrl = cspProperties.getProperty(PROPERTY_GLOBAL_LOGIN_URL); + applicationId = cspProperties.getProperty(PROPERTY_APPLICATION_ID); + gateKeeperEnvironment = cspProperties.getProperty(PROPERTY_GATEKEEPER_ENVIRONMENT); + redirectDomains = + Arrays.asList(cspProperties.getProperty(PROPERTY_REDIRECT_DOMAINS).split(",")); + } + + /** + * Returns the attESSec cookie if found in the client. + * + * @param cookies the cookies available in the client + * @return the attESSec authentication cookie generated by the login page. + */ + private String getSecurityCookie(Cookie[] cookies) { + String attEsSec = null; + for (int i = 0; i < cookies.length; i++) { + Cookie thisCookie = cookies[i]; + String cookieName = thisCookie.getName(); + + if ("attESSec".equals(cookieName)) { + attEsSec = thisCookie.getValue(); + break; + } + } + return attEsSec; + } + + /** + * Redirects to the AT&T global login page. If this is an AJAX request it returns an unauthorized + * HTTP error in the response. + * + * @param request the filter request object + * @param response the filter response object + * @throws IOException if there is an error setting the error response + */ + private void doLogin(HttpServletRequest request, HttpServletResponse response) + throws IOException { + if (isAjaxRequest(request)) { + response.sendError(HttpServletResponse.SC_UNAUTHORIZED, + "User is not authorized. Please login to application"); + } else { + // Fix for Safari 7.0.2 onwards to avoid login page cache + response.addHeader("Cache-Control", "no-cache, no-store"); + String redirectURL = createRedirectUrl(request); + if (this.isValidRedirectURL(redirectURL)) { + response.sendRedirect(redirectURL); + LOG.debug(AaiUiMsgs.VALID_REDIRECT_URL, redirectURL); + } else { + response.sendError(400, "Bad redirect URL: " + redirectURL); + LOG.error(AaiUiMsgs.INVALID_REDIRECT_URL, redirectURL); + } + } + } + + /** + * Checks if a redirect url is valid + * + * @param url URL to validate + * @return true if URL is a valid redirect URL, false otherwise + */ + private boolean isValidRedirectURL(String url) { + String redirectTo = url.substring(url.indexOf("?retURL=") + "?retURL=".length()); + try { + redirectTo = URLDecoder.decode(redirectTo, StandardCharsets.UTF_8.toString()); + } catch (UnsupportedEncodingException e) { + LOG.error(AaiUiMsgs.UNSUPPORTED_URL_ENCODING, e.getLocalizedMessage()); + return false; + } + for (String domain : this.redirectDomains) { + if (redirectTo.endsWith(domain)) + return true; + } + return false; + } + + + /** + * Returns true if the request is an AJAX request. + * + * @param request the filter request object + * @return true if the request is an AJAX request. + */ + private boolean isAjaxRequest(HttpServletRequest request) { + String headerValue = request.getHeader("X-Requested-With"); + if ("XMLHttpRequest".equals(headerValue)) { + return true; + } + return false; + } + + /** + * Returns the redirection URL to the AT&T Global login page. 
+ * + * @param request the request + * @return the string + * @throws UnsupportedEncodingException the unsupported encoding exception + */ + private String createRedirectUrl(HttpServletRequest request) throws UnsupportedEncodingException { + String returnUrl = getReturnUrl(request); + + return globalLoginUrl + "?retURL=" + returnUrl + "&sysName=" + applicationId; + } + + /** + * Gets the URL encoded return URL. + * + * @param request the HTTP request + * @return an encoded URL to return to following login + * @throws UnsupportedEncodingException the unsupported encoding exception + */ + private String getReturnUrl(HttpServletRequest request) throws UnsupportedEncodingException { + StringBuffer retUrl = request.getRequestURL(); + String urlParams = request.getQueryString(); + if (urlParams != null) { + retUrl.append("?" + urlParams); + } + return URLEncoder.encode(retUrl.toString(), StandardCharsets.UTF_8.toString()); + } +} diff --git a/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java b/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java index 445cfba..2ec6b47 100644 --- a/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java +++ b/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java @@ -36,11 +36,11 @@ import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.ws.rs.core.HttpHeaders; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; import org.onap.aai.sparky.logging.AaiUiMsgs; import org.onap.aai.sparky.security.EcompSso; import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; import org.openecomp.portalsdk.core.onboarding.listener.PortalTimeoutHandler; import org.openecomp.portalsdk.core.onboarding.util.PortalApiConstants; import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties; @@ -126,10 +126,17 @@ public class LoginFilter implements Filter { // All other requests require ECOMP Portal authentication if (EcompSso.validateEcompSso(request) == null) { String redirectURL, logMessage; - - // Redirect to Portal UI - redirectURL = PortalApiProperties.getProperty(PortalApiConstants.ECOMP_REDIRECT_URL); - logMessage = "Unauthorized login attempt."; + if (request.getRequestURI().contains("/editAttributes")) { + // If request is for Edit Attributes UI, redirect straight to the application. + String appPath = request.getRequestURI().substring(request.getContextPath().length() + 1) + + (request.getQueryString() != null ? ("?" 
+ request.getQueryString()) : ""); + redirectURL = SSOUtil.getECOMPSSORedirectURL(request, response, appPath); + logMessage = "Unauthenticated Edit Attributes UI login attempt."; + } else { + // Redirect to Portal UI + redirectURL = PortalApiProperties.getProperty(PortalApiConstants.ECOMP_REDIRECT_URL); + logMessage = "Unauthorized login attempt."; + } LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, logMessage + " | Remote IP: " + request.getRemoteAddr() + " | User agent: " diff --git a/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java b/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java index 050d558..d3ffac3 100644 --- a/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java +++ b/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java @@ -48,34 +48,6 @@ public class PortalRestAPIServiceImpl implements IPortalRestAPIService { private static final Logger LOG = LoggerFactory.getLogger(PortalRestAPIServiceImpl.class); private static final String ERROR_MESSAGE = "Failed to {0} user [loginId:{1}]"; - /** - * @return the userManager - */ - public UserManager getUserManager() { - return userManager; - } - - /** - * @param userManager the userManager to set - */ - public void setUserManager(UserManager userManager) { - this.userManager = userManager; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - - /** - * @return the errorMessage - */ - public static String getErrorMessage() { - return ERROR_MESSAGE; - } - private UserManager userManager; /** @@ -175,8 +147,11 @@ public class PortalRestAPIServiceImpl implements IPortalRestAPIService { ///////////////////////////////////////////////////////////////////////////// // Role interface ///////////////////////////////////////////////////////////////////////////// + public List getAvailableRoles() throws PortalAPIException { + LOG.debug("Get available roles"); + return UserManager.getRoles(); + } - @Override public List getAvailableRoles(String requestedLoginId) throws PortalAPIException { LOG.debug("Get available roles"); return UserManager.getRoles(); diff --git a/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java b/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java index 6f103d0..f58fc31 100644 --- a/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java +++ b/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java @@ -28,6 +28,7 @@ import org.onap.aai.sparky.util.ConfigHelper; import org.onap.aai.sparky.util.Encryptor; import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; + /** * Provides Portal authentication configuration. */ diff --git a/src/main/java/org/onap/aai/sparky/suggestivesearch/SuggestionEntity.java b/src/main/java/org/onap/aai/sparky/suggestivesearch/SuggestionEntity.java deleted file mode 100644 index 92be827..0000000 --- a/src/main/java/org/onap/aai/sparky/suggestivesearch/SuggestionEntity.java +++ /dev/null @@ -1,61 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.suggestivesearch; - -public class SuggestionEntity { - private String route; - private String hashId; - private String text; - - public SuggestionEntity() {} - - public SuggestionEntity(String route, String hashId, String text) { - this.route = route; - this.hashId = hashId; - this.text = text; - } - - public String getRoute() { - return route; - } - - public void setRoute(String route) { - this.route = route; - } - - public String getHashId() { - return hashId; - } - - public void setHashId(String hashId) { - this.hashId = hashId; - } - - public String getText() { - return text; - } - - public void setText(String text) { - this.text = text; - } -} diff --git a/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java b/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java new file mode 100644 index 0000000..bf1a7ee --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java @@ -0,0 +1,564 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync; + +import java.util.EnumSet; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.aai.ActiveInventoryEntityStatistics; +import org.onap.aai.sparky.dal.aai.ActiveInventoryProcessingExceptionStatistics; +import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchEntityStatistics; +import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.dal.rest.RestOperationalStatistics; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class AbstractEntitySynchronizer. + * + * @author davea. + */ +public abstract class AbstractEntitySynchronizer { + + protected static final int VERSION_CONFLICT_EXCEPTION_CODE = 409; + protected static final Integer RETRY_COUNT_PER_ENTITY_LIMIT = new Integer(3); + + protected final Logger logger; + protected ObjectMapper mapper; + protected long syncDurationInMs; + + /** + * The Enum StatFlag. + */ + protected enum StatFlag { + AAI_REST_STATS, AAI_ENTITY_STATS, AAI_PROCESSING_EXCEPTION_STATS, AAI_TASK_PROCESSING_STATS, ES_REST_STATS, ES_ENTITY_STATS, ES_TASK_PROCESSING_STATS + } + + protected EnumSet enabledStatFlags; + + protected ElasticSearchAdapter elasticSearchAdapter; + protected ActiveInventoryAdapter aaiAdapter; + + protected ExecutorService synchronizerExecutor; + protected ExecutorService aaiExecutor; + protected ExecutorService esExecutor; + + private RestOperationalStatistics esRestStats; + protected ElasticSearchEntityStatistics esEntityStats; + + private RestOperationalStatistics aaiRestStats; + protected ActiveInventoryEntityStatistics aaiEntityStats; + private ActiveInventoryProcessingExceptionStatistics aaiProcessingExceptionStats; + + private TaskProcessingStats aaiTaskProcessingStats; + private TaskProcessingStats esTaskProcessingStats; + + private TransactionRateMonitor aaiTransactionRateController; + private TransactionRateMonitor esTransactionRateController; + + protected AtomicInteger aaiWorkOnHand; + protected AtomicInteger esWorkOnHand; + protected String synchronizerName; + + protected abstract boolean isSyncDone(); + + protected boolean shouldSkipSync; + + public String getActiveInventoryStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { + sb.append("\n\n ").append("REST Operational Stats:"); + sb.append(aaiRestStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { + sb.append("\n\n ").append("Entity Stats:"); + sb.append(aaiEntityStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { + sb.append("\n\n ").append("Processing Exception Stats:"); + sb.append(aaiProcessingExceptionStats.getStatisticsReport()); + } + + return sb.toString(); + + } + + public String getElasticSearchStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { + 
sb.append("\n\n ").append("REST Operational Stats:"); + sb.append(esRestStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { + sb.append("\n\n ").append("Entity Stats:"); + sb.append(esEntityStats.getStatisticsReport()); + } + + return sb.toString(); + + } + + /** + * Adds the active inventory stat report. + * + * @param sb the sb + */ + private void addActiveInventoryStatReport(StringBuilder sb) { + + if (sb == null) { + return; + } + + sb.append("\n\n AAI"); + sb.append(getActiveInventoryStatisticsReport()); + + double currentTps = 0; + if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { + sb.append("\n\n ").append("Task Processor Stats:"); + sb.append(aaiTaskProcessingStats.getStatisticsReport(false, " ")); + + currentTps = aaiTransactionRateController.getCurrentTps(); + + sb.append("\n ").append("Current TPS: ").append(currentTps); + } + + sb.append("\n ").append("Current WOH: ").append(aaiWorkOnHand.get()); + + if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { + if (currentTps > 0) { + double numMillisecondsToCompletion = (aaiWorkOnHand.get() / currentTps) * 1000; + sb.append("\n ").append("SyncDurationRemaining=") + .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); + } + } + + } + + /** + * Adds the elastic stat report. + * + * @param sb the sb + */ + private void addElasticStatReport(StringBuilder sb) { + + if (sb == null) { + return; + } + + sb.append("\n\n ELASTIC"); + sb.append(getElasticSearchStatisticsReport()); + + double currentTps = 0; + + if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { + sb.append("\n\n ").append("Task Processor Stats:"); + sb.append(esTaskProcessingStats.getStatisticsReport(false, " ")); + + currentTps = esTransactionRateController.getCurrentTps(); + + sb.append("\n ").append("Current TPS: ").append(currentTps); + } + + sb.append("\n ").append("Current WOH: ").append(esWorkOnHand.get()); + + if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { + if (currentTps > 0) { + double numMillisecondsToCompletion = (esWorkOnHand.get() / currentTps) * 1000; + sb.append("\n ").append("SyncDurationRemaining=") + .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); + } + } + + + } + + /** + * Gets the stat report. + * + * @param syncOpTimeInMs the sync op time in ms + * @param showFinalReport the show final report + * @return the stat report + */ + protected String getStatReport(long syncOpTimeInMs, boolean showFinalReport) { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\n").append(synchronizerName + " Statistics: ( Sync Operation Duration = " + + NodeUtils.getDurationBreakdown(syncOpTimeInMs) + " )"); + + addActiveInventoryStatReport(sb); + addElasticStatReport(sb); + + if (showFinalReport) { + sb.append("\n\n ").append("Sync Completed!\n"); + } else { + sb.append("\n\n ").append("Sync in Progress...\n"); + } + + return sb.toString(); + + } + + protected String indexName; + protected long syncStartedTimeStampInMs; + + /** + * Instantiates a new abstract entity synchronizer. 
+   *
+   * @param logger the logger
+   * @param syncName the sync name
+   * @param numSyncWorkers the num sync workers
+   * @param numActiveInventoryWorkers the num active inventory workers
+   * @param numElasticsearchWorkers the num elasticsearch workers
+   * @param indexName the index name
+   * @param aaiStatConfig the aai stat config
+   * @param esStatConfig the es stat config
+   * @throws Exception the exception
+   */
+  protected AbstractEntitySynchronizer(Logger logger, String syncName, int numSyncWorkers,
+      int numActiveInventoryWorkers, int numElasticsearchWorkers, String indexName,
+      NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig)
+      throws Exception {
+    this.logger = logger;
+    this.synchronizerExecutor =
+        NodeUtils.createNamedExecutor(syncName + "-INTERNAL", numSyncWorkers, logger);
+    this.aaiExecutor =
+        NodeUtils.createNamedExecutor(syncName + "-AAI", numActiveInventoryWorkers, logger);
+    this.esExecutor =
+        NodeUtils.createNamedExecutor(syncName + "-ES", numElasticsearchWorkers, logger);
+    this.mapper = new ObjectMapper();
+    this.indexName = indexName;
+    this.esRestStats = new RestOperationalStatistics();
+    this.esEntityStats = new ElasticSearchEntityStatistics();
+    this.aaiRestStats = new RestOperationalStatistics();
+    this.aaiEntityStats = new ActiveInventoryEntityStatistics();
+    this.aaiProcessingExceptionStats = new ActiveInventoryProcessingExceptionStatistics();
+    this.aaiTaskProcessingStats = new TaskProcessingStats(aaiStatConfig);
+    this.esTaskProcessingStats = new TaskProcessingStats(esStatConfig);
+
+    this.aaiTransactionRateController =
+        new TransactionRateMonitor(numActiveInventoryWorkers, aaiStatConfig);
+    this.esTransactionRateController =
+        new TransactionRateMonitor(numElasticsearchWorkers, esStatConfig);
+
+    this.aaiWorkOnHand = new AtomicInteger(0);
+    this.esWorkOnHand = new AtomicInteger(0);
+
+    enabledStatFlags = EnumSet.allOf(StatFlag.class);
+
+    this.synchronizerName = "Abstract Entity Synchronizer";
+
+    String txnID = NodeUtils.getRandomTxnId();
+    MdcContext.initialize(txnID, "AbstractEntitySynchronizer", "", "Sync", "");
+
+    this.shouldSkipSync = false;
+    this.syncStartedTimeStampInMs = System.currentTimeMillis();
+    this.syncDurationInMs = -1;
+  }
+
+  public boolean shouldSkipSync() {
+    return shouldSkipSync;
+  }
+
+  public void setShouldSkipSync(boolean shouldSkipSync) {
+    this.shouldSkipSync = shouldSkipSync;
+  }
+
+  /**
+   * Inc active inventory work on hand counter.
+   */
+  protected void incActiveInventoryWorkOnHandCounter() {
+    aaiWorkOnHand.incrementAndGet();
+  }
+
+  /**
+   * Dec active inventory work on hand counter.
+   */
+  protected void decActiveInventoryWorkOnHandCounter() {
+    aaiWorkOnHand.decrementAndGet();
+  }
+
+  /**
+   * Inc elastic search work on hand counter.
+   */
+  protected void incElasticSearchWorkOnHandCounter() {
+    esWorkOnHand.incrementAndGet();
+  }
+
+  /**
+   * Dec elastic search work on hand counter.
+   */
+  protected void decElasticSearchWorkOnHandCounter() {
+    esWorkOnHand.decrementAndGet();
+  }
+
+  /**
+   * Shutdown executors.
+   */
+  protected void shutdownExecutors() {
+    try {
+
+      if (synchronizerExecutor != null) {
+        synchronizerExecutor.shutdown();
+      }
+
+      if (aaiExecutor != null) {
+        aaiExecutor.shutdown();
+      }
+
+      if (esExecutor != null) {
+        esExecutor.shutdown();
+      }
+
+    } catch (Exception exc) {
+      logger.error(AaiUiMsgs.ERROR_SHUTDOWN_EXECUTORS, exc);
+    }
+  }
+
+  /**
+   * Clear cache.
+ */ + public void clearCache() {} + + public ElasticSearchAdapter getElasticSearchAdapter() { + return elasticSearchAdapter; + } + + public void setElasticSearchAdapter(ElasticSearchAdapter elasticSearchAdapter) { + this.elasticSearchAdapter = elasticSearchAdapter; + } + + public ActiveInventoryAdapter getAaiAdapter() { + return aaiAdapter; + } + + public void setAaiAdapter(ActiveInventoryAdapter aaiAdapter) { + this.aaiAdapter = aaiAdapter; + } + + /** + * Gets the elastic full url. + * + * @param resourceUrl the resource url + * @param indexName the index name + * @param indexType the index type + * @return the elastic full url + * @throws Exception the exception + */ + protected String getElasticFullUrl(String resourceUrl, String indexName, String indexType) + throws Exception { + return ElasticSearchConfig.getConfig().getElasticFullUrl(resourceUrl, indexName, indexType); + } + + /** + * Gets the elastic full url. + * + * @param resourceUrl the resource url + * @param indexName the index name + * @return the elastic full url + * @throws Exception the exception + */ + protected String getElasticFullUrl(String resourceUrl, String indexName) throws Exception { + return ElasticSearchConfig.getConfig().getElasticFullUrl(resourceUrl, indexName); + } + + public String getIndexName() { + return indexName; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + + /** + * Gets the response length. + * + * @param txn the txn + * @return the response length + */ + private long getResponseLength(NetworkTransaction txn) { + + if (txn == null) { + return -1; + } + + OperationResult result = txn.getOperationResult(); + + if (result == null) { + return -1; + } + + if (result.getResult() != null) { + return result.getResult().length(); + } + + return -1; + } + + /** + * Update elastic search counters. + * + * @param method the method + * @param or the or + */ + protected void updateElasticSearchCounters(HttpMethod method, OperationResult or) { + updateElasticSearchCounters(new NetworkTransaction(method, null, or)); + } + + /** + * Update elastic search counters. + * + * @param method the method + * @param entityType the entity type + * @param or the or + */ + protected void updateElasticSearchCounters(HttpMethod method, String entityType, + OperationResult or) { + updateElasticSearchCounters(new NetworkTransaction(method, entityType, or)); + } + + /** + * Update elastic search counters. + * + * @param txn the txn + */ + protected void updateElasticSearchCounters(NetworkTransaction txn) { + + if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { + esRestStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { + esEntityStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { + + esTransactionRateController.trackResponseTime(txn.getOpTimeInMs()); + + esTaskProcessingStats.updateTaskResponseStatsHistogram(txn.getOpTimeInMs()); + esTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs()); + + // don't know the cost of the lengh calc, we'll see if it causes a + // problem + + long responsePayloadSizeInBytes = getResponseLength(txn); + if (responsePayloadSizeInBytes >= 0) { + esTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes); + } + + esTaskProcessingStats + .updateTransactionsPerSecondHistogram((long) esTransactionRateController.getCurrentTps()); + } + } + + /** + * Update active inventory counters. 
+ * + * @param method the method + * @param or the or + */ + protected void updateActiveInventoryCounters(HttpMethod method, OperationResult or) { + updateActiveInventoryCounters(new NetworkTransaction(method, null, or)); + } + + /** + * Update active inventory counters. + * + * @param method the method + * @param entityType the entity type + * @param or the or + */ + protected void updateActiveInventoryCounters(HttpMethod method, String entityType, + OperationResult or) { + updateActiveInventoryCounters(new NetworkTransaction(method, entityType, or)); + } + + /** + * Update active inventory counters. + * + * @param txn the txn + */ + protected void updateActiveInventoryCounters(NetworkTransaction txn) { + + if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { + aaiRestStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { + aaiEntityStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { + aaiProcessingExceptionStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { + aaiTransactionRateController.trackResponseTime(txn.getOpTimeInMs()); + + aaiTaskProcessingStats.updateTaskResponseStatsHistogram(txn.getOpTimeInMs()); + aaiTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs()); + + // don't know the cost of the lengh calc, we'll see if it causes a + // problem + + long responsePayloadSizeInBytes = getResponseLength(txn); + if (responsePayloadSizeInBytes >= 0) { + aaiTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes); + } + + aaiTaskProcessingStats.updateTransactionsPerSecondHistogram( + (long) aaiTransactionRateController.getCurrentTps()); + } + } + + /** + * Reset counters. + */ + protected void resetCounters() { + aaiRestStats.reset(); + aaiEntityStats.reset(); + aaiProcessingExceptionStats.reset(); + + esRestStats.reset(); + esEntityStats.reset(); + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java b/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java new file mode 100644 index 0000000..e1785d4 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java @@ -0,0 +1,607 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
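AbstractEntitySynchronizer supplies the thread pools, work-on-hand counters and statistics plumbing that the concrete synchronizers in this patch build on. The hypothetical subclass below is a minimal sketch of that pattern and is not part of the patch: the sync name and worker counts are illustrative, and it assumes the HttpMethod enum exposes a PUT constant. It only shows how a store operation is bracketed by the counter and statistics helpers.

    import javax.ws.rs.core.MediaType;

    import org.onap.aai.cl.api.Logger;
    import org.onap.aai.cl.eelf.LoggerFactory;
    import org.onap.aai.restclient.client.OperationResult;
    import org.onap.aai.sparky.dal.rest.HttpMethod;
    import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
    import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;

    // Hypothetical minimal synchronizer used only to illustrate the bookkeeping pattern.
    public class ExampleEntitySynchronizer extends AbstractEntitySynchronizer {

      private static final Logger LOG =
          LoggerFactory.getInstance().getLogger(ExampleEntitySynchronizer.class);

      public ExampleEntitySynchronizer(String indexName, NetworkStatisticsConfig aaiStatConfig,
          NetworkStatisticsConfig esStatConfig) throws Exception {
        // Sync name and worker counts are illustrative values only.
        super(LOG, "example-sync", 2, 5, 5, indexName, aaiStatConfig, esStatConfig);
      }

      @Override
      protected boolean isSyncDone() {
        // Done when neither AAI nor Elasticsearch work is outstanding.
        return aaiWorkOnHand.get() == 0 && esWorkOnHand.get() == 0;
      }

      // Typical store step: bracket the REST call with work-on-hand and statistics updates
      // so the periodic stat report reflects throughput and outstanding work.
      protected void storeDocument(String docUrl, String docPayload) {
        incElasticSearchWorkOnHandCounter();
        try {
          OperationResult result =
              elasticSearchAdapter.doPut(docUrl, docPayload, MediaType.APPLICATION_JSON_TYPE);
          updateElasticSearchCounters(HttpMethod.PUT, result);
        } finally {
          decElasticSearchWorkOnHandCounter();
        }
      }
    }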
+ */ +package org.onap.aai.sparky.sync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.entity.ObjectIdCollection; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.sync.enumeration.OperationState; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + * The Class ElasticSearchIndexCleaner. + */ +public class ElasticSearchIndexCleaner implements IndexCleaner { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ElasticSearchIndexCleaner.class); + + private static final String BULK_OP_LINE_TEMPLATE = "%s\n"; + private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + + private ObjectIdCollection before; + private ObjectIdCollection after; + + private ObjectMapper mapper; + private ElasticSearchAdapter esAdapter; + private ElasticSearchEndpointConfig endpointConfig; + private ElasticSearchSchemaConfig schemaConfig; + + /** + * Instantiates a new elastic search index cleaner. + * + * @param restDataProvider the rest data provider + * @param indexName the index name + * @param indexType the index type + * @param host the host + * @param port the port + * @param scrollContextTimeToLiveInMinutes the scroll context time to live in minutes + * @param numItemsToGetBulkRequest the num items to get bulk request + */ + public ElasticSearchIndexCleaner(ElasticSearchAdapter esAdapter, + ElasticSearchEndpointConfig endpointConfig, ElasticSearchSchemaConfig schemaConfig) { + this.esAdapter = esAdapter; + this.before = null; + this.after = null; + this.endpointConfig = endpointConfig; + this.schemaConfig = schemaConfig; + this.mapper = new ObjectMapper(); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePreOperationCollection() + */ + @Override + public OperationState populatePreOperationCollection() { + + try { + before = retrieveAllDocumentIdentifiers(); + return OperationState.OK; + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, schemaConfig.getIndexName(), exc.getMessage()); + return OperationState.ERROR; + } + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePostOperationCollection() + */ + @Override + public OperationState populatePostOperationCollection() { + try { + after = retrieveAllDocumentIdentifiers(); + return OperationState.OK; + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, schemaConfig.getIndexName(), exc.getMessage()); + return OperationState.ERROR; + } + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexCleaner#performCleanup() + */ + @Override + public OperationState performCleanup() { + // TODO Auto-generated method stub + LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP, 
schemaConfig.getIndexName()); + + int sizeBefore = before.getSize(); + int sizeAfter = after.getSize(); + + LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP_SIZE, String.valueOf(sizeBefore), + String.valueOf(sizeAfter)); + + /* + * If the processedImportIds size <= 0, then something has failed in the sync operation and we + * shouldn't do the selective delete right now. + */ + + if (sizeAfter > 0) { + + Collection presyncIds = before.getImportedObjectIds(); + presyncIds.removeAll(after.getImportedObjectIds()); + + try { + LOG.info(AaiUiMsgs.ES_SYNC_SELECTIVE_DELETE, schemaConfig.getIndexName(), + schemaConfig.getIndexDocType(), String.valueOf(presyncIds.size())); + + ObjectIdCollection bulkIds = new ObjectIdCollection(); + + Iterator it = presyncIds.iterator(); + int numItemsInBulkRequest = 0; + int numItemsRemainingToBeDeleted = presyncIds.size(); + + while (it.hasNext()) { + + bulkIds.addObjectId(it.next()); + numItemsInBulkRequest++; + + if (numItemsInBulkRequest >= endpointConfig.getScrollContextBatchRequestSize()) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE, schemaConfig.getIndexName(), + String.valueOf(bulkIds.getSize())); + bulkDelete(bulkIds.getImportedObjectIds()); + numItemsRemainingToBeDeleted -= numItemsInBulkRequest; + numItemsInBulkRequest = 0; + bulkIds.clear(); + } + } + + if (numItemsRemainingToBeDeleted > 0) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE, schemaConfig.getIndexName(), + String.valueOf(bulkIds.getSize())); + bulkDelete(bulkIds.getImportedObjectIds()); + } + + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_BULK_DELETE_ERROR, schemaConfig.getIndexName(), + exc.getLocalizedMessage()); + + } + } + + return OperationState.OK; + } + + @Override + public String getIndexName() { + return schemaConfig.getIndexName(); + } + + /** + * Builds the initial scroll request payload. + * + * @param numItemsToGetPerRequest the num items to get per request + * @param fieldList the field list + * @return the string + * @throws JsonProcessingException the json processing exception + */ + protected String buildInitialScrollRequestPayload(int numItemsToGetPerRequest, + List fieldList) throws JsonProcessingException { + + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.put("size", numItemsToGetPerRequest); + + ArrayNode fields = mapper.createArrayNode(); + + for (String f : fieldList) { + fields.add(f); + } + + rootNode.set("fields", fields); + + ObjectNode queryNode = mapper.createObjectNode(); + queryNode.set("match_all", mapper.createObjectNode()); + + rootNode.set("query", queryNode); + + return mapper.writeValueAsString(rootNode); + + } + + /** + * Builds the subsequent scroll context request payload. + * + * @param scrollId the scroll id + * @param contextTimeToLiveInMinutes the context time to live in minutes + * @return the string + * @throws JsonProcessingException the json processing exception + */ + protected String buildSubsequentScrollContextRequestPayload(String scrollId, + int contextTimeToLiveInMinutes) throws JsonProcessingException { + + ObjectNode rootNode = mapper.createObjectNode(); + + rootNode.put("scroll", contextTimeToLiveInMinutes + "m"); + rootNode.put("scroll_id", scrollId); + + return mapper.writeValueAsString(rootNode); + + } + + /** + * Parses the elastic search result. + * + * @param jsonResult the json result + * @return the json node + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + protected JsonNode parseElasticSearchResult(String jsonResult) + throws JsonProcessingException, IOException { + ObjectMapper mapper = new ObjectMapper(); + return mapper.readTree(jsonResult); + } + + /** + * Lookup index doc. + * + * @param ids the ids + * @param docs the docs + * @return the array list + */ + protected ArrayList lookupIndexDoc(ArrayList ids, + List docs) { + ArrayList objs = new ArrayList(); + + if (ids != null && docs != null) { + for (SearchableEntity d : docs) { + if (ids.contains(d.getId())) { + objs.add(d); + } + } + } + + return objs; + } + + /** + * Builds the delete data object. + * + * @param index the index + * @param type the type + * @param id the id + * @return the object node + */ + protected ObjectNode buildDeleteDataObject(String index, String type, String id) { + + ObjectNode indexDocProperties = mapper.createObjectNode(); + + indexDocProperties.put("_index", index); + indexDocProperties.put("_type", type); + indexDocProperties.put("_id", id); + + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.set("delete", indexDocProperties); + + return rootNode; + } + + /** + * This method might appear to be a little strange, and is simply an optimization to take an + * elipsed JsonNode key path and retrieve the node at the end of the path, if it exists. + * + * @param startNode the start node + * @param fieldPath the field path + * @return the node path + */ + protected JsonNode getNodePath(JsonNode startNode, String... fieldPath) { + + JsonNode jsonNode = null; + + for (String field : fieldPath) { + if (jsonNode == null) { + jsonNode = startNode.get(field); + } else { + jsonNode = jsonNode.get(field); + } + + /* + * This is our safety net in case any intermediate path returns a null + */ + + if (jsonNode == null) { + return null; + } + + } + + return jsonNode; + } + + /** + * Gets the full url. + * + * @param resourceUrl the resource url + * @return the full url + */ + private String getFullUrl(String resourceUrl) { + return String.format("http://%s:%s%s", endpointConfig.getEsIpAddress(), + endpointConfig.getEsServerPort(), resourceUrl); + } + + /** + * Retrieve all document identifiers. + * + * @return the object id collection + * @throws IOException Signals that an I/O exception has occurred. + */ + public ObjectIdCollection retrieveAllDocumentIdentifiers() throws IOException { + + ObjectIdCollection currentDocumentIds = new ObjectIdCollection(); + + long opStartTimeInMs = System.currentTimeMillis(); + + List fields = new ArrayList(); + fields.add("_id"); + // fields.add("entityType"); + + String scrollRequestPayload = + buildInitialScrollRequestPayload(endpointConfig.getScrollContextBatchRequestSize(), fields); + + final String fullUrlStr = + getFullUrl("/" + schemaConfig.getIndexName() + "/" + schemaConfig.getIndexDocType() + + "/_search?scroll=" + endpointConfig.getScrollContextTimeToLiveInMinutes() + "m"); + + OperationResult result = + esAdapter.doPost(fullUrlStr, scrollRequestPayload, MediaType.APPLICATION_JSON_TYPE); + + if (result.wasSuccessful()) { + + JsonNode rootNode = parseElasticSearchResult(result.getResult()); + + /* + * Check the result for success / failure, and enumerate all the index ids that resulted in + * success, and ignore the ones that failed or log them so we have a record of the failure. 
+ */ + int totalRecordsAvailable = 0; + String scrollId = null; + int numRecordsFetched = 0; + + if (rootNode != null) { + + scrollId = getFieldValue(rootNode, "_scroll_id"); + final String tookStr = getFieldValue(rootNode, "took"); + int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr); + boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); + + if (timedOut) { + LOG.error(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "all document Identifiers", + String.valueOf(tookInMs)); + } else { + LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "all document Identifiers", + String.valueOf(tookInMs)); + } + + JsonNode hitsNode = rootNode.get("hits"); + totalRecordsAvailable = Integer.parseInt(hitsNode.get("total").asText()); + + LOG.info(AaiUiMsgs.COLLECT_TOTAL, "all document Identifiers", + String.valueOf(totalRecordsAvailable)); + + /* + * Collect all object ids + */ + + ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); + + Iterator nodeIterator = hitsArray.iterator(); + + String key = null; + String value = null; + JsonNode jsonNode = null; + + while (nodeIterator.hasNext()) { + + jsonNode = nodeIterator.next(); + + key = getFieldValue(jsonNode, "_id"); + + if (key != null) { + currentDocumentIds.addObjectId(key); + } + + } + + int totalRecordsRemainingToFetch = (totalRecordsAvailable - numRecordsFetched); + + int numRequiredAdditionalFetches = + (totalRecordsRemainingToFetch / endpointConfig.getScrollContextBatchRequestSize()); + + /* + * Do an additional fetch for the remaining items (if needed) + */ + + if (totalRecordsRemainingToFetch % endpointConfig.getScrollContextBatchRequestSize() != 0) { + numRequiredAdditionalFetches += 1; + } + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.SYNC_NUMBER_REQ_FETCHES, + String.valueOf(numRequiredAdditionalFetches)); + } + + + for (int x = 0; x < numRequiredAdditionalFetches; x++) { + + if (collectItemsFromScrollContext(scrollId, currentDocumentIds) != OperationState.OK) { + // abort the whole thing because now we can't reliably cleanup the orphans. + throw new IOException( + "Failed to collect pre-sync doc collection from index. Aborting operation"); + } + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.SYNC_NUMBER_TOTAL_FETCHES, + String.valueOf(currentDocumentIds.getSize()), + String.valueOf(totalRecordsAvailable)); + } + + } + + } + + } else { + // scroll context get failed, nothing else to do + LOG.error(AaiUiMsgs.ERROR_GENERIC, result.toString()); + } + + LOG.info(AaiUiMsgs.COLLECT_TOTAL_TIME, "all document Identifiers", + String.valueOf((System.currentTimeMillis() - opStartTimeInMs))); + + return currentDocumentIds; + + } + + /** + * Collect items from scroll context. + * + * @param scrollId the scroll id + * @param objectIds the object ids + * @return the operation state + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + private OperationState collectItemsFromScrollContext(String scrollId, + ObjectIdCollection objectIds) throws IOException { + + String requestPayload = buildSubsequentScrollContextRequestPayload(scrollId, + endpointConfig.getScrollContextTimeToLiveInMinutes()); + + final String fullUrlStr = getFullUrl("/_search/scroll"); + + OperationResult opResult = + esAdapter.doPost(fullUrlStr, requestPayload, MediaType.APPLICATION_JSON_TYPE); + + if (opResult.getResultCode() >= 300) { + LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, opResult.getResult()); + return OperationState.ERROR; + } + + JsonNode rootNode = parseElasticSearchResult(opResult.getResult()); + boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); + final String tookStr = getFieldValue(rootNode, "took"); + int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr); + + JsonNode hitsNode = rootNode.get("hits"); + + /* + * Check the result for success / failure, and enumerate all the index ids that resulted in + * success, and ignore the ones that failed or log them so we have a record of the failure. + */ + + if (rootNode != null) { + + if (timedOut) { + LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "Scroll Context", String.valueOf(tookInMs)); + } else { + LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "Scroll Context", String.valueOf(tookInMs)); + } + + /* + * Collect all object ids + */ + + ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); + String key = null; + String value = null; + JsonNode jsonNode = null; + + Iterator nodeIterator = hitsArray.iterator(); + + while (nodeIterator.hasNext()) { + + jsonNode = nodeIterator.next(); + + key = getFieldValue(jsonNode, "_id"); + + if (key != null) { + objectIds.addObjectId(key); + + } + + } + } + + return OperationState.OK; + } + + /** + * Gets the field value. + * + * @param node the node + * @param fieldName the field name + * @return the field value + */ + protected String getFieldValue(JsonNode node, String fieldName) { + + JsonNode field = node.get(fieldName); + + if (field != null) { + return field.asText(); + } + + return null; + + } + + /** + * Bulk delete. + * + * @param docIds the doc ids + * @return the operation result + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + public OperationResult bulkDelete(Collection docIds) throws IOException { + + if (docIds == null || docIds.size() == 0) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE_SKIP); + return new OperationResult(500, + "Skipping bulkDelete(); operation because docs to delete list is empty"); + } + + LOG.info(AaiUiMsgs.ES_BULK_DELETE_START, String.valueOf(docIds.size())); + + StringBuilder sb = new StringBuilder(128); + + for (String id : docIds) { + sb.append(String.format(BULK_OP_LINE_TEMPLATE, + buildDeleteDataObject(schemaConfig.getIndexName(), schemaConfig.getIndexDocType(), id))); + } + + sb.append("\n"); + + final String fullUrlStr = getFullUrl("/_bulk"); + + return esAdapter.doPost(fullUrlStr, sb.toString(), MediaType.APPLICATION_FORM_URLENCODED_TYPE); + + } + + /* + + */ + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java b/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java new file mode 100644 index 0000000..9013600 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java @@ -0,0 +1,109 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
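ElasticSearchIndexCleaner implements the orphan-removal side of a sync: it snapshots the document ids in the index before and after a synchronization pass, removes the re-imported ids from the pre-sync snapshot, and bulk-deletes the remainder (the bulk payload is one {"delete": ...} action line per id). The sketch below shows the intended call order only; the adapter and config objects are assumed to be supplied by the surrounding controller wiring, and the class name is illustrative.

    import org.onap.aai.sparky.dal.ElasticSearchAdapter;
    import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner;
    import org.onap.aai.sparky.sync.IndexCleaner;
    import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
    import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
    import org.onap.aai.sparky.sync.enumeration.OperationState;

    public class IndexCleanerUsageSketch {

      // Only the call order matters here; esAdapter and the two configs are built elsewhere.
      public void cleanOrphansAround(Runnable syncPass, ElasticSearchAdapter esAdapter,
          ElasticSearchEndpointConfig endpointConfig, ElasticSearchSchemaConfig schemaConfig) {

        IndexCleaner cleaner =
            new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig);

        // 1. Snapshot the document ids that exist before the sync.
        if (cleaner.populatePreOperationCollection() != OperationState.OK) {
          return; // without a pre-sync snapshot the selective delete cannot be trusted
        }

        // 2. Run the synchronization pass that re-imports the current AAI entities.
        syncPass.run();

        // 3. Snapshot the ids again and bulk-delete anything that was not re-imported.
        if (cleaner.populatePostOperationCollection() == OperationState.OK) {
          cleaner.performCleanup();
        }
      }
    }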
+ */ +package org.onap.aai.sparky.sync; + +import java.io.IOException; + +import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.util.ConfigHelper; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +public class ElasticSearchSchemaFactory { + + private static final String SETTINGS = "settings"; + private static final String MAPPINGS = "mappings"; + + private static ObjectMapper mapper = new ObjectMapper(); + + protected static String getConfigAsString(String configItem, String configFileName) + throws ElasticSearchOperationException { + String indexConfig = null; + + try { + indexConfig = ConfigHelper.getFileContents(configFileName); + } catch (IOException exc) { + throw new ElasticSearchOperationException( + "Failed to read index " + configItem + " from file = " + configFileName + ".", exc); + } + + if (indexConfig == null) { + throw new ElasticSearchOperationException( + "Failed to load index " + configItem + " with filename = " + configFileName + "."); + } + return indexConfig; + } + + + + public static String getIndexSchema(ElasticSearchSchemaConfig schemaConfig) + throws ElasticSearchOperationException { + + JsonNode esSettingsNode = null; + JsonNode esMappingsNodes = null; + + try { + + if (schemaConfig.getIndexSettingsFileName() != null) { + esSettingsNode = mapper.readTree(getConfigAsString(SETTINGS, + TierSupportUiConstants.getConfigPath(schemaConfig.getIndexSettingsFileName()))); + } + + if (schemaConfig.getIndexMappingsFileName() != null) { + esMappingsNodes = mapper.readTree(getConfigAsString(MAPPINGS, + TierSupportUiConstants.getConfigPath(schemaConfig.getIndexMappingsFileName()))); + } + + } catch (IOException e1) { + + throw new ElasticSearchOperationException( + "Caught an exception building initial ES index. Error: " + e1.getMessage()); + } + + ObjectNode esConfig = null; + + ObjectNode mappings = + (ObjectNode) mapper.createObjectNode().set(schemaConfig.getIndexDocType(), esMappingsNodes); + + if (esSettingsNode == null) { + esConfig = (ObjectNode) mapper.createObjectNode().set(MAPPINGS, mappings); + } else { + esConfig = (ObjectNode) mapper.createObjectNode().set(SETTINGS, esSettingsNode); + esConfig.set(MAPPINGS, mappings); + } + + try { + return mapper.writeValueAsString(esConfig); + } catch (JsonProcessingException exc) { + throw new ElasticSearchOperationException("Error getting object node as string", exc); + } + + } + + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java b/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java new file mode 100644 index 0000000..3b0ec57 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java @@ -0,0 +1,55 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import org.onap.aai.sparky.sync.enumeration.OperationState; + +/** + * The Interface IndexCleaner. + */ +public interface IndexCleaner { + + /** + * Populate pre operation collection. + * + * @return the operation state + */ + public OperationState populatePreOperationCollection(); + + /** + * Populate post operation collection. + * + * @return the operation state + */ + public OperationState populatePostOperationCollection(); + + /** + * Perform cleanup. + * + * @return the operation state + */ + public OperationState performCleanup(); + + public String getIndexName(); + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java b/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java new file mode 100644 index 0000000..a6941ad --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java @@ -0,0 +1,176 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; + +/** + * The Class IndexIntegrityValidator. + */ +public class IndexIntegrityValidator implements IndexValidator { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(IndexIntegrityValidator.class); + + private ElasticSearchEndpointConfig endpointConfig; + private ElasticSearchSchemaConfig schemaConfig; + private String tableConfigJson; + + private final ElasticSearchAdapter esAdapter; + + /** + * Instantiates a new index integrity validator. 
+ * + * @param restDataProvider the rest data provider + * @param indexName the index name + * @param indexType the index type + * @param host the host + * @param port the port + * @param tableConfigJson the table config json + */ + public IndexIntegrityValidator(ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig esSchemaConfig, ElasticSearchEndpointConfig esEndpointConfig, + String tableConfigJson) { + + this.esAdapter = esAdapter; + this.schemaConfig = esSchemaConfig; + this.endpointConfig = esEndpointConfig; + this.tableConfigJson = tableConfigJson; + } + + public ElasticSearchEndpointConfig getEndpointConfig() { + return endpointConfig; + } + + public void setEndpointConfig(ElasticSearchEndpointConfig endpointConfig) { + this.endpointConfig = endpointConfig; + } + + public ElasticSearchSchemaConfig getSchemaConfig() { + return schemaConfig; + } + + public void setSchemaConfig(ElasticSearchSchemaConfig schemaConfig) { + this.schemaConfig = schemaConfig; + } + + public ElasticSearchAdapter getEsAdapter() { + return esAdapter; + } + + @Override + public String getIndexName() { + return schemaConfig.getIndexName(); + } + + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexValidator#exists() + */ + @Override + public boolean exists() { + final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName() + "/"); + OperationResult existsResult = esAdapter.doHead(fullUrlStr, MediaType.APPLICATION_JSON_TYPE); + + int rc = existsResult.getResultCode(); + + if (rc >= 200 && rc < 300) { + LOG.info(AaiUiMsgs.INDEX_EXISTS, schemaConfig.getIndexName()); + return true; + } else { + LOG.info(AaiUiMsgs.INDEX_NOT_EXIST, schemaConfig.getIndexName()); + return false; + } + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexValidator#integrityValid() + */ + @Override + public boolean integrityValid() { + return true; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexValidator#createOrRepair() + */ + @Override + public void createOrRepair() { + + String message = + "IndexIntegrityValidator.createOrRepair() for indexName = " + schemaConfig.getIndexName(); + LOG.info(AaiUiMsgs.INFO_GENERIC, message); + + final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName() + "/"); + OperationResult createResult = + esAdapter.doPut(fullUrlStr, tableConfigJson, MediaType.APPLICATION_JSON_TYPE); + + int rc = createResult.getResultCode(); + + if (rc >= 200 && rc < 300) { + LOG.info(AaiUiMsgs.INDEX_RECREATED, schemaConfig.getIndexName()); + } else if (rc == 400) { + LOG.info(AaiUiMsgs.INDEX_ALREADY_EXISTS, schemaConfig.getIndexName()); + } else { + LOG.warn(AaiUiMsgs.INDEX_INTEGRITY_CHECK_FAILED, schemaConfig.getIndexName(), + createResult.getResult()); + } + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexValidator#destroyIndex() + */ + @Override + public void destroyIndex() { + // we don't do this for now + } + + /** + * Gets the full url. 
+ * + * @param resourceUrl the resource url + * @return the full url + */ + private String getFullUrl(String resourceUrl) { + return String.format("http://%s:%s%s", endpointConfig.getEsIpAddress(), + endpointConfig.getEsServerPort(), resourceUrl); + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java b/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java new file mode 100644 index 0000000..6e581f6 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; + +/** + * The Interface IndexSynchronizer. + * + * @author davea. + */ +public interface IndexSynchronizer { + + /** + * Do sync. + * + * @return the operation state + */ + public OperationState doSync(); + + public SynchronizerState getState(); + + /** + * Gets the stat report. + * + * @param finalReport the final report + * @return the stat report + */ + public String getStatReport(boolean finalReport); + + /** + * Shutdown. + */ + public void shutdown(); + + public String getIndexName(); + + /** + * Clear cache. + */ + public void clearCache(); + + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java b/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java new file mode 100644 index 0000000..e78d95c --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java @@ -0,0 +1,56 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
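IndexIntegrityValidator pairs naturally with ElasticSearchSchemaFactory: the factory merges the configured settings and mappings files into a single index-creation document, and the validator can check whether the index exists and (re)create it from that document when it is missing. The sketch below shows that bootstrap sequence; the adapter and config objects are assumed to be provided by the caller, and the class name is illustrative.

    import org.onap.aai.sparky.dal.ElasticSearchAdapter;
    import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
    import org.onap.aai.sparky.sync.IndexIntegrityValidator;
    import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
    import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;

    public class IndexBootstrapSketch {

      // schemaConfig is expected to name the index, its doc type, and the
      // es_*_settings.json / es_*_mappings.json files shipped in src/main/config.
      public void ensureIndex(ElasticSearchAdapter esAdapter, ElasticSearchSchemaConfig schemaConfig,
          ElasticSearchEndpointConfig endpointConfig) throws Exception {

        // Merge the settings and mappings documents into one index-creation payload.
        String indexSchema = ElasticSearchSchemaFactory.getIndexSchema(schemaConfig);

        // HEAD the index first; PUT the schema only when the index is missing.
        IndexIntegrityValidator validator =
            new IndexIntegrityValidator(esAdapter, schemaConfig, endpointConfig, indexSchema);

        if (!validator.exists()) {
          validator.createOrRepair();
        }
      }
    }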
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +/** + * The Interface IndexValidator. + */ +public interface IndexValidator { + + /** + * Exists. + * + * @return true, if successful + */ + public boolean exists(); + + /** + * Integrity valid. + * + * @return true, if successful + */ + public boolean integrityValid(); + + /** + * Creates the or repair. + */ + public void createOrRepair(); + + /** + * Destroy index. + */ + public void destroyIndex(); + + public String getIndexName(); + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/SyncController.java b/src/main/java/org/onap/aai/sparky/sync/SyncController.java new file mode 100644 index 0000000..f482c66 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/SyncController.java @@ -0,0 +1,96 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import java.util.Calendar; +import java.util.Date; + +import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; + +public interface SyncController { + + String getControllerName(); + + boolean isPeriodicSyncEnabled(); + + boolean isRunOnceSyncEnabled(); + + /** + * Perform action. + * + * @param requestedAction the requested action + * @return + */ + OperationState performAction(SyncActions requestedAction); + + /** + * Register entity synchronizer. + * + * @param entitySynchronizer the entity synchronizer + */ + void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer); + + /** + * Register index validator. + * + * @param indexValidator the index validator + */ + void registerIndexValidator(IndexValidator indexValidator); + + /** + * Register index cleaner. + * + * @param indexCleaner the index cleaner + */ + void registerIndexCleaner(IndexCleaner indexCleaner); + + /** + * Shutdown. 
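+ * Implementations are expected to stop any worker threads they own and shut down each registered synchronizer; callers should invoke this once, when the controller is no longer needed.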
+ */ + void shutdown(); + + SynchronizerState getState(); + + long getDelayInMs(); + + void setDelayInMs(long delayInMs); + + long getSyncFrequencyInMs(); + + void setSyncFrequencyInMs(long syncFrequencyInMs); + + Date getSyncStartTime(); + + void setSyncStartTime(Date syncStartTime); + + Date getLastExecutionDate(); + + void setLastExecutionDate(Date lastExecutionDate); + + Calendar getCreationTime(); + + String getNextSyncTime(); + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java b/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java new file mode 100644 index 0000000..1c3d425 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java @@ -0,0 +1,692 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.util.Calendar; +import java.util.Collection; +import java.util.Date; +import java.util.LinkedHashSet; +import java.util.TimeZone; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.util.NodeUtils; + +/** + * The Class SyncController. + * + * @author davea. + */ +public class SyncControllerImpl implements SyncController { + private static final Logger LOG = LoggerFactory.getInstance().getLogger(SyncControllerImpl.class); + + /** + * The Enum InternalState. + */ + private enum InternalState { + IDLE, PRE_SYNC, SYNC_OPERATION, SELECTIVE_DELETE, ABORTING_SYNC, REPAIRING_INDEX, POST_SYNC, TEST_INDEX_INTEGRITY, GENERATE_FINAL_REPORT + } + + /** + * The Enum SyncActions. 
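+ * The actions that drive the internal state machine. SYNCHRONIZE is the action external callers request through performAction(); the remaining values record why a given internal state transition occurred.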
+ */ + public enum SyncActions { + SYNCHRONIZE, REPAIR_INDEX, INDEX_INTEGRITY_VALIDATION_COMPLETE, PRE_SYNC_COMPLETE, SYNC_COMPLETE, SYNC_ABORTED, SYNC_FAILURE, POST_SYNC_COMPLETE, PURGE_COMPLETE, REPORT_COMPLETE + } + + private Collection registeredSynchronizers; + private Collection registeredIndexValidators; + private Collection registeredIndexCleaners; + private InternalState currentInternalState; + private ExecutorService syncControllerExecutor; + private ExecutorService statReporterExecutor; + + private long delayInMs; + private long syncFrequencyInMs; + private Date syncStartTime; + + private Date lastExecutionDate; + private AtomicInteger runCount; + private Semaphore performingActionGate; + private Calendar creationTime; + + private String syncStartTimeWithTimeZone; + private String controllerName; + + protected SyncControllerConfig syncControllerConfig; + + + + /** + * Instantiates a new sync controller. + * + * @param name the name + * @throws Exception the exception + */ + public SyncControllerImpl(SyncControllerConfig syncControllerConfig) throws Exception { + this(syncControllerConfig, null); + } + + public SyncControllerImpl(SyncControllerConfig syncControllerConfig, String targetEntityType) + throws Exception { + + this.syncControllerConfig = syncControllerConfig; + + this.delayInMs = 0L; + this.syncFrequencyInMs = 86400000L; + this.syncStartTime = null; + this.lastExecutionDate = null; + this.runCount = new AtomicInteger(0); + this.performingActionGate = new Semaphore(1); + registeredSynchronizers = new LinkedHashSet(); + registeredIndexValidators = new LinkedHashSet(); + registeredIndexCleaners = new LinkedHashSet(); + + String controllerName = syncControllerConfig.getControllerName(); + + if (targetEntityType != null) { + controllerName += " (" + targetEntityType + ")"; + } + + this.controllerName = controllerName; + + this.syncControllerExecutor = NodeUtils.createNamedExecutor("SyncController-" + controllerName, + syncControllerConfig.getNumSyncControllerWorkers(), LOG); + this.statReporterExecutor = + NodeUtils.createNamedExecutor("StatReporter-" + controllerName, 1, LOG); + + this.currentInternalState = InternalState.IDLE; + + this.creationTime = Calendar + .getInstance(TimeZone.getTimeZone(syncControllerConfig.getTimeZoneOfSyncStartTimeStamp())); + + } + + + + /** + * Change internal state. 
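+ * Logs the transition, records the new state, and immediately invokes performStateAction(), so each completed step drives the next one through the state machine.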
+ * + * @param newState the new state + * @param causedByAction the caused by action + */ + private void changeInternalState(InternalState newState, SyncActions causedByAction) { + LOG.info(AaiUiMsgs.SYNC_INTERNAL_STATE_CHANGED, controllerName, currentInternalState.toString(), + newState.toString(), causedByAction.toString()); + + this.currentInternalState = newState; + + performStateAction(); + } + + + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncController2#getDelayInMs() + */ + @Override + public long getDelayInMs() { + return delayInMs; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncController2#setDelayInMs(long) + */ + @Override + public void setDelayInMs(long delayInMs) { + this.delayInMs = delayInMs; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncController2#getSyncFrequencyInMs() + */ + @Override + public long getSyncFrequencyInMs() { + return syncFrequencyInMs; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncController2#setSyncFrequencyInMs(long) + */ + @Override + public void setSyncFrequencyInMs(long syncFrequencyInMs) { + this.syncFrequencyInMs = syncFrequencyInMs; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncController2#getSyncStartTime() + */ + @Override + public Date getSyncStartTime() { + return syncStartTime; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncController2#setSyncStartTime(java.util.Date) + */ + @Override + public void setSyncStartTime(Date syncStartTime) { + this.syncStartTime = syncStartTime; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncController2#getLastExecutionDate() + */ + @Override + public Date getLastExecutionDate() { + return lastExecutionDate; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncController2#setLastExecutionDate(java.util.Date) + */ + @Override + public void setLastExecutionDate(Date lastExecutionDate) { + this.lastExecutionDate = lastExecutionDate; + } + + @Override + public String getControllerName() { + return controllerName; + } + + + + @Override + public OperationState performAction(SyncActions requestedAction) { + + if (currentInternalState == InternalState.IDLE) { + + try { + + /* + * non-blocking semaphore acquire used to guarantee only 1 execution of the synchronization + * at a time. 
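+ * If the gate cannot be acquired the request is not queued; the caller simply gets back OperationState.IGNORED_SYNC_NOT_IDLE.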
+ */ + + switch (requestedAction) { + case SYNCHRONIZE: + + if (performingActionGate.tryAcquire()) { + try { + + long opStartTime = System.currentTimeMillis(); + + LOG.info(AaiUiMsgs.INFO_GENERIC, + getControllerName() + " started synchronization at " + + SynchronizerConstants.SIMPLE_DATE_FORMAT.format(opStartTime).replaceAll( + SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD)); + + runCount.incrementAndGet(); + + changeInternalState(InternalState.TEST_INDEX_INTEGRITY, requestedAction); + + long opEndTime = System.currentTimeMillis(); + + long opTime = (opEndTime - opStartTime); + + String durationMessage = + String.format(getControllerName() + " synchronization took '%d' ms.", opTime); + + LOG.info(AaiUiMsgs.SYNC_DURATION, durationMessage); + + if (syncControllerConfig.isPeriodicSyncEnabled()) { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + getControllerName() + " next sync to begin at " + getNextSyncTime()); + + TimeZone tz = + TimeZone.getTimeZone(syncControllerConfig.getTimeZoneOfSyncStartTimeStamp()); + + if (opTime > this.getSyncFrequencyInMs()) { + + String durationWasLongerMessage = String.format( + getControllerName() + " synchronization took '%d' ms which is larger than" + + " synchronization interval of '%d' ms.", + opTime, this.getSyncFrequencyInMs()); + + LOG.info(AaiUiMsgs.SYNC_DURATION, durationWasLongerMessage); + } + } + + } catch (Exception syncException) { + String message = "An error occurred while performing action = " + requestedAction + + ". Error = " + syncException.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } finally { + performingActionGate.release(); + } + } else { + return OperationState.IGNORED_SYNC_NOT_IDLE; + } + + break; + + default: + break; + } + + return OperationState.OK; + + } catch (Exception exc) { + String message = "An error occurred while performing action = " + requestedAction + + ". Error = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return OperationState.ERROR; + } finally { + + } + } else { + LOG.error(AaiUiMsgs.SYNC_NOT_VALID_STATE_DURING_REQUEST, currentInternalState.toString()); + return OperationState.IGNORED_SYNC_NOT_IDLE; + } + } + + /** + * Perform state action. + */ + private void performStateAction() { + + try { + switch (currentInternalState) { + + case TEST_INDEX_INTEGRITY: + performIndexIntegrityValidation(); + break; + + case PRE_SYNC: + performPreSyncCleanupCollection(); + break; + + case SYNC_OPERATION: + performSynchronization(); + break; + + case POST_SYNC: + performIndexSyncPostCollection(); + changeInternalState(InternalState.SELECTIVE_DELETE, SyncActions.POST_SYNC_COMPLETE); + break; + + case SELECTIVE_DELETE: + performIndexCleanup(); + changeInternalState(InternalState.GENERATE_FINAL_REPORT, SyncActions.PURGE_COMPLETE); + break; + + case GENERATE_FINAL_REPORT: + + dumpStatReport(true); + clearCaches(); + changeInternalState(InternalState.IDLE, SyncActions.REPORT_COMPLETE); + break; + + case ABORTING_SYNC: + performSyncAbort(); + break; + + default: + break; + } + } catch (Exception exc) { + /* + * Perhaps we should abort the sync on an exception + */ + String message = "Caught an error while performing action.
Error = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + @Override + public void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer) { + + String indexName = entitySynchronizer.getIndexName(); + + if (indexName != null) { + registeredSynchronizers.add(entitySynchronizer); + } else { + String message = "Failed to register entity synchronizer because index name is null"; + LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); + } + + } + + @Override + public void registerIndexValidator(IndexValidator indexValidator) { + + String indexName = indexValidator.getIndexName(); + + if (indexName != null) { + registeredIndexValidators.add(indexValidator); + } else { + String message = "Failed to register index validator because index name is null"; + LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); + } + + } + + @Override + public void registerIndexCleaner(IndexCleaner indexCleaner) { + + String indexName = indexCleaner.getIndexName(); + + if (indexName != null) { + registeredIndexCleaners.add(indexCleaner); + } else { + String message = "Failed to register index cleaner because index name is null"; + LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); + } + } + + /* + * State machine should drive our flow dosync just dispatches an action and the state machine + * determines what is in play and what is next + */ + + /** + * Dump stat report. + * + * @param showFinalReport the show final report + */ + private void dumpStatReport(boolean showFinalReport) { + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + + String statReport = synchronizer.getStatReport(showFinalReport); + + if (statReport != null) { + LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); + } + } + } + + /** + * Clear caches. + */ + private void clearCaches() { + + /* + * Any entity caches that were built as part of the sync operation should be cleared to save + * memory. The original intent of the caching was to provide a short-lived cache to satisfy + * entity requests from multiple synchronizers yet minimizing interactions with the AAI. + */ + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + synchronizer.clearCache(); + } + } + + /** + * Perform pre sync cleanup collection. + */ + private void performPreSyncCleanupCollection() { + + /* + * ask the index cleaners to collect the their pre-sync object id collections + */ + + for (IndexCleaner cleaner : registeredIndexCleaners) { + cleaner.populatePreOperationCollection(); + } + + changeInternalState(InternalState.SYNC_OPERATION, SyncActions.PRE_SYNC_COMPLETE); + + } + + /** + * Perform index sync post collection. + */ + private void performIndexSyncPostCollection() { + + /* + * ask the entity purgers to collect the their pre-sync object id collections + */ + + for (IndexCleaner cleaner : registeredIndexCleaners) { + cleaner.populatePostOperationCollection(); + } + + } + + /** + * Perform index cleanup. + */ + private void performIndexCleanup() { + + /* + * ask the entity purgers to collect the their pre-sync object id collections + */ + + for (IndexCleaner cleaner : registeredIndexCleaners) { + cleaner.performCleanup(); + } + + } + + /** + * Perform sync abort. + */ + private void performSyncAbort() { + changeInternalState(InternalState.IDLE, SyncActions.SYNC_ABORTED); + } + + /** + * Perform index integrity validation. 
+ */ + private void performIndexIntegrityValidation() { + + /* + * loop through registered index validators and test and fix, if needed + */ + + for (IndexValidator validator : registeredIndexValidators) { + try { + if (!validator.exists()) { + validator.createOrRepair(); + } + } catch (Exception exc) { + String message = "Index validator caused an error = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + changeInternalState(InternalState.PRE_SYNC, SyncActions.INDEX_INTEGRITY_VALIDATION_COMPLETE); + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncControllerInterface#shutdown() + */ + @Override + public void shutdown() { + + this.syncControllerExecutor.shutdown(); + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + + try { + synchronizer.shutdown(); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Synchronizer shutdown caused an error = " + exc.getMessage()); + } + + } + this.statReporterExecutor.shutdown(); + } + + /* + * Need some kind of task running that responds to a transient boolean to kill it or we just stop + * the executor that it is in? + */ + + + + /** + * Perform synchronization. + */ + private void performSynchronization() { + + /* + * Get all the synchronizers running in parallel + */ + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + supplyAsync(new Supplier() { + + @Override + public Void get() { + + synchronizer.doSync(); + return null; + } + + }, this.syncControllerExecutor).whenComplete((result, error) -> { + + /* + * We don't bother checking the result, because it will always be null as the doSync() is + * non-blocking. + */ + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "doSync operation failed with an error = " + error.getMessage()); + } + }); + } + + boolean allDone = false; + long nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; + boolean dumpPeriodicStatReport = false; + + while (!allDone) { + int totalFinished = 0; + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + if (dumpPeriodicStatReport) { + if (synchronizer.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + String statReport = synchronizer.getStatReport(false); + + if (statReport != null) { + LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); + } + } + } + + if (synchronizer.getState() == SynchronizerState.IDLE + || synchronizer.getState() == SynchronizerState.ABORTED) { + totalFinished++; + } + } + + if (System.currentTimeMillis() > nextReportTimeStampInMs) { + dumpPeriodicStatReport = true; + nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; + } else { + dumpPeriodicStatReport = false; + } + + allDone = (totalFinished == registeredSynchronizers.size()); + + try { + Thread.sleep(250); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred while waiting for sync to complete. 
Error = " + exc.getMessage()); + } + + } + + changeInternalState(InternalState.POST_SYNC, SyncActions.SYNC_COMPLETE); + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.SyncControllerInterface#getState() + */ + @Override + public SynchronizerState getState() { + + switch (currentInternalState) { + + case IDLE: { + return SynchronizerState.IDLE; + } + + default: { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + + } + } + + } + + @Override + public Calendar getCreationTime() { + return creationTime; + } + + @Override + public String getNextSyncTime() { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean isPeriodicSyncEnabled() { + return syncControllerConfig.isPeriodicSyncEnabled(); + } + + @Override + public boolean isRunOnceSyncEnabled() { + return syncControllerConfig.isRunOnceSyncEnabled(); + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java b/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java new file mode 100644 index 0000000..cb2f3ce --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java @@ -0,0 +1,27 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +public interface SyncControllerRegistrar { + public void registerController(); +} diff --git a/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java b/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java new file mode 100644 index 0000000..90845e0 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java @@ -0,0 +1,48 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import java.util.ArrayList; +import java.util.List; + +public class SyncControllerRegistry { + + private List controllers; + + public SyncControllerRegistry() { + controllers = new ArrayList(); + } + + public void registerSyncController(SyncController controller) { + controllers.add(controller); + } + + public List getControllers() { + return controllers; + } + + public void setControllers(List controllers) { + this.controllers = controllers; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java b/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java new file mode 100644 index 0000000..a137065 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java @@ -0,0 +1,220 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync; + +import java.lang.Thread.UncaughtExceptionHandler; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.springframework.context.ApplicationListener; +import org.springframework.context.event.ApplicationContextEvent; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; + +public class SyncControllerService implements ApplicationListener { + + private SyncControllerRegistry syncControllerRegistry; + private ExecutorService runonceSyncExecutor; + private ScheduledExecutorService periodicSyncExecutor; + private boolean syncStarted; + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(SyncControllerService.class); + + private class SyncControllerTask implements Runnable { + + private SyncController controller; + + public SyncControllerTask(SyncController controller) { + this.controller = controller; + } + + @Override + public void run() { + + try { + + if (controller.getState() == SynchronizerState.IDLE) { + + /* + * This is a blocking-call, but would be nicer if it was async internally within the + * controller but at the moment, that's not the way it works. + */ + + if (controller.performAction(SyncActions.SYNCHRONIZE) != OperationState.OK) { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " is not idle, sync attempt has been skipped."); + } + } else { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " is not idle, sync attempt has been skipped."); + } + + } catch (Exception exception) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Error while attempting synchronization. 
Error = " + exception.getMessage()); + } + + } + + } + + public SyncControllerService(SyncControllerRegistry syncControllerRegistry, int numRunOnceWorkers, + int numPeriodicWorkers) { + this.syncControllerRegistry = syncControllerRegistry; + this.syncStarted = false; + + UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() { + + @Override + public void uncaughtException(Thread thread, Throwable exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc); + } + }; + + runonceSyncExecutor = Executors.newFixedThreadPool(numRunOnceWorkers, + new ThreadFactoryBuilder().setNameFormat("RunonceSyncWorker-%d") + .setUncaughtExceptionHandler(uncaughtExceptionHandler).build()); + + + periodicSyncExecutor = Executors.newScheduledThreadPool(numPeriodicWorkers, + new ThreadFactoryBuilder().setNameFormat("PeriodicSyncWorker-%d") + .setUncaughtExceptionHandler(uncaughtExceptionHandler).build()); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void startSync() { + + long syncInitialDelayInMs = 0; + + for (SyncController controller : syncControllerRegistry.getControllers()) { + + syncInitialDelayInMs = controller.getDelayInMs(); + + if (!controller.isPeriodicSyncEnabled()) { + + if (controller.isRunOnceSyncEnabled()) { + LOG.info(AaiUiMsgs.INFO_GENERIC, controller.getControllerName() + " is enabled."); + runonceSyncExecutor.submit(new SyncControllerTask(controller)); + } else { + LOG.info(AaiUiMsgs.INFO_GENERIC, controller.getControllerName() + " is disabled."); + } + + } else { + + /** + * Do both. We'll take one instance of the SyncController and wrap the object instance into + * two SyncControllerTasks. The responsibility for preventing a conflicting sync should live + * in the SyncController instance. If a sync is underway when the periodic sync kicks in, + * then it will be ignored by the SyncController which is already underway. + * + * The SyncController instance itself would then also be stateful such that it would know + * the last time it ran, and the next time it is supposed to run, the number times a sync + * has executed, etc. + */ + + if (controller.isRunOnceSyncEnabled()) { + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " run-once sync is enabled."); + runonceSyncExecutor.submit(new SyncControllerTask(controller)); + } else { + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " run-once sync is disabled."); + } + + /* + * The controller knows it's configuredfrequency and we can just ask it to tell us what the + * delay and frequency needs to be, rather than trying to calculate the configured frequency + * per controller which "could" be different for each controller. 
+ */ + + if (controller.isPeriodicSyncEnabled()) { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " periodic sync is enabled and scheduled to start @ " + + controller.getNextSyncTime()); + + periodicSyncExecutor.scheduleAtFixedRate(new SyncControllerTask(controller), + controller.getDelayInMs(), controller.getSyncFrequencyInMs(), TimeUnit.MILLISECONDS); + + } else { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " periodic sync is disabled."); + + } + + } + + } + + } + + public void shutdown() { + + if (runonceSyncExecutor != null) { + runonceSyncExecutor.shutdown(); + } + + if (periodicSyncExecutor != null) { + periodicSyncExecutor.shutdown(); + } + + if (syncControllerRegistry != null) { + for (SyncController controller : syncControllerRegistry.getControllers()) { + controller.shutdown(); + } + } + + } + + @Override + public synchronized void onApplicationEvent(ApplicationContextEvent arg0) { + + /* + * Start sync service processing when spring-context-initialization has finished + */ + + if (!syncStarted) { + syncStarted = true; + startSync(); + } + + } + + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java b/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java new file mode 100644 index 0000000..73d34bc --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java @@ -0,0 +1,63 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import java.text.SimpleDateFormat; +import java.util.Date; + +/** + * The Class SynchronizerConstants. 
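+ * Shared defaults and AAI query-depth modifiers used by the synchronizers and their configs. As an illustrative (not authoritative) example, a configured start timestamp such as {@code "05:00:00 UTC+00:00"} is the kind of value TIMESTAMP24HOURS_PATTERN is written to accept.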
+ */ +public final class SynchronizerConstants { + // Error values for invalid user input + public static final int DEFAULT_CONFIG_ERROR_INT_VALUE = Integer.MAX_VALUE; + public static final Date DEFAULT_CONFIG_ERROR_DATE_VALUE = new Date(Long.MAX_VALUE); + public static final SimpleDateFormat SIMPLE_DATE_FORMAT = + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z"); + + public static final String DEPTH_MODIFIER = "?depth=0"; + public static final String DEPTH_ALL_MODIFIER = "?depth=all"; + public static final String DEPTH_AND_NODES_ONLY_MODIFIER = "?depth=0&nodes-only"; + public static final String NODES_ONLY_MODIFIER = "?nodes-only"; + + // constants for scheduling synchronizer + public static final int COMPONENTS_IN_TIMESTAMP = 2; + public static final String DEFAULT_INITIAL_DELAY_IN_MS = "0"; + public static final String DEFAULT_TASK_FREQUENCY_IN_DAY = "0"; + public static final String DEFAULT_START_TIMESTAMP = "05:00:00 UTC"; + public static final long DELAY_NO_STARTUP_SYNC_IN_MS = 0; + public static final long DELAY_NO_PERIODIC_SYNC_IN_MS = 0; + public static final int IDX_TIME_IN_TIMESTAMP = 0; + public static final int IDX_TIMEZONE_IN_TIMESTAMP = 1; + public static final long MILLISEC_IN_A_MIN = 60000; + public static final long MILLISEC_IN_A_DAY = 24 * 60 * 60 * 1000; + public static final String TIME_STD = "GMT"; + public static final String TIME_CONFIG_STD = "UTC"; + public static final String TIMESTAMP24HOURS_PATTERN = + "([01]?[0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9] UTC[+|-][0-5][0-9]:[0-5][0-9]"; + + /** + * Instantiates a new synchronizer constants. + */ + private SynchronizerConstants() {} +} diff --git a/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java b/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java new file mode 100644 index 0000000..3e8a0ea --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java @@ -0,0 +1,133 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import org.onap.aai.sparky.analytics.AbstractStatistics; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; + +/** + * The Class TaskProcessingStats. 
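+ * Collects histograms for task age, response time, response size in bytes and transactions per second, using the bin and axis settings supplied by NetworkStatisticsConfig. A minimal usage sketch, assuming a populated NetworkStatisticsConfig named {@code config} (names are illustrative only): {@code TaskProcessingStats stats = new TaskProcessingStats(config); stats.updateTaskResponseStatsHistogram(42); String report = stats.getStatisticsReport(false, "  ");}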
+ */ +public class TaskProcessingStats extends AbstractStatistics { + + private static String TASK_AGE_STATS = "taskAgeStats"; + private static String TASK_RESPONSE_STATS = "taskResponseStats"; + private static String RESPONSE_SIZE_IN_BYTES = "taskResponseSizeInBytes"; + // private static String QUEUE_ITEM_LENGTH = "queueItemLength"; + private static String TPS = "transactionsPerSecond"; + + /** + * Instantiates a new task processing stats. + * + * @param config the config + */ + public TaskProcessingStats(NetworkStatisticsConfig config) { + + addHistogram(TASK_AGE_STATS, config.getTaskAgeHistogramLabel(), + config.getTaskAgeHistogramMaxYAxis(), config.getTaskAgeHistogramNumBins(), + config.getTaskAgeHistogramNumDecimalPoints()); + + addHistogram(TASK_RESPONSE_STATS, config.getResponseTimeHistogramLabel(), + config.getResponseTimeHistogramMaxYAxis(), config.getResponseTimeHistogramNumBins(), + config.getResponseTimeHistogramNumDecimalPoints()); + + addHistogram(RESPONSE_SIZE_IN_BYTES, config.getBytesHistogramLabel(), + config.getBytesHistogramMaxYAxis(), config.getBytesHistogramNumBins(), + config.getBytesHistogramNumDecimalPoints()); + + /* + * addHistogram(QUEUE_ITEM_LENGTH, config.getQueueLengthHistogramLabel(), + * config.getQueueLengthHistogramMaxYAxis(), config.getQueueLengthHistogramNumBins(), + * config.getQueueLengthHistogramNumDecimalPoints()); + */ + + addHistogram(TPS, config.getTpsHistogramLabel(), config.getTpsHistogramMaxYAxis(), + config.getTpsHistogramNumBins(), config.getTpsHistogramNumDecimalPoints()); + + } + + /* + * public void updateQueueItemLengthHistogram(long value) { updateHistogram(QUEUE_ITEM_LENGTH, + * value); } + */ + + /** + * Update task age stats histogram. + * + * @param value the value + */ + public void updateTaskAgeStatsHistogram(long value) { + updateHistogram(TASK_AGE_STATS, value); + } + + /** + * Update task response stats histogram. + * + * @param value the value + */ + public void updateTaskResponseStatsHistogram(long value) { + updateHistogram(TASK_RESPONSE_STATS, value); + } + + /** + * Update response size in bytes histogram. + * + * @param value the value + */ + public void updateResponseSizeInBytesHistogram(long value) { + updateHistogram(RESPONSE_SIZE_IN_BYTES, value); + } + + /** + * Update transactions per second histogram. + * + * @param value the value + */ + public void updateTransactionsPerSecondHistogram(long value) { + updateHistogram(TPS, value); + } + + /** + * Gets the statistics report. 
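+ * The report is a newline-separated block containing one histogram section per tracked metric.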
+ * + * @param verboseEnabled the verbose enabled + * @param indentPadding the indent padding + * @return the statistics report + */ + public String getStatisticsReport(boolean verboseEnabled, String indentPadding) { + + StringBuilder sb = new StringBuilder(); + + sb.append("\n").append(getHistogramStats(TASK_AGE_STATS, verboseEnabled, indentPadding)); + // sb.append("\n").append(getHistogramStats(QUEUE_ITEM_LENGTH, verboseEnabled, indentPadding)); + sb.append("\n").append(getHistogramStats(TASK_RESPONSE_STATS, verboseEnabled, indentPadding)); + sb.append("\n") + .append(getHistogramStats(RESPONSE_SIZE_IN_BYTES, verboseEnabled, indentPadding)); + sb.append("\n").append(getHistogramStats(TPS, verboseEnabled, indentPadding)); + + return sb.toString(); + + } + + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java b/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java new file mode 100644 index 0000000..a120661 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java @@ -0,0 +1,73 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import java.util.concurrent.atomic.AtomicInteger; + +import org.onap.aai.sparky.analytics.AveragingRingBuffer; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; + +public class TransactionRateMonitor { + + private AtomicInteger numTransactions; + private AveragingRingBuffer responseTimeTracker; + private long startTimeInMs; + + /** + * Instantiates a new transaction rate controller. + * + * @param config the config + */ + public TransactionRateMonitor(int numWorkerThreads, NetworkStatisticsConfig config) { + + this.responseTimeTracker = new AveragingRingBuffer( + config.getNumSamplesPerThreadForRunningAverage() * numWorkerThreads); + this.startTimeInMs = System.currentTimeMillis(); + this.numTransactions = new AtomicInteger(0); + } + + /** + * Track response time. 
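+ * Adds the sample to the averaging ring buffer behind getAvg() and increments the transaction counter used by getCurrentTps(). A typical (hypothetical) call site would be {@code trackResponseTime(System.currentTimeMillis() - txnStartTimeInMs)}.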
+ * + * @param responseTimeInMs the response time in ms + */ + public void trackResponseTime(long responseTimeInMs) { + this.numTransactions.incrementAndGet(); + responseTimeTracker.addSample(responseTimeInMs); + } + + public long getAvg() { + return responseTimeTracker.getAvg(); + } + + public double getCurrentTps() { + if (numTransactions.get() > 0) { + double timeDelta = System.currentTimeMillis() - startTimeInMs; + double numTxns = numTransactions.get(); + return (numTxns / timeDelta) * 1000.0; + } + + return 0.0; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java b/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java new file mode 100644 index 0000000..6bea1a4 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java @@ -0,0 +1,70 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.config; + +public class ElasticSearchEndpointConfig { + + private String esIpAddress; + private String esServerPort; + private int scrollContextTimeToLiveInMinutes; + private int scrollContextBatchRequestSize; + + public ElasticSearchEndpointConfig() { + + } + + public String getEsIpAddress() { + return esIpAddress; + } + + public void setEsIpAddress(String esIpAddress) { + this.esIpAddress = esIpAddress; + } + + public String getEsServerPort() { + return esServerPort; + } + + public void setEsServerPort(String esServerPort) { + this.esServerPort = esServerPort; + } + + public int getScrollContextTimeToLiveInMinutes() { + return scrollContextTimeToLiveInMinutes; + } + + public void setScrollContextTimeToLiveInMinutes(int scrollContextTimeToLiveInMinutes) { + this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes; + } + + public int getScrollContextBatchRequestSize() { + return scrollContextBatchRequestSize; + } + + public void setScrollContextBatchRequestSize(int scrollContextBatchRequestSize) { + this.scrollContextBatchRequestSize = scrollContextBatchRequestSize; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java b/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java new file mode 100644 index 0000000..1e4ba15 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java @@ -0,0 +1,75 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.config; + +public class ElasticSearchSchemaConfig { + + private String indexName; + private String indexDocType; + private String indexSettingsFileName; + private String indexMappingsFileName; + + public String getIndexName() { + return indexName; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + public String getIndexDocType() { + return indexDocType; + } + + public void setIndexDocType(String indexDocType) { + this.indexDocType = indexDocType; + } + + public String getIndexSettingsFileName() { + return indexSettingsFileName; + } + + public void setIndexSettingsFileName(String indexSettingsFileName) { + this.indexSettingsFileName = indexSettingsFileName; + } + + public String getIndexMappingsFileName() { + return indexMappingsFileName; + } + + public void setIndexMappingsFileName(String indexMappingsFileName) { + this.indexMappingsFileName = indexMappingsFileName; + } + + @Override + public String toString() { + return "ElasticSearchSchemaConfig [" + + (indexName != null ? "indexName=" + indexName + ", " : "") + + (indexDocType != null ? "indexDocType=" + indexDocType + ", " : "") + + (indexSettingsFileName != null ? "indexSettingsFileName=" + indexSettingsFileName + ", " + : "") + + (indexMappingsFileName != null ? "indexMappingsFileName=" + indexMappingsFileName : "") + + "]"; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java b/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java new file mode 100644 index 0000000..34de88b --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java @@ -0,0 +1,237 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.config; + +public class NetworkStatisticsConfig { + + private int numSamplesPerThreadForRunningAverage; + + private String bytesHistogramLabel; + + private long bytesHistogramMaxYAxis; + + private int bytesHistogramNumBins; + + private int bytesHistogramNumDecimalPoints; + + private String queueLengthHistogramLabel; + + private long queueLengthHistogramMaxYAxis; + + private int queueLengthHistogramNumBins; + + private int queueLengthHistogramNumDecimalPoints; + + private String taskAgeHistogramLabel; + + private long taskAgeHistogramMaxYAxis; + + private int taskAgeHistogramNumBins; + + private int taskAgeHistogramNumDecimalPoints; + + private String responseTimeHistogramLabel; + + private long responseTimeHistogramMaxYAxis; + + private int responseTimeHistogramNumBins; + + private int responseTimeHistogramNumDecimalPoints; + + private String tpsHistogramLabel; + + private long tpsHistogramMaxYAxis; + + private int tpsHistogramNumBins; + + private int tpsHistogramNumDecimalPoints; + + public int getNumSamplesPerThreadForRunningAverage() { + return numSamplesPerThreadForRunningAverage; + } + + public void setNumSamplesPerThreadForRunningAverage(int numSamplesPerThreadForRunningAverage) { + this.numSamplesPerThreadForRunningAverage = numSamplesPerThreadForRunningAverage; + } + + public String getBytesHistogramLabel() { + return bytesHistogramLabel; + } + + public void setBytesHistogramLabel(String bytesHistogramLabel) { + this.bytesHistogramLabel = bytesHistogramLabel; + } + + public long getBytesHistogramMaxYAxis() { + return bytesHistogramMaxYAxis; + } + + public void setBytesHistogramMaxYAxis(long bytesHistogramMaxYAxis) { + this.bytesHistogramMaxYAxis = bytesHistogramMaxYAxis; + } + + public int getBytesHistogramNumBins() { + return bytesHistogramNumBins; + } + + public void setBytesHistogramNumBins(int bytesHistogramNumBins) { + this.bytesHistogramNumBins = bytesHistogramNumBins; + } + + public int getBytesHistogramNumDecimalPoints() { + return bytesHistogramNumDecimalPoints; + } + + public void setBytesHistogramNumDecimalPoints(int bytesHistogramNumDecimalPoints) { + this.bytesHistogramNumDecimalPoints = bytesHistogramNumDecimalPoints; + } + + public String getQueueLengthHistogramLabel() { + return queueLengthHistogramLabel; + } + + public void setQueueLengthHistogramLabel(String queueLengthHistogramLabel) { + this.queueLengthHistogramLabel = queueLengthHistogramLabel; + } + + public long getQueueLengthHistogramMaxYAxis() { + return queueLengthHistogramMaxYAxis; + } + + public void setQueueLengthHistogramMaxYAxis(long queueLengthHistogramMaxYAxis) { + this.queueLengthHistogramMaxYAxis = queueLengthHistogramMaxYAxis; + } + + public int getQueueLengthHistogramNumBins() { + return queueLengthHistogramNumBins; + } + + public void setQueueLengthHistogramNumBins(int queueLengthHistogramNumBins) { + this.queueLengthHistogramNumBins = queueLengthHistogramNumBins; + } + + public int getQueueLengthHistogramNumDecimalPoints() { + return queueLengthHistogramNumDecimalPoints; + } + + public void setQueueLengthHistogramNumDecimalPoints(int queueLengthHistogramNumDecimalPoints) { + this.queueLengthHistogramNumDecimalPoints = queueLengthHistogramNumDecimalPoints; + } + + public String getTaskAgeHistogramLabel() { + return taskAgeHistogramLabel; + } + + public void setTaskAgeHistogramLabel(String taskAgeHistogramLabel) { + this.taskAgeHistogramLabel = taskAgeHistogramLabel; + } + + public long getTaskAgeHistogramMaxYAxis() { + return taskAgeHistogramMaxYAxis; + } 
+ + public void setTaskAgeHistogramMaxYAxis(long taskAgeHistogramMaxYAxis) { + this.taskAgeHistogramMaxYAxis = taskAgeHistogramMaxYAxis; + } + + public int getTaskAgeHistogramNumBins() { + return taskAgeHistogramNumBins; + } + + public void setTaskAgeHistogramNumBins(int taskAgeHistogramNumBins) { + this.taskAgeHistogramNumBins = taskAgeHistogramNumBins; + } + + public int getTaskAgeHistogramNumDecimalPoints() { + return taskAgeHistogramNumDecimalPoints; + } + + public void setTaskAgeHistogramNumDecimalPoints(int taskAgeHistogramNumDecimalPoints) { + this.taskAgeHistogramNumDecimalPoints = taskAgeHistogramNumDecimalPoints; + } + + public String getResponseTimeHistogramLabel() { + return responseTimeHistogramLabel; + } + + public void setResponseTimeHistogramLabel(String responseTimeHistogramLabel) { + this.responseTimeHistogramLabel = responseTimeHistogramLabel; + } + + public long getResponseTimeHistogramMaxYAxis() { + return responseTimeHistogramMaxYAxis; + } + + public void setResponseTimeHistogramMaxYAxis(long responseTimeHistogramMaxYAxis) { + this.responseTimeHistogramMaxYAxis = responseTimeHistogramMaxYAxis; + } + + public int getResponseTimeHistogramNumBins() { + return responseTimeHistogramNumBins; + } + + public void setResponseTimeHistogramNumBins(int responseTimeHistogramNumBins) { + this.responseTimeHistogramNumBins = responseTimeHistogramNumBins; + } + + public int getResponseTimeHistogramNumDecimalPoints() { + return responseTimeHistogramNumDecimalPoints; + } + + public void setResponseTimeHistogramNumDecimalPoints(int responseTimeHistogramNumDecimalPoints) { + this.responseTimeHistogramNumDecimalPoints = responseTimeHistogramNumDecimalPoints; + } + + public String getTpsHistogramLabel() { + return tpsHistogramLabel; + } + + public void setTpsHistogramLabel(String tpsHistogramLabel) { + this.tpsHistogramLabel = tpsHistogramLabel; + } + + public long getTpsHistogramMaxYAxis() { + return tpsHistogramMaxYAxis; + } + + public void setTpsHistogramMaxYAxis(long tpsHistogramMaxYAxis) { + this.tpsHistogramMaxYAxis = tpsHistogramMaxYAxis; + } + + public int getTpsHistogramNumBins() { + return tpsHistogramNumBins; + } + + public void setTpsHistogramNumBins(int tpsHistogramNumBins) { + this.tpsHistogramNumBins = tpsHistogramNumBins; + } + + public int getTpsHistogramNumDecimalPoints() { + return tpsHistogramNumDecimalPoints; + } + + public void setTpsHistogramNumDecimalPoints(int tpsHistogramNumDecimalPoints) { + this.tpsHistogramNumDecimalPoints = tpsHistogramNumDecimalPoints; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java b/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java new file mode 100644 index 0000000..eb3a73f --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java @@ -0,0 +1,303 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.config; + +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Date; +import java.util.List; +import java.util.TimeZone; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.onap.aai.sparky.sync.SynchronizerConstants; + +public class SyncControllerConfig { + + private static final String UNKNOWN_CONTROLLER_NAME = "UnknownControllerName"; + + private String controllerName; + private boolean enabled; + private int syncTaskDelayInMs; + private int syncTaskFrequencyInDays; + + private int numSyncControllerWorkers; + private boolean runOnceSyncEnabled; + private boolean periodicSyncEnabled; + + private String targetSyncStartTimeStamp; + + private int numInternalSyncWorkers; + private int numSyncElasticWorkers; + private int numSyncActiveInventoryWorkers; + + /* + * calculated variables based on incoming config + */ + private String timeZoneOfSyncStartTimeStamp; + private int syncTaskStartTimeHr; + private int syncTaskStartTimeMin; + private int syncTaskStartTimeSec; + + + + public SyncControllerConfig() { + controllerName = UNKNOWN_CONTROLLER_NAME; + enabled = false; + syncTaskDelayInMs = 0; + syncTaskFrequencyInDays = 365; + numSyncControllerWorkers = 1; + runOnceSyncEnabled = false; + periodicSyncEnabled = false; + targetSyncStartTimeStamp = SynchronizerConstants.DEFAULT_START_TIMESTAMP; + numInternalSyncWorkers = 2; + numSyncElasticWorkers = 5; + numSyncActiveInventoryWorkers = 5; + } + + protected void initializeSyncTimeParameters() { + + if (syncTaskDelayInMs < 0) { + throw new IllegalArgumentException("syncTaskDelayInMs must >= 0"); + } + + Pattern pattern = Pattern.compile(SynchronizerConstants.TIMESTAMP24HOURS_PATTERN); + Matcher matcher = pattern.matcher(targetSyncStartTimeStamp); + if (!matcher.matches()) { + throw new IllegalArgumentException("Invalid time format for targetSyncStartTimeStamp"); + } + + List timestampVal = Arrays.asList(targetSyncStartTimeStamp.split(" ")); + + if (timestampVal.size() == SynchronizerConstants.COMPONENTS_IN_TIMESTAMP) { + + // Need both time and timezone offset + timeZoneOfSyncStartTimeStamp = timestampVal + .get(SynchronizerConstants.IDX_TIMEZONE_IN_TIMESTAMP).replaceAll("UTC", "GMT"); + + String time = timestampVal.get(SynchronizerConstants.IDX_TIME_IN_TIMESTAMP); + DateFormat format = new SimpleDateFormat("HH:mm:ss"); + + Date date = null; + + try { + date = format.parse(time); + } catch (ParseException parseException) { + throw new IllegalArgumentException(parseException); + } + + Calendar calendar = Calendar.getInstance(); + calendar.setTime(date); + + syncTaskStartTimeHr = calendar.get(Calendar.HOUR_OF_DAY); + syncTaskStartTimeMin = calendar.get(Calendar.MINUTE); + syncTaskStartTimeSec = calendar.get(Calendar.SECOND); + } else { + throw new IllegalArgumentException("Invalid timestamp 
format from targetSyncStartTimeStamp"); + } + + } + + + public int getNumInternalSyncWorkers() { + return numInternalSyncWorkers; + } + + public void setNumInternalSyncWorkers(int numInternalSyncWorkers) { + this.numInternalSyncWorkers = numInternalSyncWorkers; + } + + public int getNumSyncElasticWorkers() { + return numSyncElasticWorkers; + } + + public void setNumSyncElasticWorkers(int numSyncElasticWorkers) { + this.numSyncElasticWorkers = numSyncElasticWorkers; + } + + public int getNumSyncActiveInventoryWorkers() { + return numSyncActiveInventoryWorkers; + } + + public void setNumSyncActiveInventoryWorkers(int numSyncActiveInventoryWorkers) { + this.numSyncActiveInventoryWorkers = numSyncActiveInventoryWorkers; + } + + public String getTargetSyncStartTimeStamp() { + return targetSyncStartTimeStamp; + } + + public void setTargetSyncStartTimeStamp(String targetSyncStartTimeStamp) { + this.targetSyncStartTimeStamp = targetSyncStartTimeStamp; + initializeSyncTimeParameters(); + } + + public String getControllerName() { + return controllerName; + } + + public void setControllerName(String controllerName) { + this.controllerName = controllerName; + } + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + public int getSyncTaskDelayInMs() { + return syncTaskDelayInMs; + } + + public void setSyncTaskDelayInMs(int syncTaskDelayInMs) { + this.syncTaskDelayInMs = syncTaskDelayInMs; + } + + public int getSyncTaskFrequencyInDays() { + return syncTaskFrequencyInDays; + } + + public void setSyncTaskFrequencyInDays(int syncTaskFrequencyInDays) { + this.syncTaskFrequencyInDays = syncTaskFrequencyInDays; + } + + public int getNumSyncControllerWorkers() { + return numSyncControllerWorkers; + } + + public void setNumSyncControllerWorkers(int numSyncControllerWorkers) { + this.numSyncControllerWorkers = numSyncControllerWorkers; + } + + public boolean isRunOnceSyncEnabled() { + return runOnceSyncEnabled; + } + + public void setRunOnceSyncEnabled(boolean runOnceSyncEnabled) { + this.runOnceSyncEnabled = runOnceSyncEnabled; + } + + public boolean isPeriodicSyncEnabled() { + return periodicSyncEnabled; + } + + public void setPeriodicSyncEnabled(boolean periodicSyncEnabled) { + this.periodicSyncEnabled = periodicSyncEnabled; + } + + public long getSyncFrequencyInMs() { + + return (syncTaskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); + + } + + public Calendar getTargetSyncTime() { + + TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp); + Calendar targetSyncTime = Calendar.getInstance(tz); + + targetSyncTime.set(Calendar.HOUR_OF_DAY, syncTaskStartTimeHr); + targetSyncTime.set(Calendar.MINUTE, syncTaskStartTimeMin); + targetSyncTime.set(Calendar.SECOND, syncTaskStartTimeSec); + + return targetSyncTime; + + } + + + public String getNextSyncTime() { + + int taskFrequencyInSeconds = 0; + if (getSyncFrequencyInMs() > 0) { + taskFrequencyInSeconds = (int) (getSyncFrequencyInMs() / 1000); + } + + if (taskFrequencyInSeconds < 86400) { + + TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp); + Calendar targetSyncTime = Calendar.getInstance(tz); + targetSyncTime.add(Calendar.SECOND, taskFrequencyInSeconds); + + return SynchronizerConstants.SIMPLE_DATE_FORMAT.format(targetSyncTime.getTimeInMillis()) + .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD); + + } else { + + return SynchronizerConstants.SIMPLE_DATE_FORMAT + .format(getNextSyncTime(getTargetSyncTime(), 
taskFrequencyInSeconds)) + .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD); + + } + + } + + public long getNextSyncTime(Calendar syncTime, int taskFrequencyInSeconds) { + + TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp); + Calendar timeNow = Calendar.getInstance(tz); + + return getNextSyncTime(syncTime, timeNow.getTimeInMillis(), taskFrequencyInSeconds); + } + + /** + * Gets the first sync time. + * + * @param calendar the calendar + * @param timeNow the time now in ms + * @param taskFrequencyInMs task period in ms + * @return the first sync time + */ + + public long getNextSyncTime(Calendar syncTime, long timeNowInMs, int taskFrequencyInSeconds) { + if (taskFrequencyInSeconds == 0) { + return 0; + } else if (timeNowInMs > syncTime.getTimeInMillis()) { + + /* + * If current time is after the scheduled sync start time, then we'll skip ahead to the next + * sync time period + */ + + syncTime.add(Calendar.SECOND, taskFrequencyInSeconds); + } + + return syncTime.getTimeInMillis(); + } + + public String getTimeZoneOfSyncStartTimeStamp() { + return timeZoneOfSyncStartTimeStamp; + } + + public void setTimeZoneOfSyncStartTimeStamp(String timeZoneOfSyncStartTimeStamp) { + this.timeZoneOfSyncStartTimeStamp = timeZoneOfSyncStartTimeStamp; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java b/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java new file mode 100644 index 0000000..c4f805e --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java @@ -0,0 +1,102 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.HashMap; +import java.util.Map; + +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + * The Class AggregationEntity. + */ +public class AggregationEntity extends IndexableEntity implements IndexDocument { + private Map attributes = new HashMap(); + protected ObjectMapper mapper = new ObjectMapper(); + + /** + * Instantiates a new aggregation entity. 
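+ *
+ * <p>An instance is typically populated from an AAI entity: attributes are copied in via
+ * {@code copyAttributeKeyValuePair()}, {@code deriveFields()} derives the document id as a
+ * SHA digest of the entity self-link, and {@code getAsJson()} emits the link, lastmodTimestamp
+ * and copied attributes for indexing. A minimal illustrative sketch (variable names are
+ * assumptions, not part of this patch):
+ *
+ * <pre>{@code
+ * AggregationEntity ae = new AggregationEntity();
+ * ae.setLink(selfLink);                       // self-link of the AAI entity
+ * ae.copyAttributeKeyValuePair(attributeMap); // attribute name/value pairs from AAI
+ * ae.deriveFields();                          // id = SHA digest of the link
+ * String json = ae.getAsJson();               // document body sent to the index
+ * }</pre>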
+ */ + public AggregationEntity() { + super(); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. + * The best we can hope for is identification of resources by generated Id until the + * Identity-Service UUID is tagged against all resources, then we can use that instead. + */ + this.id = NodeUtils.generateUniqueShaDigest(link); + } + + public void copyAttributeKeyValuePair(Map map) { + for (String key : map.keySet()) { + if (!key.equalsIgnoreCase("relationship-list")) { // ignore relationship data which is not + // required in aggregation + this.attributes.put(key, map.get(key).toString()); // not sure if entity attribute can + // contain an object as value + } + } + } + + public void addAttributeKeyValuePair(String key, String value) { + this.attributes.put(key, value); + } + + @Override + public String getAsJson() { + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.put("link", this.getLink()); + rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); + for (String key : this.attributes.keySet()) { + rootNode.put(key, this.attributes.get(key)); + } + return rootNode.toString(); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "") + + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " + : "") + + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") + + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + "]"; + } +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java b/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java new file mode 100644 index 0000000..9ee6365 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java @@ -0,0 +1,104 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.entity; + +import java.util.ArrayList; +import java.util.List; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterListItemConfig; +import org.onap.aai.sparky.search.filters.config.UiViewListItemConfig; +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.databind.ObjectMapper; + +public class AggregationSuggestionEntity extends IndexableEntity implements IndexDocument { + + private static final String FILTER_ID = "filterId"; + private static final String FILTER_LIST = "filterList"; + + private List inputs = new ArrayList<>(); + private final String outputString = "VNFs"; + protected ObjectMapper mapper = new ObjectMapper(); + List filterIds = new ArrayList<>(); + + public AggregationSuggestionEntity() { + super(); + inputs.add("VNFs"); + inputs.add("generic-vnfs"); + } + + @Override + public void deriveFields() { + this.id = NodeUtils.generateUniqueShaDigest(this.outputString); + } + + @Override + public String getAsJson() { + JSONArray inputArray = new JSONArray(); + for (String input : inputs) { + input = input.replace(",", ""); + input = input.replace("[", ""); + input = input.replace("]", ""); + inputArray.put(input); + } + + JSONObject entitySuggest = new JSONObject(); + entitySuggest.put("input", inputArray); + entitySuggest.put("output", this.outputString); + entitySuggest.put("weight", 100); + + JSONArray payloadFilters = new JSONArray(); + + for (String filterId : filterIds) { + JSONObject filterPayload = new JSONObject(); + filterPayload.put(FILTER_ID, filterId); + payloadFilters.put(filterPayload); + } + + JSONObject payloadNode = new JSONObject(); + payloadNode.put(FILTER_LIST, payloadFilters); + entitySuggest.put("payload", payloadNode); + + JSONObject rootNode = new JSONObject(); + rootNode.put("entity_suggest", entitySuggest); + + return rootNode.toString(); + } + + public void initializeFilters() { + for (UiViewListItemConfig view : FiltersConfig.getInstance().getViewsConfig().getViews()) { + if (view.getViewName().equals("vnfSearch")) { + for (UiFilterListItemConfig currentViewFilter : view.getFilters()) { + filterIds.add(currentViewFilter.getFilterId()); + } + } + } + } + + public void setFilterIds(List filterIds) { + this.filterIds = filterIds; + } +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java b/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java new file mode 100644 index 0000000..f7818a4 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java @@ -0,0 +1,39 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +/** + * The Interface IndexDocument. + */ +public interface IndexDocument { + + /** + * Derive fields. + */ + public void deriveFields(); + + public String getId(); + + public String getAsJson() throws Exception; + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java b/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java new file mode 100644 index 0000000..cef7bfe --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java @@ -0,0 +1,99 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.ArrayList; + +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + + +/** + * The Class IndexableCrossEntityReference. + */ + +public class IndexableCrossEntityReference extends IndexableEntity implements IndexDocument { + + protected String crossReferenceEntityValues; + protected ArrayList crossEntityReferenceCollection = new ArrayList(); + protected ObjectMapper mapper = new ObjectMapper(); + + /** + * Instantiates a new indexable cross entity reference. + */ + public IndexableCrossEntityReference() { + super(); + } + + /** + * Adds the cross entity reference value. 
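+ * Duplicate values are ignored; deriveFields() later flattens the collection into a single
+ * semicolon-delimited string stored as crossReferenceEntityValues.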
+ * + * @param crossEntityReferenceValue the cross entity reference value + */ + public void addCrossEntityReferenceValue(String crossEntityReferenceValue) { + if (!crossEntityReferenceCollection.contains(crossEntityReferenceValue)) { + crossEntityReferenceCollection.add(crossEntityReferenceValue); + } + } + + public String getCrossReferenceEntityValues() { + return crossReferenceEntityValues; + } + + public void setCrossReferenceEntityValues(String crossReferenceEntityValues) { + this.crossReferenceEntityValues = crossReferenceEntityValues; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + this.id = NodeUtils.generateUniqueShaDigest(link); + this.crossReferenceEntityValues = NodeUtils.concatArray(crossEntityReferenceCollection, ";"); + } + + @Override + public String getAsJson() throws JsonProcessingException { + + return NodeUtils.convertObjectToJson(this, false); + + } + + @Override + public String toString() { + return "IndexableCrossEntityReference [" + + (crossReferenceEntityValues != null + ? "crossReferenceEntityValues=" + crossReferenceEntityValues + ", " : "") + + (crossEntityReferenceCollection != null + ? "crossEntityReferenceCollection=" + crossEntityReferenceCollection + ", " : "") + + (mapper != null ? "mapper=" + mapper : "") + "]"; + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java b/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java new file mode 100644 index 0000000..5ee9a9f --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java @@ -0,0 +1,98 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.sql.Timestamp; +import java.text.SimpleDateFormat; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class IndexableEntity. + */ +public abstract class IndexableEntity { + protected String id; // generated, SHA-256 digest + protected String entityType; + protected String entityPrimaryKeyValue; + protected String lastmodTimestamp; + protected String link; + + private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + + /** + * Instantiates a new indexable entity. 
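+ *
+ * <p>The constructor stamps {@code lastmodTimestamp} with the current time in the
+ * yyyy-MM-dd'T'HH:mm:ss.SSSZ format; callers may overwrite it via setEntityTimeStamp().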
+ */ + public IndexableEntity() { + SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + String currentFormattedTimeStamp = dateFormat.format(timestamp); + this.setEntityTimeStamp(currentFormattedTimeStamp); + } + + @JsonIgnore + public String getId() { + return id; + } + + @JsonProperty("entityType") + public String getEntityType() { + return entityType; + } + + @JsonProperty("entityPrimaryKeyValue") + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + @JsonProperty("lastmodTimestamp") + public String getEntityTimeStamp() { + return lastmodTimestamp; + } + + public void setId(String id) { + this.id = id; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public void setEntityPrimaryKeyValue(String fieldValue) { + this.entityPrimaryKeyValue = fieldValue; + } + + public void setEntityTimeStamp(String lastmodTimestamp) { + this.lastmodTimestamp = lastmodTimestamp; + } + + @JsonProperty("link") + public String getLink() { + return link; + } + + public void setLink(String link) { + this.link = link; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java b/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java new file mode 100644 index 0000000..10036b3 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; + +/** + * The Class MergableEntity. + */ +public class MergableEntity { + private Map other = new HashMap(); + + /** + * Any. + * + * @return the map + */ + @JsonAnyGetter + public Map any() { + return other; + } + + /** + * Sets the. 
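+ * <p>Backed by Jackson's {@code @JsonAnySetter}, this captures any JSON property that is not
+ * bound to an explicit field and stores it in the map exposed by any().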
+ * + * @param name the name + * @param value the value + */ + @JsonAnySetter + public void set(String name, String value) { + other.put(name, value); + } +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java b/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java new file mode 100644 index 0000000..217dcdf --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java @@ -0,0 +1,76 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; + +/** + * The Class ObjectIdCollection. + */ +public class ObjectIdCollection { + + protected ConcurrentHashMap importedObjectIds = + new ConcurrentHashMap(); + + public Collection getImportedObjectIds() { + return importedObjectIds.values(); + } + + /** + * Adds the object id. + * + * @param id the id + */ + public void addObjectId(String id) { + importedObjectIds.putIfAbsent(id, id); + } + + public int getSize() { + return importedObjectIds.values().size(); + } + + /** + * Adds the all. + * + * @param items the items + */ + public void addAll(List items) { + if (items == null) { + return; + } + + items.stream().forEach((item) -> { + importedObjectIds.putIfAbsent(item, item); + }); + + } + + /** + * Clear. + */ + public void clear() { + importedObjectIds.clear(); + } +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java b/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java new file mode 100644 index 0000000..dd52bd2 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java @@ -0,0 +1,144 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class SearchableEntity. + */ +public class SearchableEntity extends IndexableEntity implements IndexDocument { + + @JsonIgnore + protected List searchTagCollection = new ArrayList(); + + @JsonIgnore + protected List searchTagIdCollection = new ArrayList(); + + @JsonIgnore + protected ObjectMapper mapper = new ObjectMapper(); + + /** + * Instantiates a new searchable entity. + */ + public SearchableEntity() { + super(); + } + + /* + * Generated fields, leave the settings for junit overrides + */ + + protected String searchTags; // generated based on searchTagCollection values + + protected String searchTagIDs; + + /** + * Generates the sha based id. + */ + public void generateId() { + this.id = NodeUtils.generateUniqueShaDigest(link); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. + * The best we can hope for is identification of resources by generated Id until the + * Identity-Service UUID is tagged against all resources, then we can use that instead. + */ + generateId(); + this.searchTags = NodeUtils.concatArray(searchTagCollection, ";"); + this.searchTagIDs = NodeUtils.concatArray(this.searchTagIdCollection, ";"); + } + + /** + * Adds the search tag with key. + * + * @param searchTag the search tag + * @param searchTagKey the key associated with the search tag (key:value) + */ + public void addSearchTagWithKey(String searchTag, String searchTagKey) { + searchTagIdCollection.add(searchTagKey); + searchTagCollection.add(searchTag); + } + + public List getSearchTagCollection() { + return searchTagCollection; + } + + @JsonProperty("searchTags") + public String getSearchTags() { + return searchTags; + } + + @JsonProperty("searchTagIDs") + public String getSearchTagIDs() { + return searchTagIDs; + } + + @JsonIgnore + public List getSearchTagIdCollection() { + return searchTagIdCollection; + } + + @Override + @JsonIgnore + public String getAsJson() throws JsonProcessingException { + return NodeUtils.convertObjectToJson(this, false); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "") + + (entityPrimaryKeyValue != null ? 
"entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " + : "") + + (searchTagCollection != null ? "searchTagCollection=" + searchTagCollection + ", " : "") + + (searchTagIdCollection != null ? "searchTagIDCollection=" + searchTagIdCollection + ", " + : "") + + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") + + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + + (searchTags != null ? "searchTags=" + searchTags + ", " : "") + + (searchTagIDs != null ? "searchTagIDs=" + searchTagIDs : "") + "]"; + } +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java b/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java new file mode 100644 index 0000000..9d2886e --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java @@ -0,0 +1,90 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +/** + * The Class SelfLinkDescriptor. + */ +public class SelfLinkDescriptor { + private String selfLink; + private String entityType; + private String depthModifier; + + public String getDepthModifier() { + return depthModifier; + } + + public void setDepthModifier(String depthModifier) { + this.depthModifier = depthModifier; + } + + public String getSelfLink() { + return selfLink; + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public SelfLinkDescriptor(String selfLink) { + this(selfLink, null, null); + } + + /** + * Instantiates a new self link descriptor. + * + * @param selfLink the self link + * @param entityType the entity type + */ + public SelfLinkDescriptor(String selfLink, String entityType) { + this(selfLink, null, entityType); + } + + public SelfLinkDescriptor(String selfLink, String depthModifier, String entityType) { + this.selfLink = selfLink; + this.entityType = entityType; + this.depthModifier = depthModifier; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "SelfLinkDescriptor [" + (selfLink != null ? "selfLink=" + selfLink + ", " : "") + + (entityType != null ? "entityType=" + entityType + ", " : "") + + (depthModifier != null ? 
"depthModifier=" + depthModifier : "") + "]"; + } + +} + diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java b/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java new file mode 100644 index 0000000..fdabf86 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java @@ -0,0 +1,326 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.config.FiltersDetailsConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterConfig; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.SuggestionsPermutation; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class SuggestionSearchEntity extends IndexableEntity implements IndexDocument { + private static final String FILTER_ID = "filterId"; + private static final String FILTER_VALUE = "filterValue"; + private static final String FILTER_LIST = "filterList"; + + private String entityType; + private List suggestionConnectorWords = new ArrayList(); + private List suggestionAttributeTypes = new ArrayList(); + private List suggestionAttributeValues = new ArrayList(); + private List suggestionTypeAliases = new ArrayList(); + private List suggestionInputPermutations = new ArrayList(); + private List suggestableAttr = new ArrayList(); + + private Map inputOutputData = new HashMap(); + Map filters = new HashMap(); + private JSONObject payload = new JSONObject(); + private JSONArray payloadFilters = new JSONArray(); + private StringBuffer outputString = new StringBuffer(); + private String aliasToUse; + + private SuggestionEntityLookup entityLookup; + + public JSONObject getPayload() { + return payload; + } + + public void setPayload(JSONObject payload) { + this.payload = payload; + } + + protected ObjectMapper mapper = new ObjectMapper(); + + public SuggestionSearchEntity() { + super(); + + FiltersDetailsConfig filterConfigList = FiltersConfig.getInstance().getFiltersConfig(); + // Populate the map with keys that will match the suggestableAttr values + 
for (UiFilterConfig filter : filterConfigList.getFilters()) { + if (filter.getDataSource() != null) { + filters.put(filter.getDataSource().getFieldName(), filter); + } + } + } + + public SuggestionSearchEntity(SuggestionEntityLookup entityLookup) { + + this.entityLookup = entityLookup; + + FiltersDetailsConfig filterConfigList = FiltersConfig.getInstance().getFiltersConfig(); + // Populate the map with keys that will match the suggestableAttr values + for (UiFilterConfig filter : filterConfigList.getFilters()) { + if (filter.getDataSource() != null) { + filters.put(filter.getDataSource().getFieldName(), filter); + } + } + } + + public SuggestionSearchEntity(SuggestionEntityLookup entityLookup, FiltersConfig config) { + + FiltersDetailsConfig filterConfigList = config.getFiltersConfig(); + // Populate the map with keys that will match the suggestableAttr values + for (UiFilterConfig filter : filterConfigList.getFilters()) { + if (filter.getDataSource() != null) { + filters.put(filter.getDataSource().getFieldName(), filter); + } + } + } + + public void setSuggestableAttr(ArrayList attributes) { + for (String attribute : attributes) { + this.suggestableAttr.add(attribute); + } + } + + public void setPayloadFromResponse(JsonNode node) { + if (suggestableAttr != null) { + JSONObject nodePayload = new JSONObject(); + for (String attribute : suggestableAttr) { + if (node.get(attribute) != null) { + inputOutputData.put(attribute, node.get(attribute).asText()); + this.payload.put(attribute, node.get(attribute).asText()); + } + } + } + } + + public void setFilterBasedPayloadFromResponse(JsonNode node, String entityName, + ArrayList uniqueList) { + + HashMap desc = entityLookup.getSuggestionSearchEntityOxmModel().get(entityName); + + if (desc == null) { + return; + } + + String attr = desc.get("suggestibleAttributes"); + + if (attr == null) { + return; + } + + List suggestableAttrOxm = Arrays.asList(attr.split(",")); + + /* + * Note: (1) 'uniqueList' is one item within the power set of the suggestable attributes. 
(2) + * 'inputeOutputData' is used to generate permutations of strings + */ + for (String selectiveAttr : uniqueList) { + if (node.get(selectiveAttr) != null) { + inputOutputData.put(selectiveAttr, node.get(selectiveAttr).asText()); + } + } + + if (suggestableAttrOxm != null) { + for (String attribute : suggestableAttrOxm) { + if (node.get(attribute) != null && uniqueList.contains(attribute)) { + UiFilterConfig filterConfig = filters.get(attribute); + if (filterConfig != null) { + JSONObject filterPayload = new JSONObject(); + filterPayload.put(FILTER_ID, filterConfig.getFilterId()); + filterPayload.put(FILTER_VALUE, node.get(attribute).asText()); + this.payloadFilters.put(filterPayload); + } else { + this.payload.put(attribute, node.get(attribute).asText()); + } + } else { + UiFilterConfig emptyValueFilterConfig = filters.get(attribute); + if (emptyValueFilterConfig != null) { + JSONObject emptyValueFilterPayload = new JSONObject(); + emptyValueFilterPayload.put(FILTER_ID, emptyValueFilterConfig.getFilterId()); + this.payloadFilters.put(emptyValueFilterPayload); + } + } + } + this.payload.put(FILTER_LIST, this.payloadFilters); + } + } + + @Override + public String getEntityType() { + return entityType; + } + + @Override + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public List getSuggestionConnectorWords() { + return suggestionConnectorWords; + } + + public void setSuggestionConnectorWords(List suggestionConnectorWords) { + this.suggestionConnectorWords = suggestionConnectorWords; + } + + public List getSuggestionPropertyTypes() { + return this.suggestionAttributeTypes; + } + + public void setSuggestionPropertyTypes(List suggestionPropertyTypes) { + this.suggestionAttributeTypes = suggestionPropertyTypes; + } + + public List getSuggestionAttributeValues() { + return this.suggestionAttributeValues; + } + + public void setSuggestionAttributeValues(List suggestionAttributeValues) { + this.suggestionAttributeValues = suggestionAttributeValues; + } + + public List getSuggestionAliases() { + return this.suggestionTypeAliases; + } + + public void setSuggestionAliases(List suggestionAliases) { + this.suggestionTypeAliases = suggestionAliases; + } + + public List getSuggestionInputPermutations() { + return this.suggestionInputPermutations; + } + + public void setSuggestionInputPermutations(List permutations) { + this.suggestionInputPermutations = permutations; + } + + public void generateSuggestionInputPermutations() { + + List entityNames = new ArrayList<>(); + entityNames.add(entityType); + HashMap desc = + entityLookup.getSuggestionSearchEntityOxmModel().get(this.entityType); + String attr = desc.get("suggestionAliases"); + String[] suggestionAliasesArray = attr.split(","); + suggestionTypeAliases = Arrays.asList(suggestionAliasesArray); + this.setAliasToUse(suggestionAliasesArray[suggestionAliasesArray.length - 1]); + for (String alias : suggestionTypeAliases) { + entityNames.add(alias); + } + + ArrayList listToPermutate = new ArrayList<>(inputOutputData.values()); + + for (String entity : entityNames) { + listToPermutate.add(entity); // add entity-name or alias in list to permutate + List> lists = SuggestionsPermutation.getListPermutations(listToPermutate); + for (List li : lists) { + suggestionInputPermutations.add(String.join(" ", li)); + } + // prepare for the next pass: remove the entity-name or alias from the list + listToPermutate.remove(entity); + } + } + + public boolean isSuggestableDoc() { + return this.getPayload().length() != 0; + } + + + 
@Override + public void deriveFields() { + + int entryCounter = 1; + for (Map.Entry outputValue : inputOutputData.entrySet()) { + if (outputValue.getValue() != null && outputValue.getValue().length() > 0) { + this.outputString.append(outputValue.getValue()); + if (entryCounter < inputOutputData.entrySet().size()) { + this.outputString.append(" and "); + } else { + this.outputString.append(" "); + } + } + entryCounter++; + } + + this.outputString.append(this.getAliasToUse()); + this.id = NodeUtils.generateUniqueShaDigest(outputString.toString()); + } + + @Override + public String getAsJson() { + // TODO Auto-generated method stub + JSONObject rootNode = new JSONObject(); + + JSONArray suggestionsArray = new JSONArray(); + for (String suggestion : suggestionInputPermutations) { + suggestionsArray.put(suggestion); + } + + JSONObject entitySuggest = new JSONObject(); + + entitySuggest.put("input", suggestionsArray); + entitySuggest.put("output", this.outputString); + entitySuggest.put("payload", this.payload); + rootNode.put("entity_suggest", entitySuggest); + + return rootNode.toString(); + } + + public String getAliasToUse() { + return aliasToUse; + } + + public void setAliasToUse(String aliasToUse) { + this.aliasToUse = aliasToUse; + } + + public Map getInputOutputData() { + return inputOutputData; + } + + public void setInputOutputData(Map inputOutputData) { + this.inputOutputData = inputOutputData; + } + + @Override + public String toString() { + return "SuggestionSearchEntity [entityType=" + entityType + ", suggestionConnectorWords=" + + suggestionConnectorWords + ", suggestionAttributeTypes=" + suggestionAttributeTypes + + ", suggestionAttributeValues=" + suggestionAttributeValues + ", suggestionTypeAliases=" + + suggestionTypeAliases + ", mapper=" + mapper + "]"; + } +} diff --git a/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java b/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java new file mode 100644 index 0000000..8dd25a1 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java @@ -0,0 +1,54 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +/** + * The Enum TransactionStorageType. 
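+ * Identifies the offline capture location for a transaction type: edge-tag queries are written
+ * under aaiOffline/edge-tag-query and active-inventory queries under
+ * aaiOffline/active-inventory-query.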
+ */ +public enum TransactionStorageType { + EDGE_TAG_QUERY(0, "aaiOffline/edge-tag-query"), ACTIVE_INVENTORY_QUERY(1, + "aaiOffline/active-inventory-query"); + + private Integer index; + private String outputFolder; + + /** + * Instantiates a new transaction storage type. + * + * @param index the index + * @param outputFolder the output folder + */ + TransactionStorageType(Integer index, String outputFolder) { + this.index = index; + this.outputFolder = outputFolder; + } + + public Integer getIndex() { + return index; + } + + public String getOutputFolder() { + return outputFolder; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java b/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java new file mode 100644 index 0000000..af25301 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java @@ -0,0 +1,30 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.enumeration; + +/** + * The Enum OperationState. + */ +public enum OperationState { + INIT, OK, ERROR, ABORT, PENDING, IGNORED_SYNC_NOT_IDLE +} diff --git a/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java b/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java new file mode 100644 index 0000000..12f0c0a --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java @@ -0,0 +1,30 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.enumeration; + +/** + * The Enum SynchronizerState. + */ +public enum SynchronizerState { + IDLE, PERFORMING_SYNCHRONIZATION, ABORTED +} diff --git a/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java b/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java new file mode 100644 index 0000000..55c8d47 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java @@ -0,0 +1,100 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.slf4j.MDC; + +/* + * Consider abstraction the tasks into common elemnts, because most of them repeat a generic call + * flow pattern + */ + +/** + * The Class PerformActiveInventoryRetrieval. + */ +public class PerformActiveInventoryRetrieval implements Supplier { + + private static Logger logger = + LoggerFactory.getInstance().getLogger(PerformActiveInventoryRetrieval.class); + + private NetworkTransaction txn; + private ActiveInventoryAdapter aaiAdapter; + private Map contextMap; + + /** + * Instantiates a new perform active inventory retrieval. 
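+ *
+ * <p>The MDC context map is captured at construction time and re-applied inside get(), so
+ * logging context follows the task onto the worker thread. A minimal illustrative sketch of
+ * how such a Supplier can be scheduled (executor and variable names are assumptions, not part
+ * of this patch):
+ *
+ * <pre>{@code
+ * CompletableFuture
+ *     .supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), taskExecutor)
+ *     .whenComplete((completedTxn, error) -> {
+ *       // completedTxn carries the OperationResult and the measured operation time
+ *     });
+ * }</pre>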
+ * + * @param txn the txn + * @param aaiProvider the aai provider + */ + public PerformActiveInventoryRetrieval(NetworkTransaction txn, + ActiveInventoryAdapter aaiAdapter) { + this.txn = txn; + this.aaiAdapter = aaiAdapter; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* + * (non-Javadoc) + * + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + + txn.setTaskAgeInMs(); + + long startTimeInMs = System.currentTimeMillis(); + MDC.setContextMap(contextMap); + OperationResult result = null; + try { + + final String absoluteSelfLink = + aaiAdapter.repairSelfLink(txn.getLink(), txn.getQueryParameters()); + result = aaiAdapter.queryActiveInventoryWithRetries(absoluteSelfLink, "application/json", 5); + } catch (Exception exc) { + logger.error(AaiUiMsgs.ERROR_GENERIC, + "Failure to resolve self link from AAI. Error = " + exc.getMessage()); + result = new OperationResult(500, + "Caught an exception while trying to resolve link = " + exc.getMessage()); + } finally { + txn.setOperationResult(result); + txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs); + } + + return txn; + } + + protected void setContextMap(Map contextMap) { + this.contextMap = contextMap; + } +} diff --git a/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java b/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java new file mode 100644 index 0000000..6359dbe --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java @@ -0,0 +1,88 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchPut. + */ +public class PerformElasticSearchPut implements Supplier { + + private ElasticSearchAdapter esAdapter; + private String jsonPayload; + private NetworkTransaction txn; + private Map contextMap; + + /** + * Instantiates a new perform elastic search put. 
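+ *
+ * <p>When get() runs, the captured MDC context is restored, the JSON payload is PUT to the
+ * transaction's link via the ElasticSearchAdapter, and the result and elapsed time are
+ * recorded on the NetworkTransaction.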
+ * + * @param jsonPayload the json payload + * @param txn the txn + * @param restDataProvider the rest data provider + */ + public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, + ElasticSearchAdapter esAdapter) { + this.jsonPayload = jsonPayload; + this.txn = txn; + this.esAdapter = esAdapter; + this.contextMap = MDC.getCopyOfContextMap(); + } + + public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, + ElasticSearchAdapter esAdapter, Map contextMap) { + this.jsonPayload = jsonPayload; + this.txn = txn; + this.esAdapter = esAdapter; + this.contextMap = contextMap; + } + + /* + * (non-Javadoc) + * + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + txn.setTaskAgeInMs(); + MDC.setContextMap(contextMap); + + long startTimeInMs = System.currentTimeMillis(); + + OperationResult or = + esAdapter.doPut(txn.getLink(), jsonPayload, MediaType.APPLICATION_JSON_TYPE); + + txn.setOperationResult(or); + txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs); + + return txn; + } +} diff --git a/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java b/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java new file mode 100644 index 0000000..0f37a0d --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java @@ -0,0 +1,72 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchRetrieval. + */ +public class PerformElasticSearchRetrieval implements Supplier { + + private NetworkTransaction txn; + private ElasticSearchAdapter esAdapter; + private Map contextMap; + + /** + * Instantiates a new perform elastic search retrieval. 
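+ *
+ * <p>get() restores the captured MDC context, issues a GET against the transaction's link via
+ * the ElasticSearchAdapter, and records the OperationResult and elapsed time on the
+ * transaction.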
+ * + * @param elasticSearchTxn the elastic search txn + * @param restDataProvider the rest data provider + */ + public PerformElasticSearchRetrieval(NetworkTransaction elasticSearchTxn, + ElasticSearchAdapter esAdapter) { + this.txn = elasticSearchTxn; + this.esAdapter = esAdapter; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* + * (non-Javadoc) + * + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + MDC.setContextMap(contextMap); + long startTimeInMs = System.currentTimeMillis(); + OperationResult or = esAdapter.doGet(txn.getLink(), MediaType.APPLICATION_JSON_TYPE); + txn.setOperationResult(or); + txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs); + return txn; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java b/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java new file mode 100644 index 0000000..1d8371f --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java @@ -0,0 +1,80 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchUpdate. + */ +public class PerformElasticSearchUpdate implements Supplier { + + private ElasticSearchAdapter esAdapter; + private NetworkTransaction operationTracker; + private String updatePayload; + private String updateUrl; + private Map contextMap; + + /** + * Instantiates a new perform elastic search update. 
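[Editor's sketch, not part of this patch] PerformElasticSearchRetrieval is the preliminary GET the synchronizers use to learn whether a document already exists and, if so, what its current _version is before attempting an upsert. A minimal sketch under the same assumptions as the earlier examples:

    NetworkTransaction getTxn = new NetworkTransaction();
    getTxn.setLink(documentUrl);                    // assumed document URL in the target index
    getTxn.setOperationType(HttpMethod.GET);

    esWorkOnHand.incrementAndGet();
    CompletableFuture.supplyAsync(new PerformElasticSearchRetrieval(getTxn, esAdapter), esExecutor)
        .whenComplete((result, error) -> {
          esWorkOnHand.decrementAndGet();
          if (error == null) {
            int rc = result.getOperationResult().getResultCode();
            // 200: document exists (its _version can be parsed from the body); 404: new document
          }
        });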
+ * + * @param updateUrl the update url + * @param updatePayload the update payload + * @param esDataProvider the es data provider + * @param transactionTracker the transaction tracker + */ + public PerformElasticSearchUpdate(String updateUrl, String updatePayload, + ElasticSearchAdapter esAdapter, NetworkTransaction transactionTracker) { + this.updateUrl = updateUrl; + this.updatePayload = updatePayload; + this.esAdapter = esAdapter; + this.contextMap = MDC.getCopyOfContextMap(); + this.operationTracker = new NetworkTransaction(); + operationTracker.setEntityType(transactionTracker.getEntityType()); + operationTracker.setDescriptor(transactionTracker.getDescriptor()); + operationTracker.setOperationType(transactionTracker.getOperationType()); + } + + /* + * (non-Javadoc) + * + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + operationTracker.setTaskAgeInMs(); + MDC.setContextMap(contextMap); + long startTimeInMs = System.currentTimeMillis(); + OperationResult or = esAdapter.doBulkOperation(updateUrl, updatePayload); + operationTracker.setOperationResult(or); + operationTracker.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs); + return operationTracker; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java b/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java new file mode 100644 index 0000000..4ef796d --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java @@ -0,0 +1,90 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.sync.entity.IndexDocument; +import org.slf4j.MDC; + +/** + * The Class StoreDocumentTask. + */ +public class StoreDocumentTask implements Supplier { + + private IndexDocument doc; + + private NetworkTransaction txn; + + private ElasticSearchAdapter esAdapter; + private Map contextMap; + + /** + * Instantiates a new store document task. 
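[Editor's sketch, not part of this patch] Unlike the other tasks, PerformElasticSearchUpdate copies the entity type, descriptor and operation type into a fresh NetworkTransaction, so the tracker passed in is not mutated, and it routes its payload through the adapter's bulk operation. A hedged sketch of a version-asserting update; bulkUrl and requestPayload are assumed to come from the caller's bulk-request builder.

    NetworkTransaction tracker = new NetworkTransaction();
    tracker.setEntityType("generic-vnf");           // illustrative entity type
    tracker.setOperationType(HttpMethod.PUT);

    esWorkOnHand.incrementAndGet();
    CompletableFuture.supplyAsync(
        new PerformElasticSearchUpdate(bulkUrl, requestPayload, esAdapter, tracker), esPutExecutor)
        .whenComplete((result, error) -> {
          esWorkOnHand.decrementAndGet();
          // a 409 result code here indicates a version conflict the caller may queue for re-sync
        });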
+ * + * @param doc the doc + * @param txn the txn + * @param esDataProvider the es data provider + */ + public StoreDocumentTask(IndexDocument doc, NetworkTransaction txn, + ElasticSearchAdapter esAdapter) { + this.doc = doc; + this.txn = txn; + this.esAdapter = esAdapter; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* + * (non-Javadoc) + * + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + txn.setTaskAgeInMs(); + + long startTimeInMs = System.currentTimeMillis(); + MDC.setContextMap(contextMap); + OperationResult operationResult = null; + + try { + + operationResult = + esAdapter.doPut(txn.getLink(), doc.getAsJson(), MediaType.APPLICATION_JSON_TYPE); + txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs); + } catch (Exception exception) { + operationResult.setResult(500, exception.getMessage()); + } + + txn.setOperationResult(operationResult); + + return txn; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java b/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java new file mode 100644 index 0000000..959fed1 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java @@ -0,0 +1,53 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
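[Editor's sketch, not part of this patch] StoreDocumentTask is the simple write path: it PUTs an IndexDocument's JSON form to the supplied link. A caller-side sketch in which doc is any IndexDocument implementation and storeTxn is a PUT NetworkTransaction prepared as in the earlier examples:

    esWorkOnHand.incrementAndGet();
    CompletableFuture.supplyAsync(new StoreDocumentTask(doc, storeTxn, esAdapter), esPutExecutor)
        .whenComplete((result, error) -> {
          esWorkOnHand.decrementAndGet();
          if (error != null) {
            LOG.error(AaiUiMsgs.ERROR_GENERIC, "Document store failed: " + error.getLocalizedMessage());
          } else {
            updateElasticSearchCounters(result);    // assumed stats hook on the calling synchronizer
          }
        });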
+ */ +package org.onap.aai.sparky.sync.task; + +import org.onap.aai.sparky.sync.SyncController; +import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; + +public class SyncControllerTask implements Runnable { + + private SyncController controller; + + public SyncControllerTask(SyncController controller) { + this.controller = controller; + } + + @Override + public void run() { + + controller.performAction(SyncActions.SYNCHRONIZE); + + while (controller.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + // exit out of the sync-wait-loop + break; + } + } + + } + +} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/AbstractEntitySynchronizer.java b/src/main/java/org/onap/aai/sparky/synchronizer/AbstractEntitySynchronizer.java deleted file mode 100644 index dde633c..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/AbstractEntitySynchronizer.java +++ /dev/null @@ -1,568 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.EnumSet; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; - -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider; -import org.onap.aai.sparky.dal.aai.ActiveInventoryEntityStatistics; -import org.onap.aai.sparky.dal.aai.ActiveInventoryProcessingExceptionStatistics; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchDataProvider; -import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchEntityStatistics; -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestOperationalStatistics; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.mdc.MdcContext; -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * The Class AbstractEntitySynchronizer. - * - * @author davea. 
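[Editor's sketch, not part of this patch] SyncControllerTask wraps a controller in a Runnable that triggers the SYNCHRONIZE action and then blocks, polling once a second, until the controller leaves PERFORMING_SYNCHRONIZATION. One plausible way to drive it; the scheduler and period below are illustrative and not mandated by this patch.

    void scheduleDailySync(SyncController controller) {
      java.util.concurrent.ScheduledExecutorService scheduler =
          java.util.concurrent.Executors.newSingleThreadScheduledExecutor();
      // run one synchronization pass immediately, then repeat every 24 hours; each run blocks
      // inside SyncControllerTask until the controller reports it is idle again
      scheduler.scheduleAtFixedRate(new SyncControllerTask(controller), 0, 24,
          java.util.concurrent.TimeUnit.HOURS);
    }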
- */ -public abstract class AbstractEntitySynchronizer { - - protected static final int VERSION_CONFLICT_EXCEPTION_CODE = 409; - protected static final Integer RETRY_COUNT_PER_ENTITY_LIMIT = new Integer(3); - - protected final Logger logger; - protected ObjectMapper mapper; - protected OxmModelLoader oxmModelLoader; - protected long syncDurationInMs; - - /** - * The Enum StatFlag. - */ - protected enum StatFlag { - AAI_REST_STATS, AAI_ENTITY_STATS, AAI_PROCESSING_EXCEPTION_STATS, AAI_TASK_PROCESSING_STATS, ES_REST_STATS, ES_ENTITY_STATS, ES_TASK_PROCESSING_STATS - } - - protected EnumSet enabledStatFlags; - - protected ActiveInventoryDataProvider aaiDataProvider; - protected ElasticSearchDataProvider esDataProvider; - - protected ExecutorService synchronizerExecutor; - protected ExecutorService aaiExecutor; - protected ExecutorService esExecutor; - - private RestOperationalStatistics esRestStats; - protected ElasticSearchEntityStatistics esEntityStats; - - private RestOperationalStatistics aaiRestStats; - protected ActiveInventoryEntityStatistics aaiEntityStats; - private ActiveInventoryProcessingExceptionStatistics aaiProcessingExceptionStats; - - private TaskProcessingStats aaiTaskProcessingStats; - private TaskProcessingStats esTaskProcessingStats; - - private TransactionRateController aaiTransactionRateController; - private TransactionRateController esTransactionRateController; - - protected AtomicInteger aaiWorkOnHand; - protected AtomicInteger esWorkOnHand; - protected String synchronizerName; - - protected abstract boolean isSyncDone(); - - protected boolean shouldSkipSync; - - public String getActiveInventoryStatisticsReport() { - - StringBuilder sb = new StringBuilder(128); - - if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { - sb.append("\n\n ").append("REST Operational Stats:"); - sb.append(aaiRestStats.getStatisticsReport()); - } - - if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { - sb.append("\n\n ").append("Entity Stats:"); - sb.append(aaiEntityStats.getStatisticsReport()); - } - - if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { - sb.append("\n\n ").append("Processing Exception Stats:"); - sb.append(aaiProcessingExceptionStats.getStatisticsReport()); - } - - return sb.toString(); - - } - - public String getElasticSearchStatisticsReport() { - - StringBuilder sb = new StringBuilder(128); - - if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { - sb.append("\n\n ").append("REST Operational Stats:"); - sb.append(esRestStats.getStatisticsReport()); - } - - if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { - sb.append("\n\n ").append("Entity Stats:"); - sb.append(esEntityStats.getStatisticsReport()); - } - - return sb.toString(); - - } - - /** - * Adds the active inventory stat report. 
- * - * @param sb the sb - */ - private void addActiveInventoryStatReport(StringBuilder sb) { - - if (sb == null) { - return; - } - - sb.append("\n\n AAI"); - sb.append(getActiveInventoryStatisticsReport()); - - double currentTps = 0; - if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { - sb.append("\n\n ").append("Task Processor Stats:"); - sb.append(aaiTaskProcessingStats.getStatisticsReport(false, " ")); - - currentTps = aaiTransactionRateController.getCurrentTps(); - - sb.append("\n ").append("Current TPS: ").append(currentTps); - } - - sb.append("\n ").append("Current WOH: ").append(aaiWorkOnHand.get()); - - if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { - if (currentTps > 0) { - double numMillisecondsToCompletion = (aaiWorkOnHand.get() / currentTps) * 1000; - sb.append("\n ").append("SyncDurationRemaining=") - .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); - } - } - - } - - /** - * Adds the elastic stat report. - * - * @param sb the sb - */ - private void addElasticStatReport(StringBuilder sb) { - - if (sb == null) { - return; - } - - sb.append("\n\n ELASTIC"); - sb.append(getElasticSearchStatisticsReport()); - - double currentTps = 0; - - if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { - sb.append("\n\n ").append("Task Processor Stats:"); - sb.append(esTaskProcessingStats.getStatisticsReport(false, " ")); - - currentTps = esTransactionRateController.getCurrentTps(); - - sb.append("\n ").append("Current TPS: ").append(currentTps); - } - - sb.append("\n ").append("Current WOH: ").append(esWorkOnHand.get()); - - if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { - if (currentTps > 0) { - double numMillisecondsToCompletion = (esWorkOnHand.get() / currentTps) * 1000; - sb.append("\n ").append("SyncDurationRemaining=") - .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); - } - } - - - } - - /** - * Gets the stat report. - * - * @param syncOpTimeInMs the sync op time in ms - * @param showFinalReport the show final report - * @return the stat report - */ - protected String getStatReport(long syncOpTimeInMs, boolean showFinalReport) { - - StringBuilder sb = new StringBuilder(128); - - sb.append("\n").append(synchronizerName + " Statistics: ( Sync Operation Duration = " - + NodeUtils.getDurationBreakdown(syncOpTimeInMs) + " )"); - - addActiveInventoryStatReport(sb); - addElasticStatReport(sb); - - if (showFinalReport) { - sb.append("\n\n ").append("Sync Completed!\n"); - } else { - sb.append("\n\n ").append("Sync in Progress...\n"); - } - - return sb.toString(); - - } - - protected String indexName; - protected long syncStartedTimeStampInMs; - - /** - * Instantiates a new abstract entity synchronizer. 
- * - * @param logger the logger - * @param syncName the sync name - * @param numSyncWorkers the num sync workers - * @param numActiveInventoryWorkers the num active inventory workers - * @param numElasticsearchWorkers the num elasticsearch workers - * @param indexName the index name - * @throws Exception the exception - */ - protected AbstractEntitySynchronizer(Logger logger, String syncName, int numSyncWorkers, - int numActiveInventoryWorkers, int numElasticsearchWorkers, String indexName) - throws Exception { - this.logger = logger; - this.synchronizerExecutor = - NodeUtils.createNamedExecutor(syncName + "-INTERNAL", numSyncWorkers, logger); - this.aaiExecutor = - NodeUtils.createNamedExecutor(syncName + "-AAI", numActiveInventoryWorkers, logger); - this.esExecutor = - NodeUtils.createNamedExecutor(syncName + "-ES", numElasticsearchWorkers, logger); - this.mapper = new ObjectMapper(); - this.oxmModelLoader = OxmModelLoader.getInstance(); - this.indexName = indexName; - this.esRestStats = new RestOperationalStatistics(); - this.esEntityStats = new ElasticSearchEntityStatistics(oxmModelLoader); - this.aaiRestStats = new RestOperationalStatistics(); - this.aaiEntityStats = new ActiveInventoryEntityStatistics(oxmModelLoader); - this.aaiProcessingExceptionStats = new ActiveInventoryProcessingExceptionStatistics(); - this.aaiTaskProcessingStats = - new TaskProcessingStats(ActiveInventoryConfig.getConfig().getTaskProcessorConfig()); - this.esTaskProcessingStats = - new TaskProcessingStats(ElasticSearchConfig.getConfig().getProcessorConfig()); - - this.aaiTransactionRateController = - new TransactionRateController(ActiveInventoryConfig.getConfig().getTaskProcessorConfig()); - this.esTransactionRateController = - new TransactionRateController(ElasticSearchConfig.getConfig().getProcessorConfig()); - - this.aaiWorkOnHand = new AtomicInteger(0); - this.esWorkOnHand = new AtomicInteger(0); - - enabledStatFlags = EnumSet.allOf(StatFlag.class); - - this.synchronizerName = "Abstact Entity Synchronizer"; - - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "AbstractEntitySynchronizer", "", "Sync", ""); - - this.shouldSkipSync = false; - this.syncStartedTimeStampInMs = System.currentTimeMillis(); - this.syncDurationInMs = -1; - } - - public boolean shouldSkipSync() { - return shouldSkipSync; - } - - public void setShouldSkipSync(boolean shouldSkipSync) { - this.shouldSkipSync = shouldSkipSync; - } - - /** - * Inc active inventory work on hand counter. - */ - protected void incActiveInventoryWorkOnHandCounter() { - aaiWorkOnHand.incrementAndGet(); - } - - /** - * Dec active inventory work on hand counter. - */ - protected void decActiveInventoryWorkOnHandCounter() { - aaiWorkOnHand.decrementAndGet(); - } - - /** - * Inc elastic search work on hand counter. - */ - protected void incElasticSearchWorkOnHandCounter() { - esWorkOnHand.incrementAndGet(); - } - - /** - * Dec elastic search work on hand counter. - */ - protected void decElasticSearchWorkOnHandCounter() { - esWorkOnHand.decrementAndGet(); - } - - /** - * Shutdown executors. - */ - protected void shutdownExecutors() { - try { - synchronizerExecutor.shutdown(); - aaiExecutor.shutdown(); - esExecutor.shutdown(); - aaiDataProvider.shutdown(); - esDataProvider.shutdown(); - } catch (Exception exc) { - logger.error(AaiUiMsgs.ERROR_SHUTDOWN_EXECUTORS, exc); - } - } - - /** - * Clear cache. 
- */ - public void clearCache() { - if (aaiDataProvider != null) { - aaiDataProvider.clearCache(); - } - } - - protected ActiveInventoryDataProvider getAaiDataProvider() { - return aaiDataProvider; - } - - public void setAaiDataProvider(ActiveInventoryDataProvider aaiDataProvider) { - this.aaiDataProvider = aaiDataProvider; - } - - protected ElasticSearchDataProvider getEsDataProvider() { - return esDataProvider; - } - - public void setEsDataProvider(ElasticSearchDataProvider provider) { - this.esDataProvider = provider; - } - - /** - * Gets the elastic full url. - * - * @param resourceUrl the resource url - * @param indexName the index name - * @param indexType the index type - * @return the elastic full url - * @throws Exception the exception - */ - protected String getElasticFullUrl(String resourceUrl, String indexName, String indexType) - throws Exception { - return ElasticSearchConfig.getConfig().getElasticFullUrl(resourceUrl, indexName, indexType); - } - - /** - * Gets the elastic full url. - * - * @param resourceUrl the resource url - * @param indexName the index name - * @return the elastic full url - * @throws Exception the exception - */ - protected String getElasticFullUrl(String resourceUrl, String indexName) throws Exception { - return ElasticSearchConfig.getConfig().getElasticFullUrl(resourceUrl, indexName); - } - - public String getIndexName() { - return indexName; - } - - public void setIndexName(String indexName) { - this.indexName = indexName; - } - - - /** - * Gets the response length. - * - * @param txn the txn - * @return the response length - */ - private long getResponseLength(NetworkTransaction txn) { - - if (txn == null) { - return -1; - } - - OperationResult result = txn.getOperationResult(); - - if (result == null) { - return -1; - } - - if (result.getResult() != null) { - return result.getResult().length(); - } - - return -1; - } - - /** - * Update elastic search counters. - * - * @param method the method - * @param or the or - */ - protected void updateElasticSearchCounters(HttpMethod method, OperationResult or) { - updateElasticSearchCounters(new NetworkTransaction(method, null, or)); - } - - /** - * Update elastic search counters. - * - * @param method the method - * @param entityType the entity type - * @param or the or - */ - protected void updateElasticSearchCounters(HttpMethod method, String entityType, - OperationResult or) { - updateElasticSearchCounters(new NetworkTransaction(method, entityType, or)); - } - - /** - * Update elastic search counters. 
- * - * @param txn the txn - */ - protected void updateElasticSearchCounters(NetworkTransaction txn) { - - if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { - esRestStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { - esEntityStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { - - esTransactionRateController.trackResponseTime(txn.getOperationResult().getResponseTimeInMs()); - - esTaskProcessingStats - .updateTaskResponseStatsHistogram(txn.getOperationResult().getResponseTimeInMs()); - esTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs()); - - // don't know the cost of the lengh calc, we'll see if it causes a - // problem - - long responsePayloadSizeInBytes = getResponseLength(txn); - if (responsePayloadSizeInBytes >= 0) { - esTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes); - } - - esTaskProcessingStats - .updateTransactionsPerSecondHistogram((long) esTransactionRateController.getCurrentTps()); - } - } - - /** - * Update active inventory counters. - * - * @param method the method - * @param or the or - */ - protected void updateActiveInventoryCounters(HttpMethod method, OperationResult or) { - updateActiveInventoryCounters(new NetworkTransaction(method, null, or)); - } - - /** - * Update active inventory counters. - * - * @param method the method - * @param entityType the entity type - * @param or the or - */ - protected void updateActiveInventoryCounters(HttpMethod method, String entityType, - OperationResult or) { - updateActiveInventoryCounters(new NetworkTransaction(method, entityType, or)); - } - - /** - * Update active inventory counters. - * - * @param txn the txn - */ - protected void updateActiveInventoryCounters(NetworkTransaction txn) { - - if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { - aaiRestStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { - aaiEntityStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { - aaiProcessingExceptionStats.updateCounters(txn); - } - - if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { - aaiTransactionRateController - .trackResponseTime(txn.getOperationResult().getResponseTimeInMs()); - - aaiTaskProcessingStats - .updateTaskResponseStatsHistogram(txn.getOperationResult().getResponseTimeInMs()); - aaiTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs()); - - // don't know the cost of the lengh calc, we'll see if it causes a - // problem - - long responsePayloadSizeInBytes = getResponseLength(txn); - if (responsePayloadSizeInBytes >= 0) { - aaiTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes); - } - - aaiTaskProcessingStats.updateTransactionsPerSecondHistogram( - (long) aaiTransactionRateController.getCurrentTps()); - } - } - - /** - * Reset counters. 
- */ - protected void resetCounters() { - aaiRestStats.reset(); - aaiEntityStats.reset(); - aaiProcessingExceptionStats.reset(); - - esRestStats.reset(); - esEntityStats.reset(); - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/AggregationSuggestionSynchronizer.java b/src/main/java/org/onap/aai/sparky/synchronizer/AggregationSuggestionSynchronizer.java deleted file mode 100644 index cd5877a..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/AggregationSuggestionSynchronizer.java +++ /dev/null @@ -1,183 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.util.Map; -import java.util.concurrent.ExecutorService; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.entity.AggregationSuggestionEntity; -import org.onap.aai.sparky.synchronizer.enumeration.OperationState; -import org.onap.aai.sparky.synchronizer.enumeration.SynchronizerState; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchPut; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.onap.aai.cl.mdc.MdcContext; -import org.slf4j.MDC; - -public class AggregationSuggestionSynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(AggregationSuggestionSynchronizer.class); - - private boolean isSyncInProgress; - private boolean shouldPerformRetry; - private Map contextMap; - protected ExecutorService esPutExecutor; - - public AggregationSuggestionSynchronizer(String indexName) throws Exception { - super(LOG, "ASS-" + indexName.toUpperCase(), 2, 5, 5, indexName); - - this.isSyncInProgress = false; - this.shouldPerformRetry = false; - this.synchronizerName = "Aggregation Suggestion Synchronizer"; - this.contextMap = MDC.getCopyOfContextMap(); - this.esPutExecutor = NodeUtils.createNamedExecutor("ASS-ES-PUT", 2, LOG); - } - - @Override - protected boolean isSyncDone() { - int totalWorkOnHand = esWorkOnHand.get(); - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, - indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand); - } - - if 
(totalWorkOnHand > 0 || !isSyncInProgress) { - return false; - } - - return true; - } - - @Override - public OperationState doSync() { - isSyncInProgress = true; - this.syncDurationInMs = -1; - syncStartedTimeStampInMs = System.currentTimeMillis(); - syncEntity(); - - while (!isSyncDone()) { - try { - if (shouldPerformRetry) { - syncEntity(); - } - Thread.sleep(1000); - } catch (Exception exc) { - // We don't care about this exception - } - } - - return OperationState.OK; - } - - private void syncEntity() { - String txnId = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnId, "AggregationSuggestionSynchronizer", "", "Sync", ""); - - AggregationSuggestionEntity syncEntity = new AggregationSuggestionEntity(); - syncEntity.deriveFields(); - - String link = null; - try { - link = getElasticFullUrl("/" + syncEntity.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - } - - try { - String jsonPayload = null; - jsonPayload = syncEntity.getIndexDocumentJson(); - if (link != null && jsonPayload != null) { - - NetworkTransaction elasticPutTxn = new NetworkTransaction(); - elasticPutTxn.setLink(link); - elasticPutTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - final Map contextMap = MDC.getCopyOfContextMap(); - supplyAsync( - new PerformElasticSearchPut(jsonPayload, elasticPutTxn, esDataProvider, contextMap), - esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = "Aggregation suggestion entity sync UPDATE PUT error - " - + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); - } else { - updateElasticSearchCounters(result); - wasEsOperationSuccessful(result); - } - }); - } - } catch (Exception exc) { - String message = - "Exception caught during aggregation suggestion entity sync PUT operation. Message - " - + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); - } - } - - private void wasEsOperationSuccessful(NetworkTransaction result) { - if (result != null) { - OperationResult opResult = result.getOperationResult(); - - if (!opResult.wasSuccessful()) { - shouldPerformRetry = true; - } else { - isSyncInProgress = false; - shouldPerformRetry = false; - } - } - } - - @Override - public SynchronizerState getState() { - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - } - - @Override - public String getStatReport(boolean shouldDisplayFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return getStatReport(syncDurationInMs, shouldDisplayFinalReport); - } - - @Override - public void shutdown() { - this.shutdownExecutors(); - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/AggregationSynchronizer.java b/src/main/java/org/onap/aai/sparky/synchronizer/AggregationSynchronizer.java deleted file mode 100644 index 817e633..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/AggregationSynchronizer.java +++ /dev/null @@ -1,771 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.sql.Timestamp; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Deque; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; - -import javax.json.Json; - -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.config.SynchronizerConfiguration; -import org.onap.aai.sparky.synchronizer.entity.AggregationEntity; -import org.onap.aai.sparky.synchronizer.entity.MergableEntity; -import org.onap.aai.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.onap.aai.sparky.synchronizer.enumeration.OperationState; -import org.onap.aai.sparky.synchronizer.enumeration.SynchronizerState; -import org.onap.aai.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchPut; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchRetrieval; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchUpdate; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.slf4j.MDC; - -import org.onap.aai.cl.mdc.MdcContext; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.node.ArrayNode; - -/** - * The Class AutosuggestionSynchronizer. - */ -public class AggregationSynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - /** - * The Class RetryAggregationEntitySyncContainer. 
- */ - private class RetryAggregationEntitySyncContainer { - NetworkTransaction txn; - AggregationEntity ae; - - /** - * Instantiates a new retry aggregation entity sync container. - * - * @param txn the txn - * @param ae the se - */ - public RetryAggregationEntitySyncContainer(NetworkTransaction txn, AggregationEntity ae) { - this.txn = txn; - this.ae = ae; - } - - public NetworkTransaction getNetworkTransaction() { - return txn; - } - - public AggregationEntity getAggregationEntity() { - return ae; - } - } - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(AggregationSynchronizer.class); - private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; - - private boolean allWorkEnumerated; - private Deque selflinks; - private Deque retryQueue; - private Map retryLimitTracker; - protected ExecutorService esPutExecutor; - private ConcurrentHashMap entityCounters; - private boolean syncInProgress; - private Map contextMap; - private String entityType; - - /** - * Instantiates a new entity aggregation synchronizer. - * - * @param indexName the index name - * @throws Exception the exception - */ - public AggregationSynchronizer(String entityType, String indexName) throws Exception { - super(LOG, "AGGES-" + indexName.toUpperCase(), 2, 5, 5, indexName); // multiple Autosuggestion - // Entity Synchronizer will - // run for different indices - - this.entityType = entityType; - this.allWorkEnumerated = false; - this.entityCounters = new ConcurrentHashMap(); - this.synchronizerName = "Entity Aggregation Synchronizer"; - this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); - this.syncInProgress = false; - this.allWorkEnumerated = false; - this.selflinks = new ConcurrentLinkedDeque(); - this.retryQueue = new ConcurrentLinkedDeque(); - this.retryLimitTracker = new ConcurrentHashMap(); - - this.esPutExecutor = NodeUtils.createNamedExecutor("AGGES-ES-PUT", 1, LOG); - Map descriptor = new HashMap(); - descriptor.put(entityType, oxmModelLoader.getEntityDescriptors().get(entityType)); - this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors(descriptor); - this.esEntityStats.initializeCountersFromOxmEntityDescriptors(descriptor); - this.contextMap = MDC.getCopyOfContextMap(); - } - - /** - * Collect all the work. - * - * @return the operation state - */ - private OperationState collectAllTheWork() { - final Map contextMap = MDC.getCopyOfContextMap(); - final String entity = this.getEntityType(); - try { - - aaiWorkOnHand.set(1); - - supplyAsync(new Supplier() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(entity); - aaiWorkOnHand.decrementAndGet(); - processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - // TODO -> LOG, what should be logged here? - } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "An error occurred getting data from AAI. 
Error = " + error.getMessage()); - } - }); - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - allWorkEnumerated = true; - syncEntityTypes(); - - while (!isSyncDone()) { - performRetrySync(); - Thread.sleep(1000); - } - - /* - * Make sure we don't hang on to retries that failed which could cause issues during future - * syncs - */ - retryLimitTracker.clear(); - - } catch (Exception exc) { - // TODO -> LOG, waht should be logged here? - } - - return OperationState.OK; - } - - - /** - * Perform retry sync. - */ - private void performRetrySync() { - while (retryQueue.peek() != null) { - - RetryAggregationEntitySyncContainer rsc = retryQueue.poll(); - if (rsc != null) { - - AggregationEntity ae = rsc.getAggregationEntity(); - NetworkTransaction txn = rsc.getNetworkTransaction(); - - String link = null; - try { - /* - * In this retry flow the se object has already derived its fields - */ - link = getElasticFullUrl("/" + ae.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction retryTransaction = new NetworkTransaction(); - retryTransaction.setLink(link); - retryTransaction.setEntityType(txn.getEntityType()); - retryTransaction.setDescriptor(txn.getDescriptor()); - retryTransaction.setOperationType(HttpMethod.GET); - - /* - * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already - * called incrementAndGet when queuing the failed PUT! - */ - - supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, ae); - } - }); - } - - } - } - } - - /** - * Perform document upsert. - * - * @param esGetTxn the es get txn - * @param ae the ae - */ - protected void performDocumentUpsert(NetworkTransaction esGetTxn, AggregationEntity ae) { - /** - *

    - *

      - * As part of the response processing we need to do the following: - *
    - * 1. Extract the version (if present); it will be the ETAG when we use the
    - * Search-Abstraction-Service.
    - * 2. Spawn the next task, which is to do the PUT operation into elastic with or without the
    - * version tag:
    - * a) if version is null or RC=404, then standard put, no _update with version tag
    - * b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic
    - *

    - */ - String link = null; - try { - link = getElasticFullUrl("/" + ae.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - return; - } - - String versionNumber = null; - boolean wasEntryDiscovered = false; - if (esGetTxn.getOperationResult().getResultCode() == 404) { - LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, ae.getEntityPrimaryKeyValue()); - } else if (esGetTxn.getOperationResult().getResultCode() == 200) { - wasEntryDiscovered = true; - try { - versionNumber = NodeUtils.extractFieldValueFromObject( - NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), - "_version"); - } catch (IOException exc) { - String message = - "Error extracting version number from response, aborting aggregation entity sync of " - + ae.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); - return; - } - } else { - /* - * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we - * return. - */ - LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, - String.valueOf(esGetTxn.getOperationResult().getResultCode())); - return; - } - - try { - String jsonPayload = null; - if (wasEntryDiscovered) { - try { - ArrayList sourceObject = new ArrayList(); - NodeUtils.extractObjectsByKey( - NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), - "_source", sourceObject); - - if (!sourceObject.isEmpty()) { - String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); - MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); - ObjectReader updater = mapper.readerForUpdating(me); - MergableEntity merged = updater.readValue(ae.getIndexDocumentJson()); - jsonPayload = mapper.writeValueAsString(merged); - } - } catch (IOException exc) { - String message = - "Error extracting source value from response, aborting aggregation entity sync of " - + ae.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); - return; - } - } else { - jsonPayload = ae.getIndexDocumentJson(); - } - - if (wasEntryDiscovered) { - if (versionNumber != null && jsonPayload != null) { - - String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), - ElasticSearchConfig.getConfig().getType(), ae.getId(), versionNumber, jsonPayload); - - NetworkTransaction transactionTracker = new NetworkTransaction(); - transactionTracker.setEntityType(esGetTxn.getEntityType()); - transactionTracker.setDescriptor(esGetTxn.getDescriptor()); - transactionTracker.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), - requestPayload, esDataProvider, transactionTracker), esPutExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = "Aggregation entity sync UPDATE PUT error - " - + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, ae); - } - }); - } - - } else { - if (link != null && jsonPayload != null) { - - NetworkTransaction updateElasticTxn = new NetworkTransaction(); - updateElasticTxn.setLink(link); - updateElasticTxn.setEntityType(esGetTxn.getEntityType()); - updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); - updateElasticTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), - esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = - "Aggregation entity sync UPDATE PUT error - " + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, ae); - } - }); - } - } - } catch (Exception exc) { - String message = "Exception caught during aggregation entity sync PUT operation. Message - " - + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - /** - * Should allow retry. - * - * @param id the id - * @return true, if successful - */ - private boolean shouldAllowRetry(String id) { - boolean isRetryAllowed = true; - if (retryLimitTracker.get(id) != null) { - Integer currentCount = retryLimitTracker.get(id); - if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { - isRetryAllowed = false; - String message = "Aggregation entity re-sync limit reached for " + id - + ", re-sync will no longer be attempted for this entity"; - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } else { - Integer newCount = new Integer(currentCount.intValue() + 1); - retryLimitTracker.put(id, newCount); - } - } else { - Integer firstRetryCount = new Integer(1); - retryLimitTracker.put(id, firstRetryCount); - } - - return isRetryAllowed; - } - - /** - * Process store document result. 
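[Editor's annotation, not part of this patch] For readability, the versioned branch of the upsert above boils down to building a bulk-import request and shipping it through PerformElasticSearchUpdate. Annotated, with argument meanings inferred from the surrounding code:

    String requestPayload = esDataProvider.buildBulkImportOperationRequest(
        getIndexName(),                              // target index
        ElasticSearchConfig.getConfig().getType(),   // document type from ES configuration
        ae.getId(),                                  // document id derived by the aggregation entity
        versionNumber,                               // _version captured by the preliminary GET
        jsonPayload);                                // merged source document
    // the payload is then PUT to ElasticSearchConfig.getConfig().getBulkUrl(), letting Elasticsearch
    // reject the write with a 409 if the version has moved on (optimistic concurrency)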
- * - * @param esPutResult the es put result - * @param esGetResult the es get result - * @param ae the ae - */ - private void processStoreDocumentResult(NetworkTransaction esPutResult, - NetworkTransaction esGetResult, AggregationEntity ae) { - - OperationResult or = esPutResult.getOperationResult(); - - if (!or.wasSuccessful()) { - if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { - - if (shouldAllowRetry(ae.getId())) { - esWorkOnHand.incrementAndGet(); - - RetryAggregationEntitySyncContainer rsc = - new RetryAggregationEntitySyncContainer(esGetResult, ae); - retryQueue.push(rsc); - - String message = "Store document failed during aggregation entity synchronization" - + " due to version conflict. Entity will be re-synced."; - LOG.warn(AaiUiMsgs.ERROR_GENERIC, message); - } - } else { - String message = - "Store document failed during aggregation entity synchronization with result code " - + or.getResultCode() + " and result message " + or.getResult(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - } - - /** - * Sync entity types. - */ - private void syncEntityTypes() { - - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); - } else { - if (result == null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, - linkDescriptor.getSelfLink()); - } else { - updateActiveInventoryCounters(result); - fetchDocumentForUpsert(result); - } - } - }); - } - - } - - } - - /** - * Fetch document for upsert. - * - * @param txn the txn - */ - private void fetchDocumentForUpsert(NetworkTransaction txn) { - // modified - if (!txn.getOperationResult().wasSuccessful()) { - String message = "Self link failure. 
Result - " + txn.getOperationResult().getResult(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - return; - } - - try { - final String jsonResult = txn.getOperationResult().getResult(); - if (jsonResult != null && jsonResult.length() > 0) { - - AggregationEntity ae = new AggregationEntity(oxmModelLoader); - ae.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); - populateAggregationEntityDocument(ae, jsonResult, txn.getDescriptor()); - ae.deriveFields(); - - String link = null; - try { - link = getElasticFullUrl("/" + ae.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.GET); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, ae); - } - }); - } - } - - } catch (JsonProcessingException exc) { - // TODO -> LOG, waht should be logged here? - } catch (IOException exc) { - // TODO -> LOG, waht should be logged here? - } - } - - - /** - * Populate aggregation entity document. - * - * @param doc the doc - * @param result the result - * @param resultDescriptor the result descriptor - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. - */ - protected void populateAggregationEntityDocument(AggregationEntity doc, String result, - OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { - doc.setEntityType(resultDescriptor.getEntityName()); - JsonNode entityNode = mapper.readTree(result); - Map map = mapper.convertValue(entityNode, Map.class); - doc.copyAttributeKeyValuePair(map); - } - - /** - * Process entity type self links. - * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - String message = "Could not deserialize JSON (representing operation result) as node tree. " - + "Operation result = " + jsonResult + ". 
" + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); - return; - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - OxmEntityDescriptor descriptor = null; - - if (resourceType != null && resourceLink != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(resourceType); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - - selflinks.add(new SelfLinkDescriptor(resourceLink, - SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType)); - - - } - } - } - } - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - this.syncDurationInMs = -1; - syncStartedTimeStampInMs = System.currentTimeMillis(); - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "AggregationSynchronizer", "", "Sync", ""); - - return collectAllTheWork(); - } - - @Override - public SynchronizerState getState() { - - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return getStatReport(syncDurationInMs, showFinalReport); - } - - public String getEntityType() { - return entityType; - } - - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " - + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); - } - - if (totalWorkOnHand > 0 || !allWorkEnumerated) { - return false; - } - - this.syncInProgress = false; - - return true; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() - */ - @Override - public void clearCache() { - - if (syncInProgress) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, - "Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); - return; - } - - super.clearCache(); - this.resetCounters(); - if (entityCounters != null) { - entityCounters.clear(); - } - - allWorkEnumerated = false; - - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/AutosuggestionSynchronizer.java b/src/main/java/org/onap/aai/sparky/synchronizer/AutosuggestionSynchronizer.java deleted file mode 100644 index 328fb97..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/AutosuggestionSynchronizer.java +++ /dev/null @@ -1,737 +0,0 @@ -/** - * 
============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Deque; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; - -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.config.SynchronizerConfiguration; -import org.onap.aai.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.onap.aai.sparky.synchronizer.entity.SuggestionSearchEntity; -import org.onap.aai.sparky.synchronizer.enumeration.OperationState; -import org.onap.aai.sparky.synchronizer.enumeration.SynchronizerState; -import org.onap.aai.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchPut; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchRetrieval; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.sparky.util.SuggestionsPermutation; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.onap.aai.cl.mdc.MdcContext; -import org.slf4j.MDC; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; - -/** - * The Class AutosuggestionSynchronizer. - */ -public class AutosuggestionSynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - private class RetrySuggestionEntitySyncContainer { - NetworkTransaction txn; - SuggestionSearchEntity ssec; - - /** - * Instantiates a new RetrySuggestionEntitySyncContainer. 
- * - * @param txn the txn - * @param icer the icer - */ - public RetrySuggestionEntitySyncContainer(NetworkTransaction txn, SuggestionSearchEntity icer) { - this.txn = txn; - this.ssec = icer; - } - - public NetworkTransaction getNetworkTransaction() { - return txn; - } - - public SuggestionSearchEntity getSuggestionSearchEntity() { - return ssec; - } - } - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(AutosuggestionSynchronizer.class); - private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; - - private boolean allWorkEnumerated; - private Deque selflinks; - private ConcurrentHashMap entityCounters; - private boolean syncInProgress; - private Map contextMap; - protected ExecutorService esPutExecutor; - private Deque retryQueue; - private Map retryLimitTracker; - - /** - * Instantiates a new historical entity summarizer. - * - * @param indexName the index name - * @throws Exception the exception - */ - public AutosuggestionSynchronizer(String indexName) throws Exception { - super(LOG, "ASES-" + indexName.toUpperCase(), 2, 5, 5, indexName); // multiple Autosuggestion - // Entity Synchronizer will - // run for different indices - - this.allWorkEnumerated = false; - this.selflinks = new ConcurrentLinkedDeque(); - this.entityCounters = new ConcurrentHashMap(); - this.synchronizerName = "Autosuggestion Entity Synchronizer"; - this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); - this.syncInProgress = false; - this.contextMap = MDC.getCopyOfContextMap(); - this.esPutExecutor = NodeUtils.createNamedExecutor("SUES-ES-PUT", 5, LOG); - this.syncDurationInMs = -1; - } - - /** - * Collect all the work. - * - * @return the operation state - */ - private OperationState collectAllTheWork() { - final Map contextMap = MDC.getCopyOfContextMap(); - Map descriptorMap = - oxmModelLoader.getSuggestionSearchEntityDescriptors(); - - if (descriptorMap.isEmpty()) { - LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES); - LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES); - return OperationState.ERROR; - } - - Collection syncTypes = descriptorMap.keySet(); - - try { - - /* - * launch a parallel async thread to process the documents for each entity-type (to max the of - * the configured executor anyway) - */ - - aaiWorkOnHand.set(syncTypes.size()); - - for (String key : syncTypes) { - - supplyAsync(new Supplier() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key); - aaiWorkOnHand.decrementAndGet(); - processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - // TODO -> LOG, what should be logged here? - } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "An error occurred getting data from AAI. 
Error = " + error.getMessage()); - } - }); - - } - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - allWorkEnumerated = true; - syncEntityTypes(); - - while (!isSyncDone()) { - performRetrySync(); - Thread.sleep(1000); - } - - /* - * Make sure we don't hang on to retries that failed which could cause issues during future - * syncs - */ - retryLimitTracker.clear(); - - } catch (Exception exc) { - // TODO -> LOG, waht should be logged here? - } - - return OperationState.OK; - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - this.syncDurationInMs = -1; - syncStartedTimeStampInMs = System.currentTimeMillis(); - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "AutosuggestionSynchronizer", "", "Sync", ""); - - return collectAllTheWork(); - } - - /** - * Process entity type self links. - * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - String message = "Could not deserialize JSON (representing operation result) as node tree. " - + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); - return; - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - OxmEntityDescriptor descriptor = null; - - if (resourceType != null && resourceLink != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(resourceType); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - selflinks.add(new SelfLinkDescriptor(resourceLink, - SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType)); - - - } - } - } - } - } - - /** - * Sync entity types. 
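Editorial note: the enumeration step above relies on a simple "work on hand" pattern, an AtomicInteger set to the number of dispatched tasks and drained as each asynchronous self-link fetch completes, with the caller polling until it reaches zero. The following self-contained sketch is illustrative only and is not part of the original sources; the entity-type names and executor size are made up.

    import java.util.List;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicInteger;

    public class WorkOnHandSketch {

      private static final AtomicInteger workOnHand = new AtomicInteger(0);

      public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(2);
        List<String> entityTypes = List.of("generic-vnf", "pserver", "complex");

        // One unit of work per entity type, mirroring aaiWorkOnHand.set(syncTypes.size()).
        workOnHand.set(entityTypes.size());

        for (String type : entityTypes) {
          CompletableFuture.supplyAsync(() -> {
            // Placeholder for the self-link fetch, e.g. getSelfLinksByEntityType(type).
            return type + ": <self-links>";
          }, executor).whenComplete((result, error) -> {
            // Each completion (success or failure) releases one unit of work.
            workOnHand.decrementAndGet();
          });
        }

        // The synchronizer polls until all outstanding work has drained.
        while (workOnHand.get() != 0) {
          Thread.sleep(100);
        }

        executor.shutdown();
        System.out.println("all self-link enumeration tasks completed");
      }
    }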
- */ - private void syncEntityTypes() { - - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); - } else { - if (result == null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, - linkDescriptor.getSelfLink()); - } else { - updateActiveInventoryCounters(result); - fetchDocumentForUpsert(result); - } - } - }); - } - - } - - } - - /* - * Return a set of valid suggestion attributes for the provided entityName that are present in the - * JSON - * - * @param node JSON node in which the attributes should be found - * - * @param entityName Name of the entity - * - * @return List of all valid suggestion attributes(key's) - */ - public List getSuggestionFromReponse(JsonNode node, String entityName) { - List suggestableAttr = new ArrayList(); - HashMap desc = oxmModelLoader.getOxmModel().get(entityName); - String attr = desc.get("suggestibleAttributes"); - suggestableAttr = Arrays.asList(attr.split(",")); - List suggestableValue = new ArrayList<>(); - for (String attribute : suggestableAttr) { - if (node.get(attribute) != null && node.get(attribute).asText().length() > 0) { - suggestableValue.add(attribute); - } - } - return suggestableValue; - } - - /** - * Fetch all the documents for upsert. Based on the number of permutations that are available the - * number of documents will be different - * - * @param txn the txn - */ - private void fetchDocumentForUpsert(NetworkTransaction txn) { - if (!txn.getOperationResult().wasSuccessful()) { - String message = "Self link failure. Result - " + txn.getOperationResult().getResult(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - return; - } - try { - final String jsonResult = txn.getOperationResult().getResult(); - - if (jsonResult != null && jsonResult.length() > 0) { - - // Step 1: Calculate the number of possible permutations of attributes - String entityName = txn.getDescriptor().getEntityName(); - JsonNode entityNode = mapper.readTree(jsonResult); - - SuggestionsPermutation suggPermutation = new SuggestionsPermutation(); - ArrayList> uniqueLists = suggPermutation - .getSuggestionsPermutation(getSuggestionFromReponse(entityNode, entityName)); - - // Now we have a list of all possible permutations for the status that are - // defined for this entity type. Try inserting a document for every combination. 
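Editorial note: the permutation step referenced in the comment above is assumed to enumerate every non-empty combination of the suggestable attributes that the entity instance actually populated, with one suggestion document written per combination. A minimal sketch of that enumeration follows; it is illustrative only, the attribute names are hypothetical, and the real SuggestionsPermutation helper may differ in detail.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SuggestionCombinationsSketch {

      /** Enumerates every non-empty subset of the supplied attribute names via a bit mask. */
      static List<List<String>> nonEmptySubsets(List<String> attributes) {
        List<List<String>> subsets = new ArrayList<>();
        int n = attributes.size();
        for (int mask = 1; mask < (1 << n); mask++) {
          List<String> subset = new ArrayList<>();
          for (int bit = 0; bit < n; bit++) {
            if ((mask & (1 << bit)) != 0) {
              subset.add(attributes.get(bit));
            }
          }
          subsets.add(subset);
        }
        return subsets;
      }

      public static void main(String[] args) {
        // Attributes the entity instance actually populated (cf. the suggestion lookup above).
        List<String> present = Arrays.asList("prov-status", "orchestration-status", "nf-type");
        // One suggestion document would be written per combination.
        nonEmptySubsets(present).forEach(System.out::println);
      }
    }

With three populated attributes this yields seven combinations, hence seven candidate suggestion documents.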
- for (ArrayList uniqueList : uniqueLists) { - SuggestionSearchEntity sse = new SuggestionSearchEntity(oxmModelLoader); - sse.setSuggestableAttr(uniqueList); - sse.setPayloadFromResponse(entityNode); - sse.setLink(txn.getLink()); - sse.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); - populateSuggestionSearchEntityDocument(sse, jsonResult, txn); - // The unique id for the document will be created at derive fields - sse.deriveFields(); - // Insert the document only if it has valid statuses - if (sse.isSuggestableDoc()) { - String link = null; - try { - link = getElasticFullUrl("/" + sse.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.GET); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, sse); - } - }); - } - } - } - } - } catch (JsonProcessingException exc) { - // TODO -> LOG, waht should be logged here? - } catch (IOException exc) { - // TODO -> LOG, waht should be logged here? - } - } - - protected void populateSuggestionSearchEntityDocument(SuggestionSearchEntity sse, String result, - NetworkTransaction txn) throws JsonProcessingException, IOException { - - OxmEntityDescriptor resultDescriptor = txn.getDescriptor(); - - sse.setEntityType(resultDescriptor.getEntityName()); - - JsonNode entityNode = mapper.readTree(result); - - List primaryKeyValues = new ArrayList(); - String pkeyValue = null; - - for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { - pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); - if (pkeyValue != null) { - primaryKeyValues.add(pkeyValue); - } else { - String message = "populateSuggestionSearchEntityDocument()," - + " pKeyValue is null for entityType = " + resultDescriptor.getEntityName(); - LOG.warn(AaiUiMsgs.WARN_GENERIC, message); - } - } - - final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); - sse.setEntityPrimaryKeyValue(primaryCompositeKeyValue); - sse.generateSuggestionInputPermutations(); - } - - protected void performDocumentUpsert(NetworkTransaction esGetTxn, SuggestionSearchEntity sse) { - /** - *

    - *
    - * As part of the response processing we need to do the following:
    - * 1. Extract the version (if present); it will be the ETAG when we use the Search-Abstraction-Service.
    - * 2. Spawn the next task, which is the PUT operation into Elasticsearch, with or without the version tag:
    - *    a) if the version is null or RC=404, do a standard PUT, with no _update/version qualifier
    - *    b) if the version is not null, do a PUT with _update?version=<versionNumber> in the URI to Elasticsearch
    - *
    - */ - String link = null; - try { - link = getElasticFullUrl("/" + sse.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - return; - } - - boolean wasEntryDiscovered = false; - if (esGetTxn.getOperationResult().getResultCode() == 404) { - LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, sse.getEntityPrimaryKeyValue()); - } else if (esGetTxn.getOperationResult().getResultCode() == 200) { - wasEntryDiscovered = true; - } else { - /* - * Not being a 200 does not mean a failure. eg 201 is returned for created. and 500 for es not - * found TODO -> Should we return. - */ - LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, - String.valueOf(esGetTxn.getOperationResult().getResultCode())); - return; - } - // Insert a new document only if the paylod is different. - // This is determined by hashing the payload and using it as a id for the document - // - if (!wasEntryDiscovered) { - try { - String jsonPayload = null; - - jsonPayload = sse.getIndexDocumentJson(); - if (link != null && jsonPayload != null) { - - NetworkTransaction updateElasticTxn = new NetworkTransaction(); - updateElasticTxn.setLink(link); - updateElasticTxn.setEntityType(esGetTxn.getEntityType()); - updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); - updateElasticTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), - esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = "Suggestion search entity sync UPDATE PUT error - " - + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, sse); - } - }); - } - } catch (Exception exc) { - String message = - "Exception caught during suggestion search entity sync PUT operation. Message - " - + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); - } - } - } - - private void processStoreDocumentResult(NetworkTransaction esPutResult, - NetworkTransaction esGetResult, SuggestionSearchEntity sse) { - - OperationResult or = esPutResult.getOperationResult(); - - if (!or.wasSuccessful()) { - if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { - - if (shouldAllowRetry(sse.getId())) { - esWorkOnHand.incrementAndGet(); - - RetrySuggestionEntitySyncContainer rssec = - new RetrySuggestionEntitySyncContainer(esGetResult, sse); - retryQueue.push(rssec); - - String message = "Store document failed during suggestion search entity synchronization" - + " due to version conflict. Entity will be re-synced."; - LOG.warn(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); - } - } else { - String message = - "Store document failed during suggestion search entity synchronization with result code " - + or.getResultCode() + " and result message " + or.getResult(); - LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); - } - } - } - - /** - * Perform retry sync. 
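Editorial note: a compact sketch of the decision described in the javadoc above, choosing between a plain PUT and a version-guarded PUT based on the preceding GET. It is illustrative only; the class and URL are assumptions, the exact URI form (the javadoc mentions _update?version=) depends on the Elasticsearch API in use, and the real code delegates the HTTP work to esDataProvider.

    public class VersionedUpsertSketch {

      /** Minimal stand-in for the outcome of the preceding Elasticsearch GET. */
      static class GetResult {
        final int resultCode;   // e.g. 200 (found) or 404 (absent)
        final String version;   // value of "_version" when found, else null
        GetResult(int resultCode, String version) {
          this.resultCode = resultCode;
          this.version = version;
        }
      }

      /** Builds the PUT URL, with or without an optimistic-locking version qualifier. */
      static String buildPutUrl(String baseDocUrl, GetResult get) {
        if (get.resultCode == 404 || get.version == null) {
          // Document not there yet: plain PUT, no version qualifier.
          return baseDocUrl;
        }
        if (get.resultCode == 200) {
          // Document exists: guard the update with the version returned by the GET.
          return baseDocUrl + "?version=" + get.version;
        }
        // Any other code is treated as an error by the synchronizer; abort the upsert.
        throw new IllegalStateException("unexpected GET result code: " + get.resultCode);
      }

      public static void main(String[] args) {
        String docUrl = "http://localhost:9200/entityautosuggestindex/default/abc123";
        System.out.println(buildPutUrl(docUrl, new GetResult(404, null)));
        System.out.println(buildPutUrl(docUrl, new GetResult(200, "7")));
      }
    }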
- */ - private void performRetrySync() { - while (retryQueue.peek() != null) { - - RetrySuggestionEntitySyncContainer susc = retryQueue.poll(); - if (susc != null) { - - SuggestionSearchEntity sus = susc.getSuggestionSearchEntity(); - NetworkTransaction txn = susc.getNetworkTransaction(); - - String link = null; - try { - /* - * In this retry flow the se object has already derived its fields - */ - link = getElasticFullUrl("/" + sus.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction retryTransaction = new NetworkTransaction(); - retryTransaction.setLink(link); - retryTransaction.setEntityType(txn.getEntityType()); - retryTransaction.setDescriptor(txn.getDescriptor()); - retryTransaction.setOperationType(HttpMethod.GET); - - /* - * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already - * called incrementAndGet when queuing the failed PUT! - */ - - supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, sus); - } - }); - } - - } - } - } - - /** - * Should allow retry. - * - * @param id the id - * @return true, if successful - */ - private boolean shouldAllowRetry(String id) { - boolean isRetryAllowed = true; - if (retryLimitTracker.get(id) != null) { - Integer currentCount = retryLimitTracker.get(id); - if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { - isRetryAllowed = false; - String message = "Searchable entity re-sync limit reached for " + id - + ", re-sync will no longer be attempted for this entity"; - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } else { - Integer newCount = new Integer(currentCount.intValue() + 1); - retryLimitTracker.put(id, newCount); - } - } else { - Integer firstRetryCount = new Integer(1); - retryLimitTracker.put(id, firstRetryCount); - } - - return isRetryAllowed; - } - - - - @Override - public SynchronizerState getState() { - - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return getStatReport(syncDurationInMs, showFinalReport); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " - + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); - } - - if (totalWorkOnHand > 0 || !allWorkEnumerated) { - return false; - } - - this.syncInProgress = false; - - return true; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() - */ - @Override - public void clearCache() { - - if (syncInProgress) { - 
LOG.debug(AaiUiMsgs.DEBUG_GENERIC, - "Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); - return; - } - - super.clearCache(); - this.resetCounters(); - if (entityCounters != null) { - entityCounters.clear(); - } - - allWorkEnumerated = false; - - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/CrossEntityReferenceSynchronizer.java b/src/main/java/org/onap/aai/sparky/synchronizer/CrossEntityReferenceSynchronizer.java deleted file mode 100644 index 8328627..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/CrossEntityReferenceSynchronizer.java +++ /dev/null @@ -1,907 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Deque; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ExecutorService; -import java.util.function.Supplier; - -import org.onap.aai.sparky.config.oxm.CrossEntityReference; -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.config.SynchronizerConfiguration; -import org.onap.aai.sparky.synchronizer.entity.IndexableCrossEntityReference; -import org.onap.aai.sparky.synchronizer.entity.MergableEntity; -import org.onap.aai.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.onap.aai.sparky.synchronizer.enumeration.OperationState; -import org.onap.aai.sparky.synchronizer.enumeration.SynchronizerState; -import org.onap.aai.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchPut; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchRetrieval; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchUpdate; -import org.onap.aai.sparky.util.NodeUtils; -import 
org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.slf4j.MDC; - -import org.onap.aai.cl.mdc.MdcContext; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.node.ArrayNode; - -/** - * The Class CrossEntityReferenceSynchronizer. - */ -public class CrossEntityReferenceSynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - /** - * The Class RetryCrossEntitySyncContainer. - */ - private class RetryCrossEntitySyncContainer { - NetworkTransaction txn; - IndexableCrossEntityReference icer; - - /** - * Instantiates a new retry cross entity sync container. - * - * @param txn the txn - * @param icer the icer - */ - public RetryCrossEntitySyncContainer(NetworkTransaction txn, - IndexableCrossEntityReference icer) { - this.txn = txn; - this.icer = icer; - } - - public NetworkTransaction getNetworkTransaction() { - return txn; - } - - public IndexableCrossEntityReference getIndexableCrossEntityReference() { - return icer; - } - } - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(CrossEntityReferenceSynchronizer.class); - - private static final String SERVICE_INSTANCE = "service-instance"; - private Deque selflinks; - private Deque retryQueue; - private Map retryLimitTracker; - private boolean isAllWorkEnumerated; - protected ExecutorService esPutExecutor; - protected ActiveInventoryConfig aaiConfig; - - /** - * Instantiates a new cross entity reference synchronizer. - * - * @param indexName the index name - * @throws Exception the exception - */ - public CrossEntityReferenceSynchronizer(String indexName, ActiveInventoryConfig aaiConfig) - throws Exception { - super(LOG, "CERS", 2, 5, 5, indexName); - this.selflinks = new ConcurrentLinkedDeque(); - this.retryQueue = new ConcurrentLinkedDeque(); - this.retryLimitTracker = new ConcurrentHashMap(); - this.synchronizerName = "Cross Reference Entity Synchronizer"; - this.isAllWorkEnumerated = false; - this.esPutExecutor = NodeUtils.createNamedExecutor("CERS-ES-PUT", 5, LOG); - this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors( - oxmModelLoader.getCrossReferenceEntityDescriptors()); - this.esEntityStats.initializeCountersFromOxmEntityDescriptors( - oxmModelLoader.getCrossReferenceEntityDescriptors()); - this.aaiConfig = aaiConfig; - this.syncDurationInMs = -1; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - this.syncDurationInMs = -1; - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "CrossEntitySynchronizer", "", "Sync", ""); - - resetCounters(); - syncStartedTimeStampInMs = System.currentTimeMillis(); - launchSyncFlow(); - return OperationState.OK; - } - - @Override - public SynchronizerState getState() { - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return getStatReport(syncDurationInMs, showFinalReport); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void 
shutdown() { - this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (totalWorkOnHand > 0 || !isAllWorkEnumerated) { - return false; - } - - return true; - } - - /** - * Launch sync flow. - * - * @return the operation state - */ - private OperationState launchSyncFlow() { - final Map contextMap = MDC.getCopyOfContextMap(); - Map descriptorMap = - oxmModelLoader.getCrossReferenceEntityDescriptors(); - - if (descriptorMap.isEmpty()) { - LOG.error(AaiUiMsgs.ERROR_LOADING_OXM); - - return OperationState.ERROR; - } - - Collection syncTypes = descriptorMap.keySet(); - - try { - - /* - * launch a parallel async thread to process the documents for each entity-type (to max the of - * the configured executor anyway) - */ - - aaiWorkOnHand.set(syncTypes.size()); - - for (String key : syncTypes) { - - supplyAsync(new Supplier() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key); - aaiWorkOnHand.decrementAndGet(); - processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - // TODO -> LOG, what should be logged here? - } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); - } - }); - } - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - isAllWorkEnumerated = true; - performSync(); - - while (!isSyncDone()) { - performRetrySync(); - Thread.sleep(1000); - } - - /* - * Make sure we don't hang on to retries that failed which could cause issues during future - * syncs - */ - retryLimitTracker.clear(); - - } catch (Exception exc) { - // TODO -> LOG, waht should be logged here? - } - - return OperationState.OK; - } - - /** - * Perform sync. - */ - private void performSync() { - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - if (descriptor.hasCrossEntityReferences()) { - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink() + linkDescriptor.getDepthModifier()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.SELF_LINK_GET, error.getLocalizedMessage()); - } else { - if (result == null) { - LOG.error(AaiUiMsgs.SELF_LINK_CROSS_REF_SYNC); - } else { - updateActiveInventoryCounters(result); - fetchDocumentForUpsert(result); - } - } - }); - } - } - } - } - - /** - * Process entity type self links. 
- * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - String message = "Could not deserialize JSON (representing operation result) as node tree. " - + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); - return; - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - OxmEntityDescriptor descriptor = null; - - if (resourceType != null && resourceLink != null) { - descriptor = oxmModelLoader.getEntityDescriptor(resourceType); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - if (descriptor.hasCrossEntityReferences()) { - selflinks.add(new SelfLinkDescriptor(resourceLink, - SynchronizerConfiguration.DEPTH_ALL_MODIFIER, resourceType)); - } - } - } - } - } - } - - - - /** - * By providing the entity type and a json node for the entity, determine the primary key name(s) - * + primary key value(s) sufficient to build an entity query string of the following format: - * - * .: - * - * @return - a composite string in the above format or null - */ - private String determineEntityQueryString(String entityType, JsonNode entityJsonNode) { - - OxmEntityDescriptor entityDescriptor = oxmModelLoader.getEntityDescriptor(entityType); - - String queryString = null; - - if (entityDescriptor != null) { - - final List primaryKeyNames = entityDescriptor.getPrimaryKeyAttributeName(); - final List keyValues = new ArrayList(); - NodeUtils.extractFieldValuesFromObject(entityJsonNode, primaryKeyNames, keyValues); - - queryString = entityType + "." + NodeUtils.concatArray(primaryKeyNames, "/") + ":" - + NodeUtils.concatArray(keyValues); - - } - - return queryString; - - - } - - /** - * Fetch document for upsert. - * - * @param txn the txn - */ - private void fetchDocumentForUpsert(NetworkTransaction txn) { - - if (!txn.getOperationResult().wasSuccessful()) { - LOG.error(AaiUiMsgs.SELF_LINK_GET, txn.getOperationResult().getResult()); - return; - } - - if (txn.getDescriptor().hasCrossEntityReferences()) { - - final String jsonResult = txn.getOperationResult().getResult(); - - if (jsonResult != null && jsonResult.length() > 0) { - - /** - * Here's what we are going to do: - * - *
  • Extract primary key name and value from the parent type. - *
  • Extract the primary key and value from the nested child instance. - *
  • Build a generic query to discover the self-link for the nested-child-instance using - * parent and child. - *
  • Set the self-link on the child. - *
  • Generate the id that will allow the elastic-search upsert to work. - *
  • Rinse and repeat. - */ - - OxmEntityDescriptor parentEntityDescriptor = - oxmModelLoader.getEntityDescriptor(txn.getEntityType()); - - if (parentEntityDescriptor != null) { - - CrossEntityReference cerDefinition = parentEntityDescriptor.getCrossEntityReference(); - - if (cerDefinition != null) { - JsonNode convertedNode = null; - try { - convertedNode = - NodeUtils.convertJsonStrToJsonNode(txn.getOperationResult().getResult()); - - final String parentEntityQueryString = - determineEntityQueryString(txn.getEntityType(), convertedNode); - - List extractedParentEntityAttributeValues = new ArrayList(); - - NodeUtils.extractFieldValuesFromObject(convertedNode, - cerDefinition.getReferenceAttributes(), extractedParentEntityAttributeValues); - - List nestedTargetEntityInstances = new ArrayList(); - NodeUtils.extractObjectsByKey(convertedNode, cerDefinition.getTargetEntityType(), - nestedTargetEntityInstances); - - for (JsonNode targetEntityInstance : nestedTargetEntityInstances) { - - OxmEntityDescriptor cerDescriptor = oxmModelLoader - .getSearchableEntityDescriptor(cerDefinition.getTargetEntityType()); - - if (cerDescriptor != null) { - - String childEntityType = cerDefinition.getTargetEntityType(); - - List childPrimaryKeyNames = cerDescriptor.getPrimaryKeyAttributeName(); - - List childKeyValues = new ArrayList(); - NodeUtils.extractFieldValuesFromObject(targetEntityInstance, childPrimaryKeyNames, - childKeyValues); - - String childEntityQueryKeyString = - childEntityType + "." + NodeUtils.concatArray(childPrimaryKeyNames, "/") + ":" - + NodeUtils.concatArray(childKeyValues); - - /** - * Build generic-query to query child instance self-link from AAI - */ - List orderedQueryKeyParams = new ArrayList(); - if (SERVICE_INSTANCE.equals(childEntityType)) { - orderedQueryKeyParams.clear(); - orderedQueryKeyParams.add(childEntityQueryKeyString); - } else { - orderedQueryKeyParams.add(parentEntityQueryString); - orderedQueryKeyParams.add(childEntityQueryKeyString); - } - String genericQueryStr = null; - try { - genericQueryStr = aaiDataProvider.getGenericQueryForSelfLink(childEntityType, - orderedQueryKeyParams); - - if (genericQueryStr != null) { - aaiWorkOnHand.incrementAndGet(); - OperationResult aaiQueryResult = aaiDataProvider - .queryActiveInventoryWithRetries(genericQueryStr, "application/json", - aaiConfig.getAaiRestConfig().getNumRequestRetries()); - aaiWorkOnHand.decrementAndGet(); - if (aaiQueryResult != null && aaiQueryResult.wasSuccessful()) { - - Collection entityLinks = new ArrayList(); - JsonNode genericQueryResult = null; - try { - genericQueryResult = - NodeUtils.convertJsonStrToJsonNode(aaiQueryResult.getResult()); - - if (genericQueryResult != null) { - - NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link", - entityLinks); - - String selfLink = null; - - if (entityLinks.size() != 1) { - /** - * an ambiguity exists where we can't reliably determine the self - * link, this should be a permanent error - */ - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY, - String.valueOf(entityLinks.size())); - } else { - selfLink = ((JsonNode) entityLinks.toArray()[0]).asText(); - - if (!cerDescriptor.getSearchableAttributes().isEmpty()) { - - IndexableCrossEntityReference icer = - getPopulatedDocument(targetEntityInstance, cerDescriptor); - - for (String parentCrossEntityReferenceAttributeValue : extractedParentEntityAttributeValues) { - icer.addCrossEntityReferenceValue( - parentCrossEntityReferenceAttributeValue); - } - - 
icer.setLink(ActiveInventoryConfig.extractResourcePath(selfLink)); - - icer.deriveFields(); - - String link = null; - try { - link = getElasticFullUrl("/" + icer.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, - exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.GET); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, - error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, icer); - } - }); - } - } - } - } else { - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION); - } - - } catch (Exception exc) { - LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), - exc.getLocalizedMessage()); - } - - } else { - String message = "Entity sync failed because AAI query failed with error "; - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); - } - - } else { - String message = - "Entity Sync failed because generic query str could not be determined."; - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); - } - } catch (Exception exc) { - String message = - "Failed to sync entity because generation of generic query failed with error = " - + exc.getMessage(); - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); - } - - } - } - - } catch (IOException ioe) { - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, ioe.getMessage()); - } - } - - } else { - LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND, txn.getEntityType()); - } - } - } - } - - /** - * Perform document upsert. - * - * @param esGetResult the es get result - * @param icer the icer - */ - protected void performDocumentUpsert(NetworkTransaction esGetResult, - IndexableCrossEntityReference icer) { - /** - *

    - *
    - * As part of the response processing we need to do the following:
    - * 1. Extract the version (if present); it will be the ETAG when we use the Search-Abstraction-Service.
    - * 2. Spawn the next task, which is the PUT operation into Elasticsearch, with or without the version tag:
    - *    a) if the version is null or RC=404, do a standard PUT, with no _update/version qualifier
    - *    b) if the version is not null, do a PUT with _update?version=<versionNumber> in the URI to Elasticsearch
    - *
    - */ - String link = null; - try { - link = getElasticFullUrl("/" + icer.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - return; - } - - boolean wasEntryDiscovered = false; - String versionNumber = null; - if (esGetResult.getOperationResult().getResultCode() == 404) { - LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, icer.getEntityPrimaryKeyValue()); - } else if (esGetResult.getOperationResult().getResultCode() == 200) { - wasEntryDiscovered = true; - try { - versionNumber = NodeUtils.extractFieldValueFromObject( - NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), - "_version"); - } catch (IOException exc) { - LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "version Number", - icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); - return; - } - } else { - /* - * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we - * return. - */ - LOG.info(AaiUiMsgs.ES_OPERATION_RETURN_CODE, - String.valueOf(esGetResult.getOperationResult().getResultCode())); - return; - } - - try { - String jsonPayload = null; - if (wasEntryDiscovered) { - try { - ArrayList sourceObject = new ArrayList(); - NodeUtils.extractObjectsByKey( - NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), - "_source", sourceObject); - - if (!sourceObject.isEmpty()) { - String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); - MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); - ObjectReader updater = mapper.readerForUpdating(me); - MergableEntity merged = updater.readValue(icer.getIndexDocumentJson()); - jsonPayload = mapper.writeValueAsString(merged); - } - } catch (IOException exc) { - LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "source value", - icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); - return; - } - } else { - jsonPayload = icer.getIndexDocumentJson(); - } - - if (wasEntryDiscovered) { - if (versionNumber != null && jsonPayload != null) { - - String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), - ElasticSearchConfig.getConfig().getType(), icer.getId(), versionNumber, jsonPayload); - - NetworkTransaction transactionTracker = new NetworkTransaction(); - transactionTracker.setEntityType(esGetResult.getEntityType()); - transactionTracker.setDescriptor(esGetResult.getDescriptor()); - transactionTracker.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), - requestPayload, esDataProvider, transactionTracker), esPutExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetResult, icer); - } - }); - } - - } else { - if (link != null && jsonPayload != null) { - - NetworkTransaction updateElasticTxn = new NetworkTransaction(); - updateElasticTxn.setLink(link); - updateElasticTxn.setEntityType(esGetResult.getEntityType()); - updateElasticTxn.setDescriptor(esGetResult.getDescriptor()); - updateElasticTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), - esPutExecutor).whenComplete((result, error) -> { - - 
esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetResult, icer); - } - }); - } - } - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, exc.getLocalizedMessage()); - } - } - - /** - * Process store document result. - * - * @param esPutResult the es put result - * @param esGetResult the es get result - * @param icer the icer - */ - private void processStoreDocumentResult(NetworkTransaction esPutResult, - NetworkTransaction esGetResult, IndexableCrossEntityReference icer) { - - OperationResult or = esPutResult.getOperationResult(); - - if (!or.wasSuccessful()) { - if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { - - if (shouldAllowRetry(icer.getId())) { - - esWorkOnHand.incrementAndGet(); - - RetryCrossEntitySyncContainer rsc = new RetryCrossEntitySyncContainer(esGetResult, icer); - retryQueue.push(rsc); - - LOG.warn(AaiUiMsgs.ES_CROSS_REF_SYNC_VERSION_CONFLICT); - } - } else { - LOG.error(AaiUiMsgs.ES_CROSS_REF_SYNC_FAILURE, String.valueOf(or.getResultCode()), - or.getResult()); - } - } - } - - /** - * Perform retry sync. - */ - private void performRetrySync() { - while (retryQueue.peek() != null) { - - RetryCrossEntitySyncContainer rsc = retryQueue.poll(); - if (rsc != null) { - - IndexableCrossEntityReference icer = rsc.getIndexableCrossEntityReference(); - NetworkTransaction txn = rsc.getNetworkTransaction(); - - String link = null; - try { - // In this retry flow the icer object has already - // derived its fields - link = getElasticFullUrl("/" + icer.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction retryTransaction = new NetworkTransaction(); - retryTransaction.setLink(link); - retryTransaction.setEntityType(txn.getEntityType()); - retryTransaction.setDescriptor(txn.getDescriptor()); - retryTransaction.setOperationType(HttpMethod.GET); - - /* - * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow and we did - * that for this request already when queuing the failed PUT! - */ - - supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, icer); - } - }); - } - - } - } - } - - /** - * Should allow retry. - * - * @param id the id - * @return true, if successful - */ - private boolean shouldAllowRetry(String id) { - boolean isRetryAllowed = true; - if (retryLimitTracker.get(id) != null) { - Integer currentCount = retryLimitTracker.get(id); - if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { - isRetryAllowed = false; - LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_RESYNC_LIMIT, id); - } else { - Integer newCount = new Integer(currentCount.intValue() + 1); - retryLimitTracker.put(id, newCount); - } - - } else { - Integer firstRetryCount = new Integer(1); - retryLimitTracker.put(id, firstRetryCount); - } - - return isRetryAllowed; - } - - /** - * Gets the populated document. 
- * - * @param entityNode the entity node - * @param resultDescriptor the result descriptor - * @return the populated document - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. - */ - protected IndexableCrossEntityReference getPopulatedDocument(JsonNode entityNode, - OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { - - IndexableCrossEntityReference icer = new IndexableCrossEntityReference(oxmModelLoader); - - icer.setEntityType(resultDescriptor.getEntityName()); - - List primaryKeyValues = new ArrayList(); - String pkeyValue = null; - - for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { - pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); - if (pkeyValue != null) { - primaryKeyValues.add(pkeyValue); - } else { - LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); - } - } - - final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); - icer.setEntityPrimaryKeyValue(primaryCompositeKeyValue); - - return icer; - - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/ElasticSearchIndexCleaner.java b/src/main/java/org/onap/aai/sparky/synchronizer/ElasticSearchIndexCleaner.java deleted file mode 100644 index 59942dc..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/ElasticSearchIndexCleaner.java +++ /dev/null @@ -1,795 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; - -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestDataProvider; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.entity.ObjectIdCollection; -import org.onap.aai.sparky.synchronizer.entity.SearchableEntity; -import org.onap.aai.sparky.synchronizer.enumeration.OperationState; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - -/** - * The Class ElasticSearchIndexCleaner. 
- */ -public class ElasticSearchIndexCleaner implements IndexCleaner { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(ElasticSearchIndexCleaner.class); - - private static final String BULK_OP_LINE_TEMPLATE = "%s\n"; - private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; - - private ObjectIdCollection before; - private ObjectIdCollection after; - - private String host; - private String port; - - private String indexName; - private String indexType; - private int scrollContextTimeToLiveInMinutes; - private int numItemsToGetBulkRequest; - - private RestDataProvider restDataProvider; - private ObjectMapper mapper; - - /** - * Instantiates a new elastic search index cleaner. - * - * @param restDataProvider the rest data provider - * @param indexName the index name - * @param indexType the index type - * @param host the host - * @param port the port - * @param scrollContextTimeToLiveInMinutes the scroll context time to live in minutes - * @param numItemsToGetBulkRequest the num items to get bulk request - */ - protected ElasticSearchIndexCleaner(RestDataProvider restDataProvider, String indexName, - String indexType, String host, String port, int scrollContextTimeToLiveInMinutes, - int numItemsToGetBulkRequest) { - this.restDataProvider = restDataProvider; - this.before = null; - this.after = null; - this.indexName = indexName; - this.indexType = indexType; - this.mapper = new ObjectMapper(); - this.host = host; - this.port = port; - this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes; - this.numItemsToGetBulkRequest = numItemsToGetBulkRequest; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexCleaner#populatePreOperationCollection() - */ - @Override - public OperationState populatePreOperationCollection() { - - try { - before = retrieveAllDocumentIdentifiers(); - return OperationState.OK; - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, indexName, exc.getMessage()); - return OperationState.ERROR; - } - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexCleaner#populatePostOperationCollection() - */ - @Override - public OperationState populatePostOperationCollection() { - try { - after = retrieveAllDocumentIdentifiers(); - return OperationState.OK; - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, indexName, exc.getMessage()); - return OperationState.ERROR; - } - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexCleaner#performCleanup() - */ - @Override - public OperationState performCleanup() { - // TODO Auto-generated method stub - LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP, indexName); - - int sizeBefore = before.getSize(); - int sizeAfter = after.getSize(); - - LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP_SIZE, String.valueOf(sizeBefore), - String.valueOf(sizeAfter)); - - /* - * If the processedImportIds size <= 0, then something has failed in the sync operation and we - * shouldn't do the selective delete right now. 
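Editorial note: the selective cleanup described above amounts to a set difference between the pre-sync and post-sync id collections, with the orphaned ids deleted in bounded batches. A minimal, self-contained sketch follows; it is illustrative only, and the document ids and batch size are made up.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class OrphanCleanupSketch {

      public static void main(String[] args) {
        // Document ids present before the sync and after the sync (hypothetical values).
        Set<String> before = new HashSet<>(Arrays.asList("doc-1", "doc-2", "doc-3", "doc-4", "doc-5"));
        Set<String> after = new HashSet<>(Arrays.asList("doc-2", "doc-4"));

        // Anything that existed before but was not re-imported is an orphan to delete.
        Set<String> orphans = new HashSet<>(before);
        orphans.removeAll(after);

        // Delete in bounded batches, mirroring numItemsToGetBulkRequest.
        int batchSize = 2;
        List<String> batch = new ArrayList<>();
        for (String id : orphans) {
          batch.add(id);
          if (batch.size() >= batchSize) {
            System.out.println("bulk delete: " + batch);   // stand-in for bulkDelete(batch)
            batch.clear();
          }
        }
        if (!batch.isEmpty()) {
          System.out.println("bulk delete: " + batch);     // flush the remainder
        }
      }
    }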
- */ - - if (sizeAfter > 0) { - - Collection presyncIds = before.getImportedObjectIdsAsValues(); - presyncIds.removeAll(after.getImportedObjectIdsAsValues()); - - try { - LOG.info(AaiUiMsgs.ES_SYNC_SELECTIVE_DELETE, indexName, indexType, - String.valueOf(presyncIds.size())); - - ObjectIdCollection bulkIds = new ObjectIdCollection(); - - Iterator it = presyncIds.iterator(); - int numItemsInBulkRequest = 0; - int numItemsRemainingToBeDeleted = presyncIds.size(); - - while (it.hasNext()) { - - bulkIds.addObjectId(it.next()); - numItemsInBulkRequest++; - - if (numItemsInBulkRequest >= this.numItemsToGetBulkRequest) { - LOG.info(AaiUiMsgs.ES_BULK_DELETE, indexName, String.valueOf(bulkIds.getSize())); - OperationResult bulkDeleteResult = bulkDelete(bulkIds.getImportedObjectIdsAsValues()); - // pegCountersForElasticBulkDelete(bulkDeleteResult); - numItemsRemainingToBeDeleted -= numItemsInBulkRequest; - numItemsInBulkRequest = 0; - bulkIds.clear(); - } - } - - if (numItemsRemainingToBeDeleted > 0) { - LOG.info(AaiUiMsgs.ES_BULK_DELETE, indexName, String.valueOf(bulkIds.getSize())); - OperationResult bulkDeleteResult = bulkDelete(bulkIds.getImportedObjectIdsAsValues()); - // pegCountersForElasticBulkDelete(bulkDeleteResult); - } - - - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_BULK_DELETE_ERROR, indexName, exc.getLocalizedMessage()); - - } - } - - return OperationState.OK; - } - - @Override - public String getIndexName() { - return indexName; - } - - public void setIndexName(String indexName) { - this.indexName = indexName; - } - - /** - * Builds the initial scroll request payload. - * - * @param numItemsToGetPerRequest the num items to get per request - * @param fieldList the field list - * @return the string - * @throws JsonProcessingException the json processing exception - */ - protected String buildInitialScrollRequestPayload(int numItemsToGetPerRequest, - List fieldList) throws JsonProcessingException { - - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("size", numItemsToGetPerRequest); - - ArrayNode fields = mapper.createArrayNode(); - - for (String f : fieldList) { - fields.add(f); - } - - rootNode.set("fields", fields); - - ObjectNode queryNode = mapper.createObjectNode(); - queryNode.set("match_all", mapper.createObjectNode()); - - rootNode.set("query", queryNode); - - return mapper.writeValueAsString(rootNode); - - } - - /** - * Builds the subsequent scroll context request payload. - * - * @param scrollId the scroll id - * @param contextTimeToLiveInMinutes the context time to live in minutes - * @return the string - * @throws JsonProcessingException the json processing exception - */ - protected String buildSubsequentScrollContextRequestPayload(String scrollId, - int contextTimeToLiveInMinutes) throws JsonProcessingException { - - ObjectNode rootNode = mapper.createObjectNode(); - - rootNode.put("scroll", contextTimeToLiveInMinutes + "m"); - rootNode.put("scroll_id", scrollId); - - return mapper.writeValueAsString(rootNode); - - } - - /** - * Parses the elastic search result. - * - * @param jsonResult the json result - * @return the json node - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. - */ - protected JsonNode parseElasticSearchResult(String jsonResult) - throws JsonProcessingException, IOException { - ObjectMapper mapper = new ObjectMapper(); - return mapper.readTree(jsonResult); - } - - /** - * Lookup index doc. 
- * - * @param ids the ids - * @param docs the docs - * @return the array list - */ - protected ArrayList lookupIndexDoc(ArrayList ids, - List docs) { - ArrayList objs = new ArrayList(); - - if (ids != null && docs != null) { - for (SearchableEntity d : docs) { - if (ids.contains(d.getId())) { - objs.add(d); - } - } - } - - return objs; - } - - /** - * Builds the delete data object. - * - * @param index the index - * @param type the type - * @param id the id - * @return the object node - */ - protected ObjectNode buildDeleteDataObject(String index, String type, String id) { - - ObjectNode indexDocProperties = mapper.createObjectNode(); - - indexDocProperties.put("_index", index); - indexDocProperties.put("_type", type); - indexDocProperties.put("_id", id); - - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.set("delete", indexDocProperties); - - return rootNode; - } - - /** - * This method might appear to be a little strange, and is simply an optimization to take an - * elipsed JsonNode key path and retrieve the node at the end of the path, if it exists. - * - * @param startNode the start node - * @param fieldPath the field path - * @return the node path - */ - protected JsonNode getNodePath(JsonNode startNode, String... fieldPath) { - - JsonNode jsonNode = null; - - for (String field : fieldPath) { - if (jsonNode == null) { - jsonNode = startNode.get(field); - } else { - jsonNode = jsonNode.get(field); - } - - /* - * This is our safety net in case any intermediate path returns a null - */ - - if (jsonNode == null) { - return null; - } - - } - - return jsonNode; - } - - /** - * Gets the full url. - * - * @param resourceUrl the resource url - * @return the full url - */ - private String getFullUrl(String resourceUrl) { - return String.format("http://%s:%s%s", host, port, resourceUrl); - } - - /** - * Retrieve all document identifiers. - * - * @return the object id collection - * @throws IOException Signals that an I/O exception has occurred. - */ - public ObjectIdCollection retrieveAllDocumentIdentifiers() throws IOException { - - ObjectIdCollection currentDocumentIds = new ObjectIdCollection(); - - long opStartTimeInMs = System.currentTimeMillis(); - - List fields = new ArrayList(); - fields.add("_id"); - // fields.add("entityType"); - - String scrollRequestPayload = - buildInitialScrollRequestPayload(this.numItemsToGetBulkRequest, fields); - - final String fullUrlStr = getFullUrl("/" + indexName + "/" + indexType + "/_search?scroll=" - + this.scrollContextTimeToLiveInMinutes + "m"); - - OperationResult result = - restDataProvider.doPost(fullUrlStr, scrollRequestPayload, "application/json"); - - if (result.wasSuccessful()) { - - JsonNode rootNode = parseElasticSearchResult(result.getResult()); - - /* - * Check the result for success / failure, and enumerate all the index ids that resulted in - * success, and ignore the ones that failed or log them so we have a record of the failure. - */ - int totalRecordsAvailable = 0; - String scrollId = null; - int numRecordsFetched = 0; - - if (rootNode != null) { - - scrollId = getFieldValue(rootNode, "_scroll_id"); - final String tookStr = getFieldValue(rootNode, "took"); - int tookInMs = (tookStr == null) ? 
0 : Integer.parseInt(tookStr); - boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); - - if (timedOut) { - LOG.error(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "all document Identifiers", - String.valueOf(tookInMs)); - } else { - LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "all document Identifiers", - String.valueOf(tookInMs)); - } - - JsonNode hitsNode = rootNode.get("hits"); - totalRecordsAvailable = Integer.parseInt(hitsNode.get("total").asText()); - - LOG.info(AaiUiMsgs.COLLECT_TOTAL, "all document Identifiers", - String.valueOf(totalRecordsAvailable)); - - /* - * Collect all object ids - */ - - ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); - - Iterator nodeIterator = hitsArray.iterator(); - - String key = null; - String value = null; - JsonNode jsonNode = null; - - while (nodeIterator.hasNext()) { - - jsonNode = nodeIterator.next(); - - key = getFieldValue(jsonNode, "_id"); - - if (key != null) { - currentDocumentIds.addObjectId(key); - } - - /* - * if (key != null) { - * - * JsonNode fieldsNode = jNode.get("fields"); - * - * if (fieldsNode != null) { - * - * JsonNode entityTypeNode = fieldsNode.get("entityType"); - * - * if (entityTypeNode != null) { ArrayNode aNode = (ArrayNode) entityTypeNode; - * - * if (aNode.size() > 0) { value = aNode.get(0).asText(); objAndtTypesMap.put(key, value); - * numRecordsFetched++; } } } } - */ - - } - - int totalRecordsRemainingToFetch = (totalRecordsAvailable - numRecordsFetched); - - int numRequiredAdditionalFetches = - (totalRecordsRemainingToFetch / this.numItemsToGetBulkRequest); - - /* - * Do an additional fetch for the remaining items (if needed) - */ - - if (totalRecordsRemainingToFetch % numItemsToGetBulkRequest != 0) { - numRequiredAdditionalFetches += 1; - } - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.SYNC_NUMBER_REQ_FETCHES, - String.valueOf(numRequiredAdditionalFetches)); - } - - - for (int x = 0; x < numRequiredAdditionalFetches; x++) { - - if (collectItemsFromScrollContext(scrollId, currentDocumentIds) != OperationState.OK) { - // abort the whole thing because now we can't reliably cleanup the orphans. - throw new IOException( - "Failed to collect pre-sync doc collection from index. Aborting operation"); - } - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.SYNC_NUMBER_TOTAL_FETCHES, - String.valueOf(currentDocumentIds.getSize()), - String.valueOf(totalRecordsAvailable)); - } - - } - - } - - } else { - // scroll context get failed, nothing else to do - LOG.error(AaiUiMsgs.ERROR_GENERIC, result.toString()); - } - - LOG.info(AaiUiMsgs.COLLECT_TOTAL_TIME, "all document Identifiers", - String.valueOf((System.currentTimeMillis() - opStartTimeInMs))); - - return currentDocumentIds; - - } - - /** - * Collect items from scroll context. - * - * @param scrollId the scroll id - * @param objectIds the object ids - * @return the operation state - * @throws IOException Signals that an I/O exception has occurred. 
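The number of follow-up scroll fetches computed above is a ceiling division over the remaining document ids; a small sketch with illustrative numbers:

```java
public class FetchCountSketch {
  public static void main(String[] args) {
    // Illustrative values: 12,500 ids reported available, 5,000 ids pulled per scroll request.
    int totalRecordsAvailable = 12_500;
    int numItemsPerRequest = 5_000;
    int numRecordsFetched = 0; // the first response only opens the scroll context here

    int remaining = totalRecordsAvailable - numRecordsFetched;
    int additionalFetches = remaining / numItemsPerRequest;   // 2 full pages
    if (remaining % numItemsPerRequest != 0) {
      additionalFetches += 1;                                 // +1 for the 2,500 left over
    }
    System.out.println(additionalFetches);                    // 3
  }
}
```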
- */ - private OperationState collectItemsFromScrollContext(String scrollId, - ObjectIdCollection objectIds) throws IOException { - - // ObjectIdCollection documentIdCollection = new ObjectIdCollection(); - - String requestPayload = - buildSubsequentScrollContextRequestPayload(scrollId, scrollContextTimeToLiveInMinutes); - - final String fullUrlStr = getFullUrl("/_search/scroll"); - - OperationResult opResult = - restDataProvider.doPost(fullUrlStr, requestPayload, "application/json"); - - if (opResult.getResultCode() >= 300) { - LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, opResult.getResult()); - return OperationState.ERROR; - } - - JsonNode rootNode = parseElasticSearchResult(opResult.getResult()); - - /* - * Check the result for success / failure, and enumerate all the index ids that resulted in - * success, and ignore the ones that failed or log them so we have a record of the failure. - */ - - if (rootNode != null) { - boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); - final String tookStr = getFieldValue(rootNode, "took"); - int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr); - - JsonNode hitsNode = rootNode.get("hits"); - - if (timedOut) { - LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "Scroll Context", String.valueOf(tookInMs)); - } else { - LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "Scroll Context", String.valueOf(tookInMs)); - } - - /* - * Collect all object ids - */ - - ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); - String key = null; - String value = null; - JsonNode jsonNode = null; - - Iterator nodeIterator = hitsArray.iterator(); - - while (nodeIterator.hasNext()) { - - jsonNode = nodeIterator.next(); - - key = getFieldValue(jsonNode, "_id"); - - if (key != null) { - objectIds.addObjectId(key); - - /* - * JsonNode fieldsNode = jNode.get("fields"); - * - * if (fieldsNode != null) { - * - * JsonNode entityTypeNode = fieldsNode.get("entityType"); - * - * if (entityTypeNode != null) { ArrayNode aNode = (ArrayNode) entityTypeNode; - * - * if (aNode.size() > 0) { value = aNode.get(0).asText(); objectIdsAndTypes.put(key, - * value); } } } } - */ - - } - - } - } else { - // scroll context get failed, nothing else to do - LOG.error(AaiUiMsgs.ERROR_GENERIC, opResult.toString()); - } - - return OperationState.OK; - } - - /** - * Gets the field value. - * - * @param node the node - * @param fieldName the field name - * @return the field value - */ - protected String getFieldValue(JsonNode node, String fieldName) { - - JsonNode field = node.get(fieldName); - - if (field != null) { - return field.asText(); - } - - return null; - - } - - /** - * Bulk delete. - * - * @param docIds the doc ids - * @return the operation result - * @throws IOException Signals that an I/O exception has occurred. 
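The bulkDelete implementation that follows assembles a newline-delimited body for the _bulk endpoint out of per-document delete actions; a hedged sketch of that body shape (index name, type, and ids are placeholders):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.Arrays;
import java.util.List;

public class BulkDeleteBodySketch {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();
    List<String> docIds = Arrays.asList("doc-1", "doc-2"); // illustrative ids

    StringBuilder body = new StringBuilder();
    for (String id : docIds) {
      ObjectNode meta = mapper.createObjectNode();
      meta.put("_index", "entitysearchindex"); // illustrative index name
      meta.put("_type", "default");            // illustrative type
      meta.put("_id", id);
      ObjectNode action = mapper.createObjectNode();
      action.set("delete", meta);
      body.append(action.toString()).append("\n"); // one standalone action line per document
    }
    // The resulting text is POSTed to <es-host>/_bulk.
    System.out.print(body);
  }
}
```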
- */ - public OperationResult bulkDelete(Collection docIds) throws IOException { - - if (docIds == null || docIds.size() == 0) { - LOG.info(AaiUiMsgs.ES_BULK_DELETE_SKIP); - return new OperationResult(500, - "Skipping bulkDelete(); operation because docs to delete list is empty"); - } - - LOG.info(AaiUiMsgs.ES_BULK_DELETE_START, String.valueOf(docIds.size())); - - StringBuilder sb = new StringBuilder(128); - - for (String id : docIds) { - sb.append( - String.format(BULK_OP_LINE_TEMPLATE, buildDeleteDataObject(indexName, indexType, id))); - } - - sb.append("\n"); - - final String fullUrlStr = getFullUrl("/_bulk"); - - return restDataProvider.doPost(fullUrlStr, sb.toString(), "application/x-www-form-urlencoded"); - - } - - /** - * @return the before - */ - public ObjectIdCollection getBefore() { - return before; - } - - /** - * @param before the before to set - */ - public void setBefore(ObjectIdCollection before) { - this.before = before; - } - - /** - * @return the after - */ - public ObjectIdCollection getAfter() { - return after; - } - - /** - * @param after the after to set - */ - public void setAfter(ObjectIdCollection after) { - this.after = after; - } - - /** - * @return the host - */ - public String getHost() { - return host; - } - - /** - * @param host the host to set - */ - public void setHost(String host) { - this.host = host; - } - - /** - * @return the port - */ - public String getPort() { - return port; - } - - /** - * @param port the port to set - */ - public void setPort(String port) { - this.port = port; - } - - /** - * @return the indexType - */ - public String getIndexType() { - return indexType; - } - - /** - * @param indexType the indexType to set - */ - public void setIndexType(String indexType) { - this.indexType = indexType; - } - - /** - * @return the scrollContextTimeToLiveInMinutes - */ - public int getScrollContextTimeToLiveInMinutes() { - return scrollContextTimeToLiveInMinutes; - } - - /** - * @param scrollContextTimeToLiveInMinutes the scrollContextTimeToLiveInMinutes to set - */ - public void setScrollContextTimeToLiveInMinutes(int scrollContextTimeToLiveInMinutes) { - this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes; - } - - /** - * @return the numItemsToGetBulkRequest - */ - public int getNumItemsToGetBulkRequest() { - return numItemsToGetBulkRequest; - } - - /** - * @param numItemsToGetBulkRequest the numItemsToGetBulkRequest to set - */ - public void setNumItemsToGetBulkRequest(int numItemsToGetBulkRequest) { - this.numItemsToGetBulkRequest = numItemsToGetBulkRequest; - } - - /** - * @return the restDataProvider - */ - public RestDataProvider getRestDataProvider() { - return restDataProvider; - } - - /** - * @param restDataProvider the restDataProvider to set - */ - public void setRestDataProvider(RestDataProvider restDataProvider) { - this.restDataProvider = restDataProvider; - } - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - - /** - * @return the bulkOpLineTemplate - */ - public static String getBulkOpLineTemplate() { - return BULK_OP_LINE_TEMPLATE; - } - - /** - * @return the timestampFormat - */ - public static String getTimestampFormat() { - return TIMESTAMP_FORMAT; - } - - /* - - */ - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/IndexCleaner.java 
b/src/main/java/org/onap/aai/sparky/synchronizer/IndexCleaner.java deleted file mode 100644 index 4edab03..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/IndexCleaner.java +++ /dev/null @@ -1,55 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import org.onap.aai.sparky.synchronizer.enumeration.OperationState; - -/** - * The Interface IndexCleaner. - */ -public interface IndexCleaner { - - /** - * Populate pre operation collection. - * - * @return the operation state - */ - public OperationState populatePreOperationCollection(); - - /** - * Populate post operation collection. - * - * @return the operation state - */ - public OperationState populatePostOperationCollection(); - - /** - * Perform cleanup. - * - * @return the operation state - */ - public OperationState performCleanup(); - - public String getIndexName(); - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/IndexIntegrityValidator.java b/src/main/java/org/onap/aai/sparky/synchronizer/IndexIntegrityValidator.java deleted file mode 100644 index b85dabc..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/IndexIntegrityValidator.java +++ /dev/null @@ -1,227 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.synchronizer; - -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestDataProvider; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - -/** - * The Class IndexIntegrityValidator. - * - * @author davea. - */ -public class IndexIntegrityValidator implements IndexValidator { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(IndexIntegrityValidator.class); - - private String host; - - /** - * @return the host - */ - public String getHost() { - return host; - } - - /** - * @param host the host to set - */ - public void setHost(String host) { - this.host = host; - } - - /** - * @return the port - */ - public String getPort() { - return port; - } - - /** - * @param port the port to set - */ - public void setPort(String port) { - this.port = port; - } - - /** - * @return the tableConfigJson - */ - public String getTableConfigJson() { - return tableConfigJson; - } - - /** - * @param tableConfigJson the tableConfigJson to set - */ - public void setTableConfigJson(String tableConfigJson) { - this.tableConfigJson = tableConfigJson; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - - /** - * @return the restDataProvider - */ - public RestDataProvider getRestDataProvider() { - return restDataProvider; - } - - private String port; - private String indexName; - private String indexType; - private String tableConfigJson; - - private final RestDataProvider restDataProvider; - - /** - * Instantiates a new index integrity validator. - * - * @param restDataProvider the rest data provider - * @param indexName the index name - * @param indexType the index type - * @param host the host - * @param port the port - * @param tableConfigJson the table config json - */ - public IndexIntegrityValidator(RestDataProvider restDataProvider, String indexName, - String indexType, String host, String port, String tableConfigJson) { - this.restDataProvider = restDataProvider; - this.host = host; - this.port = port; - this.indexName = indexName; - this.indexType = indexType; - this.tableConfigJson = tableConfigJson; - } - - @Override - public String getIndexName() { - return indexName; - } - - public void setIndexName(String indexName) { - this.indexName = indexName; - } - - public String getIndexType() { - return indexType; - } - - public void setIndexType(String indexType) { - this.indexType = indexType; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexValidator#exists() - */ - @Override - public boolean exists() { - final String fullUrlStr = getFullUrl("/" + indexName + "/"); - OperationResult existsResult = restDataProvider.doHead(fullUrlStr, "application/json"); - - int rc = existsResult.getResultCode(); - - if (rc >= 200 && rc < 300) { - LOG.info(AaiUiMsgs.INDEX_EXISTS, indexName); - return true; - } else { - LOG.info(AaiUiMsgs.INDEX_NOT_EXIST, indexName); - return false; - } - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexValidator#integrityValid() - */ - @Override - public boolean integrityValid() { - // TODO Auto-generated method stub - // logger.info("; - // System.out.println("IndexIntegrityValidator.integrityValid() for - // indexName = " + indexName); - return true; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexValidator#createOrRepair() - */ - @Override - public void createOrRepair() { - // TODO 
Auto-generated method stub - String message = "IndexIntegrityValidator.createOrRepair() for indexName = " + indexName; - LOG.info(AaiUiMsgs.INFO_GENERIC, message); - - final String fullUrlStr = getFullUrl("/" + indexName + "/"); - OperationResult createResult = - restDataProvider.doPut(fullUrlStr, tableConfigJson, "application/json"); - - int rc = createResult.getResultCode(); - - if (rc >= 200 && rc < 300) { - LOG.info(AaiUiMsgs.INDEX_RECREATED, indexName); - } else if (rc == 400) { - LOG.info(AaiUiMsgs.INDEX_ALREADY_EXISTS, indexName); - } else { - LOG.warn(AaiUiMsgs.INDEX_INTEGRITY_CHECK_FAILED, indexName, createResult.getResult()); - } - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexValidator#destroyIndex() - */ - @Override - public void destroyIndex() { - // TODO Auto-generated method stub - // we don't do this for now - - } - - /** - * Gets the full url. - * - * @param resourceUrl the resource url - * @return the full url - */ - private String getFullUrl(String resourceUrl) { - return String.format("http://%s:%s%s", host, port, resourceUrl); - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/IndexSynchronizer.java b/src/main/java/org/onap/aai/sparky/synchronizer/IndexSynchronizer.java deleted file mode 100644 index f1c6741..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/IndexSynchronizer.java +++ /dev/null @@ -1,65 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import org.onap.aai.sparky.synchronizer.enumeration.OperationState; -import org.onap.aai.sparky.synchronizer.enumeration.SynchronizerState; - -/** - * The Interface IndexSynchronizer. - * - * @author davea. - */ -public interface IndexSynchronizer { - - /** - * Do sync. - * - * @return the operation state - */ - public OperationState doSync(); - - public SynchronizerState getState(); - - /** - * Gets the stat report. - * - * @param finalReport the final report - * @return the stat report - */ - public String getStatReport(boolean finalReport); - - /** - * Shutdown. - */ - public void shutdown(); - - public String getIndexName(); - - /** - * Clear cache. 
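The exists()/createOrRepair() pair above amounts to a HEAD-then-PUT bootstrap against the index URL; a minimal sketch of that pattern using plain HttpURLConnection (host, index name, and mapping JSON are assumed placeholders, not the project's own REST provider):

```java
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class IndexBootstrapSketch {

  static void ensureIndex(String baseUrl, String indexName, String mappingJson) throws Exception {
    URL indexUrl = new URL(baseUrl + "/" + indexName + "/");

    // 1) HEAD the index; a 2xx answer means it already exists.
    HttpURLConnection head = (HttpURLConnection) indexUrl.openConnection();
    head.setRequestMethod("HEAD");
    int rc = head.getResponseCode();
    head.disconnect();
    if (rc >= 200 && rc < 300) {
      return;
    }

    // 2) Otherwise PUT the index with its settings/mappings document.
    HttpURLConnection put = (HttpURLConnection) indexUrl.openConnection();
    put.setRequestMethod("PUT");
    put.setDoOutput(true);
    put.setRequestProperty("Content-Type", "application/json");
    try (OutputStream os = put.getOutputStream()) {
      os.write(mappingJson.getBytes(StandardCharsets.UTF_8));
    }
    // 2xx: created; 400 typically means it already exists (e.g. created concurrently).
    System.out.println("create returned " + put.getResponseCode());
    put.disconnect();
  }

  public static void main(String[] args) throws Exception {
    // Requires a reachable instance; URL and index name are illustrative.
    ensureIndex("http://localhost:9200", "entitysearchindex-sketch", "{}");
  }
}
```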
- */ - public void clearCache(); - - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/IndexValidator.java b/src/main/java/org/onap/aai/sparky/synchronizer/IndexValidator.java deleted file mode 100644 index ae2f6f9..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/IndexValidator.java +++ /dev/null @@ -1,56 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -/** - * The Interface IndexValidator. - */ -public interface IndexValidator { - - /** - * Exists. - * - * @return true, if successful - */ - public boolean exists(); - - /** - * Integrity valid. - * - * @return true, if successful - */ - public boolean integrityValid(); - - /** - * Creates the or repair. - */ - public void createOrRepair(); - - /** - * Destroy index. - */ - public void destroyIndex(); - - public String getIndexName(); - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/MyErrorHandler.java b/src/main/java/org/onap/aai/sparky/synchronizer/MyErrorHandler.java deleted file mode 100644 index 7a55b15..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/MyErrorHandler.java +++ /dev/null @@ -1,111 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.synchronizer; - -import java.io.PrintWriter; - -import org.xml.sax.ErrorHandler; -import org.xml.sax.SAXException; -import org.xml.sax.SAXParseException; - -/** - * The Class MyErrorHandler. - */ -public class MyErrorHandler implements ErrorHandler { - - /** Error handler output goes here. */ - private PrintWriter out; - - /** - * @return the out - */ - public PrintWriter getOut() { - return out; - } - - /** - * @param out the out to set - */ - public void setOut(PrintWriter out) { - this.out = out; - } - - /** - * Instantiates a new my error handler. - * - * @param out the out - */ - public MyErrorHandler(PrintWriter out) { - this.out = out; - } - - /** - * Returns a string describing parse exception details. - * - * @param spe the spe - * @return the parses the exception info - */ - private String getParseExceptionInfo(SAXParseException spe) { - String systemId = spe.getSystemId(); - if (systemId == null) { - systemId = "null"; - } - String info = "URI=" + systemId + " Line=" + spe.getLineNumber() + ": " + spe.getMessage(); - return info; - } - - // The following methods are standard SAX ErrorHandler methods. - // See SAX documentation for more info. - - /* - * (non-Javadoc) - * - * @see org.xml.sax.ErrorHandler#warning(org.xml.sax.SAXParseException) - */ - @Override - public void warning(SAXParseException spe) throws SAXException { - out.println("Warning: " + getParseExceptionInfo(spe)); - } - - /* - * (non-Javadoc) - * - * @see org.xml.sax.ErrorHandler#error(org.xml.sax.SAXParseException) - */ - @Override - public void error(SAXParseException spe) throws SAXException { - String message = "Error: " + getParseExceptionInfo(spe); - throw new SAXException(message); - } - - /* - * (non-Javadoc) - * - * @see org.xml.sax.ErrorHandler#fatalError(org.xml.sax.SAXParseException) - */ - @Override - public void fatalError(SAXParseException spe) throws SAXException { - String message = "Fatal Error: " + getParseExceptionInfo(spe); - throw new SAXException(message); - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/SearchableEntitySynchronizer.java b/src/main/java/org/onap/aai/sparky/synchronizer/SearchableEntitySynchronizer.java deleted file mode 100644 index e10163f..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/SearchableEntitySynchronizer.java +++ /dev/null @@ -1,767 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import org.onap.aai.cl.mdc.MdcContext; - -import org.onap.aai.cl.mdc.MdcContext; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.node.ArrayNode; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Deque; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ExecutorService; -import java.util.function.Supplier; - -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.HttpMethod; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.config.SynchronizerConfiguration; -import org.onap.aai.sparky.synchronizer.entity.MergableEntity; -import org.onap.aai.sparky.synchronizer.entity.SearchableEntity; -import org.onap.aai.sparky.synchronizer.entity.SelfLinkDescriptor; -import org.onap.aai.sparky.synchronizer.enumeration.OperationState; -import org.onap.aai.sparky.synchronizer.enumeration.SynchronizerState; -import org.onap.aai.sparky.synchronizer.task.PerformActiveInventoryRetrieval; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchPut; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchRetrieval; -import org.onap.aai.sparky.synchronizer.task.PerformElasticSearchUpdate; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.slf4j.MDC; - -/** - * The Class SearchableEntitySynchronizer. - */ -public class SearchableEntitySynchronizer extends AbstractEntitySynchronizer - implements IndexSynchronizer { - - /** - * The Class RetrySearchableEntitySyncContainer. - */ - private class RetrySearchableEntitySyncContainer { - NetworkTransaction txn; - SearchableEntity se; - - /** - * Instantiates a new retry searchable entity sync container. - * - * @param txn the txn - * @param se the se - */ - public RetrySearchableEntitySyncContainer(NetworkTransaction txn, SearchableEntity se) { - this.txn = txn; - this.se = se; - } - - public NetworkTransaction getNetworkTransaction() { - return txn; - } - - public SearchableEntity getSearchableEntity() { - return se; - } - } - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(SearchableEntitySynchronizer.class); - - private boolean allWorkEnumerated; - private Deque selflinks; - private Deque retryQueue; - private Map retryLimitTracker; - protected ExecutorService esPutExecutor; - - /** - * Instantiates a new searchable entity synchronizer. 
- * - * @param indexName the index name - * @throws Exception the exception - */ - public SearchableEntitySynchronizer(String indexName) throws Exception { - super(LOG, "SES", 2, 5, 5, indexName); - this.allWorkEnumerated = false; - this.selflinks = new ConcurrentLinkedDeque(); - this.retryQueue = new ConcurrentLinkedDeque(); - this.retryLimitTracker = new ConcurrentHashMap(); - this.synchronizerName = "Searchable Entity Synchronizer"; - this.esPutExecutor = NodeUtils.createNamedExecutor("SES-ES-PUT", 5, LOG); - this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors( - oxmModelLoader.getSearchableEntityDescriptors()); - this.esEntityStats.initializeCountersFromOxmEntityDescriptors( - oxmModelLoader.getSearchableEntityDescriptors()); - this.syncDurationInMs = -1; - } - - /** - * Collect all the work. - * - * @return the operation state - */ - private OperationState collectAllTheWork() { - final Map contextMap = MDC.getCopyOfContextMap(); - Map descriptorMap = - oxmModelLoader.getSearchableEntityDescriptors(); - - if (descriptorMap.isEmpty()) { - LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES); - LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES); - return OperationState.ERROR; - } - - Collection syncTypes = descriptorMap.keySet(); - - /* - * Collection syncTypes = new ArrayList(); syncTypes.add("service-instance"); - */ - - try { - - /* - * launch a parallel async thread to process the documents for each entity-type (to max the of - * the configured executor anyway) - */ - - aaiWorkOnHand.set(syncTypes.size()); - - for (String key : syncTypes) { - - supplyAsync(new Supplier() { - - @Override - public Void get() { - MDC.setContextMap(contextMap); - OperationResult typeLinksResult = null; - try { - typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key); - aaiWorkOnHand.decrementAndGet(); - processEntityTypeSelfLinks(typeLinksResult); - } catch (Exception exc) { - // TODO -> LOG, what should be logged here? - } - - return null; - } - - }, aaiExecutor).whenComplete((result, error) -> { - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "An error occurred getting data from AAI. Error = " + error.getMessage()); - } - }); - - } - - while (aaiWorkOnHand.get() != 0) { - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); - } - - Thread.sleep(1000); - } - - aaiWorkOnHand.set(selflinks.size()); - allWorkEnumerated = true; - syncEntityTypes(); - - while (!isSyncDone()) { - performRetrySync(); - Thread.sleep(1000); - } - - /* - * Make sure we don't hang on to retries that failed which could cause issues during future - * syncs - */ - retryLimitTracker.clear(); - - } catch (Exception exc) { - // TODO -> LOG, waht should be logged here? - } - - return OperationState.OK; - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#doSync() - */ - @Override - public OperationState doSync() { - this.syncDurationInMs = -1; - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "SearchableEntitySynchronizer", "", "Sync", ""); - - resetCounters(); - this.allWorkEnumerated = false; - syncStartedTimeStampInMs = System.currentTimeMillis(); - collectAllTheWork(); - - return OperationState.OK; - } - - /** - * Process entity type self links. 
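collectAllTheWork() above fans the per-entity-type self-link fetches out with supplyAsync and then polls an AtomicInteger work counter until every task has reported back; a stripped-down sketch of that pattern (entity types and pool size are illustrative):

```java
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class FanOutSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative entity types; the real list comes from the OXM searchable descriptors.
    List<String> entityTypes = Arrays.asList("generic-vnf", "pserver", "vserver");
    ExecutorService pool = Executors.newFixedThreadPool(3);
    AtomicInteger workOnHand = new AtomicInteger(entityTypes.size());

    for (String type : entityTypes) {
      CompletableFuture.supplyAsync(() -> {
        // Placeholder for the per-type self-link retrieval done against AAI.
        return type + ": fetched";
      }, pool).whenComplete((result, error) -> {
        workOnHand.decrementAndGet();
        if (error != null) {
          System.err.println("fetch failed: " + error.getMessage());
        } else {
          System.out.println(result);
        }
      });
    }

    // Poll until every asynchronous fetch has checked back in, as the synchronizer does.
    while (workOnHand.get() != 0) {
      TimeUnit.MILLISECONDS.sleep(100);
    }
    pool.shutdown();
  }
}
```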
- * - * @param operationResult the operation result - */ - private void processEntityTypeSelfLinks(OperationResult operationResult) { - - JsonNode rootNode = null; - - final String jsonResult = operationResult.getResult(); - - if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { - - try { - rootNode = mapper.readTree(jsonResult); - } catch (IOException exc) { - String message = "Could not deserialize JSON (representing operation result) as node tree. " - + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); - } - - JsonNode resultData = rootNode.get("result-data"); - ArrayNode resultDataArrayNode = null; - - if (resultData.isArray()) { - resultDataArrayNode = (ArrayNode) resultData; - - Iterator elementIterator = resultDataArrayNode.elements(); - JsonNode element = null; - - while (elementIterator.hasNext()) { - element = elementIterator.next(); - - final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); - final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); - - OxmEntityDescriptor descriptor = null; - - if (resourceType != null && resourceLink != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(resourceType); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); - // go to next element in iterator - continue; - } - - if (descriptor.hasSearchableAttributes()) { - selflinks.add(new SelfLinkDescriptor(resourceLink, - SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType)); - } - - } - } - } - } - - } - - /** - * Sync entity types. - */ - private void syncEntityTypes() { - - while (selflinks.peek() != null) { - - SelfLinkDescriptor linkDescriptor = selflinks.poll(); - aaiWorkOnHand.decrementAndGet(); - - OxmEntityDescriptor descriptor = null; - - if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { - - descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType()); - - if (descriptor == null) { - LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); - // go to next element in iterator - continue; - } - - NetworkTransaction txn = new NetworkTransaction(); - txn.setDescriptor(descriptor); - txn.setLink(linkDescriptor.getSelfLink()); - txn.setOperationType(HttpMethod.GET); - txn.setEntityType(linkDescriptor.getEntityType()); - - aaiWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor) - .whenComplete((result, error) -> { - - aaiWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); - } else { - if (result == null) { - LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, - linkDescriptor.getSelfLink()); - } else { - updateActiveInventoryCounters(result); - fetchDocumentForUpsert(result); - } - } - }); - } - - } - - } - - /** - * Perform document upsert. - * - * @param esGetTxn the es get txn - * @param se the se - */ - protected void performDocumentUpsert(NetworkTransaction esGetTxn, SearchableEntity se) { - /** - *

- * As part of the response processing we need to do the following:
- * 1. Extract the version (if present); it will be the ETAG when we use the
- * Search-Abstraction-Service.
- * 2. Spawn the next task, which is to do the PUT operation into elastic with or without the
- * version tag:
- * a) if version is null or RC=404, then standard PUT, no _update with version tag
- * b) if version != null, do PUT with _update?version=versionNumber in the URI to elastic
- *
    - */ - String link = null; - try { - link = getElasticFullUrl("/" + se.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); - return; - } - - String versionNumber = null; - boolean wasEntryDiscovered = false; - if (esGetTxn.getOperationResult().getResultCode() == 404) { - LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, se.getEntityPrimaryKeyValue()); - } else if (esGetTxn.getOperationResult().getResultCode() == 200) { - wasEntryDiscovered = true; - try { - versionNumber = NodeUtils.extractFieldValueFromObject( - NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), - "_version"); - } catch (IOException exc) { - String message = - "Error extracting version number from response, aborting searchable entity sync of " - + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); - return; - } - } else { - /* - * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we - * return. - */ - LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, - String.valueOf(esGetTxn.getOperationResult().getResultCode())); - return; - } - - try { - String jsonPayload = null; - if (wasEntryDiscovered) { - try { - ArrayList sourceObject = new ArrayList(); - NodeUtils.extractObjectsByKey( - NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), - "_source", sourceObject); - - if (!sourceObject.isEmpty()) { - String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); - MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); - ObjectReader updater = mapper.readerForUpdating(me); - MergableEntity merged = updater.readValue(se.getIndexDocumentJson()); - jsonPayload = mapper.writeValueAsString(merged); - } - } catch (IOException exc) { - String message = - "Error extracting source value from response, aborting searchable entity sync of " - + se.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); - return; - } - } else { - jsonPayload = se.getIndexDocumentJson(); - } - - if (wasEntryDiscovered) { - if (versionNumber != null && jsonPayload != null) { - - String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), - ElasticSearchConfig.getConfig().getType(), se.getId(), versionNumber, jsonPayload); - - NetworkTransaction transactionTracker = new NetworkTransaction(); - transactionTracker.setEntityType(esGetTxn.getEntityType()); - transactionTracker.setDescriptor(esGetTxn.getDescriptor()); - transactionTracker.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), - requestPayload, esDataProvider, transactionTracker), esPutExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = "Searchable entity sync UPDATE PUT error - " - + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, se); - } - }); - } - - } else { - if (link != null && jsonPayload != null) { - - NetworkTransaction updateElasticTxn = new NetworkTransaction(); - updateElasticTxn.setLink(link); - updateElasticTxn.setEntityType(esGetTxn.getEntityType()); - updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); - updateElasticTxn.setOperationType(HttpMethod.PUT); - - esWorkOnHand.incrementAndGet(); - supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), - esPutExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - String message = - "Searchable entity sync UPDATE PUT error - " + error.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } else { - updateElasticSearchCounters(result); - processStoreDocumentResult(result, esGetTxn, se); - } - }); - } - } - } catch (Exception exc) { - String message = "Exception caught during searchable entity sync PUT operation. Message - " - + exc.getLocalizedMessage(); - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } - } - - /** - * Populate searchable entity document. - * - * @param doc the doc - * @param result the result - * @param resultDescriptor the result descriptor - * @throws JsonProcessingException the json processing exception - * @throws IOException Signals that an I/O exception has occurred. 
- */ - protected void populateSearchableEntityDocument(SearchableEntity doc, String result, - OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { - - doc.setEntityType(resultDescriptor.getEntityName()); - - JsonNode entityNode = mapper.readTree(result); - - List primaryKeyValues = new ArrayList(); - String pkeyValue = null; - - for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { - pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); - if (pkeyValue != null) { - primaryKeyValues.add(pkeyValue); - } else { - String message = "populateSearchableEntityDocument(), pKeyValue is null for entityType = " - + resultDescriptor.getEntityName(); - LOG.warn(AaiUiMsgs.WARN_GENERIC, message); - } - } - - final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); - doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); - - final List searchTagFields = resultDescriptor.getSearchableAttributes(); - - /* - * Based on configuration, use the configured field names for this entity-Type to build a - * multi-value collection of search tags for elastic search entity search criteria. - */ - for (String searchTagField : searchTagFields) { - String searchTagValue = NodeUtils.getNodeFieldAsText(entityNode, searchTagField); - if (searchTagValue != null && !searchTagValue.isEmpty()) { - doc.addSearchTagWithKey(searchTagValue, searchTagField); - } - } - } - - /** - * Fetch document for upsert. - * - * @param txn the txn - */ - private void fetchDocumentForUpsert(NetworkTransaction txn) { - if (!txn.getOperationResult().wasSuccessful()) { - String message = "Self link failure. Result - " + txn.getOperationResult().getResult(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - return; - } - - try { - if (txn.getDescriptor().hasSearchableAttributes()) { - - final String jsonResult = txn.getOperationResult().getResult(); - if (jsonResult != null && jsonResult.length() > 0) { - - SearchableEntity se = new SearchableEntity(oxmModelLoader); - se.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); - populateSearchableEntityDocument(se, jsonResult, txn.getDescriptor()); - se.deriveFields(); - - String link = null; - try { - link = getElasticFullUrl("/" + se.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction n2 = new NetworkTransaction(); - n2.setLink(link); - n2.setEntityType(txn.getEntityType()); - n2.setDescriptor(txn.getDescriptor()); - n2.setOperationType(HttpMethod.GET); - - esWorkOnHand.incrementAndGet(); - - supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor) - .whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, se); - } - }); - } - } - - } - } catch (JsonProcessingException exc) { - // TODO -> LOG, waht should be logged here? - } catch (IOException exc) { - // TODO -> LOG, waht should be logged here? - } - } - - /** - * Process store document result. 
- * - * @param esPutResult the es put result - * @param esGetResult the es get result - * @param se the se - */ - private void processStoreDocumentResult(NetworkTransaction esPutResult, - NetworkTransaction esGetResult, SearchableEntity se) { - - OperationResult or = esPutResult.getOperationResult(); - - if (!or.wasSuccessful()) { - if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { - - if (shouldAllowRetry(se.getId())) { - esWorkOnHand.incrementAndGet(); - - RetrySearchableEntitySyncContainer rsc = - new RetrySearchableEntitySyncContainer(esGetResult, se); - retryQueue.push(rsc); - - String message = "Store document failed during searchable entity synchronization" - + " due to version conflict. Entity will be re-synced."; - LOG.warn(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } - } else { - String message = - "Store document failed during searchable entity synchronization with result code " - + or.getResultCode() + " and result message " + or.getResult(); - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } - } - } - - /** - * Perform retry sync. - */ - private void performRetrySync() { - while (retryQueue.peek() != null) { - - RetrySearchableEntitySyncContainer rsc = retryQueue.poll(); - if (rsc != null) { - - SearchableEntity se = rsc.getSearchableEntity(); - NetworkTransaction txn = rsc.getNetworkTransaction(); - - String link = null; - try { - /* - * In this retry flow the se object has already derived its fields - */ - link = getElasticFullUrl("/" + se.getId(), getIndexName()); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); - } - - if (link != null) { - NetworkTransaction retryTransaction = new NetworkTransaction(); - retryTransaction.setLink(link); - retryTransaction.setEntityType(txn.getEntityType()); - retryTransaction.setDescriptor(txn.getDescriptor()); - retryTransaction.setOperationType(HttpMethod.GET); - - /* - * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already - * called incrementAndGet when queuing the failed PUT! - */ - - supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), - esExecutor).whenComplete((result, error) -> { - - esWorkOnHand.decrementAndGet(); - - if (error != null) { - LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); - } else { - updateElasticSearchCounters(result); - performDocumentUpsert(result, se); - } - }); - } - - } - } - } - - /** - * Should allow retry. 
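The shouldAllowRetry implementation that follows caps the number of re-sync attempts per document id; a compact sketch of the same bounded-retry bookkeeping, here expressed with ConcurrentHashMap.merge and an illustrative limit:

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class RetryLimitSketch {
  private static final int RETRY_LIMIT = 6; // illustrative cap per document id
  private final ConcurrentMap<String, Integer> retryCounts = new ConcurrentHashMap<>();

  /** Returns true while the document id is still under its retry budget. */
  public boolean shouldAllowRetry(String id) {
    int attempts = retryCounts.merge(id, 1, Integer::sum);
    return attempts <= RETRY_LIMIT;
  }

  public static void main(String[] args) {
    RetryLimitSketch tracker = new RetryLimitSketch();
    for (int i = 1; i <= 8; i++) {
      System.out.println("attempt " + i + " allowed? " + tracker.shouldAllowRetry("doc-1"));
    }
  }
}
```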
- * - * @param id the id - * @return true, if successful - */ - private boolean shouldAllowRetry(String id) { - boolean isRetryAllowed = true; - if (retryLimitTracker.get(id) != null) { - Integer currentCount = retryLimitTracker.get(id); - if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { - isRetryAllowed = false; - String message = "Searchable entity re-sync limit reached for " + id - + ", re-sync will no longer be attempted for this entity"; - LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); - } else { - Integer newCount = new Integer(currentCount.intValue() + 1); - retryLimitTracker.put(id, newCount); - } - } else { - Integer firstRetryCount = new Integer(1); - retryLimitTracker.put(id, firstRetryCount); - } - - return isRetryAllowed; - } - - @Override - public SynchronizerState getState() { - if (!isSyncDone()) { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - } - - return SynchronizerState.IDLE; - - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) - */ - @Override - public String getStatReport(boolean showFinalReport) { - syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; - return this.getStatReport(syncDurationInMs, showFinalReport); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.IndexSynchronizer#shutdown() - */ - @Override - public void shutdown() { - this.shutdownExecutors(); - } - - @Override - protected boolean isSyncDone() { - int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); - - if (totalWorkOnHand > 0 || !allWorkEnumerated) { - return false; - } - - return true; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/SyncController.java b/src/main/java/org/onap/aai/sparky/synchronizer/SyncController.java deleted file mode 100644 index 0f61923..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/SyncController.java +++ /dev/null @@ -1,476 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.synchronizer; - -import static java.util.concurrent.CompletableFuture.supplyAsync; - -import java.util.Collection; -import java.util.LinkedHashSet; -import java.util.concurrent.ExecutorService; -import java.util.function.Supplier; - -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.enumeration.SynchronizerState; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - -/** - * The Class SyncController. - * - * @author davea. - */ -public class SyncController { - private static final Logger LOG = LoggerFactory.getInstance().getLogger(SyncController.class); - - /** - * The Enum InternalState. - */ - private enum InternalState { - IDLE, PRE_SYNC, SYNC_OPERATION, SELECTIVE_DELETE, ABORTING_SYNC, REPAIRING_INDEX, POST_SYNC, TEST_INDEX_INTEGRITY, GENERATE_FINAL_REPORT - } - - /** - * The Enum SyncActions. - */ - public enum SyncActions { - SYNCHRONIZE, REPAIR_INDEX, INDEX_INTEGRITY_VALIDATION_COMPLETE, PRE_SYNC_COMPLETE, SYNC_COMPLETE, SYNC_ABORTED, SYNC_FAILURE, POST_SYNC_COMPLETE, PURGE_COMPLETE, REPORT_COMPLETE - } - - private Collection registeredSynchronizers; - private Collection registeredIndexValidators; - private Collection registeredIndexCleaners; - private InternalState currentInternalState; - private ExecutorService syncControllerExecutor; - private ExecutorService statReporterExecutor; - private final String controllerName; - - /** - * Instantiates a new sync controller. - * - * @param name the name - * @throws Exception the exception - */ - public SyncController(String name) throws Exception { - - this.controllerName = name; - /* - * Does LHS result in a non-duplicated object collection?? What happens if you double-add an - * object? - */ - - registeredSynchronizers = new LinkedHashSet(); - registeredIndexValidators = new LinkedHashSet(); - registeredIndexCleaners = new LinkedHashSet(); - - this.syncControllerExecutor = NodeUtils.createNamedExecutor("SyncController", 5, LOG); - this.statReporterExecutor = NodeUtils.createNamedExecutor("StatReporter", 1, LOG); - - this.currentInternalState = InternalState.IDLE; - } - - /** - * Change internal state. - * - * @param newState the new state - * @param causedByAction the caused by action - */ - private void changeInternalState(InternalState newState, SyncActions causedByAction) { - LOG.info(AaiUiMsgs.SYNC_INTERNAL_STATE_CHANGED, controllerName, currentInternalState.toString(), - newState.toString(), causedByAction.toString()); - - this.currentInternalState = newState; - - performStateAction(); - } - - public String getControllerName() { - return controllerName; - } - - /** - * Perform action. - * - * @param requestedAction the requested action - */ - public void performAction(SyncActions requestedAction) { - - if (currentInternalState == InternalState.IDLE) { - - try { - switch (requestedAction) { - case SYNCHRONIZE: - changeInternalState(InternalState.TEST_INDEX_INTEGRITY, requestedAction); - break; - - default: - break; - } - - } catch (Exception exc) { - String message = "An error occurred while performing action = " + requestedAction - + ". Error = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } else { - LOG.error(AaiUiMsgs.SYNC_NOT_VALID_STATE_DURING_REQUEST, currentInternalState.toString()); - } - } - - /** - * Perform state action. 
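performStateAction below walks the controller through those states; a minimal sketch of the same enum-driven state machine idea, with the state and action sets trimmed down and the transitions simplified:

```java
public class StateMachineSketch {

  enum State { IDLE, TEST_INDEX_INTEGRITY, PRE_SYNC, SYNC_OPERATION, POST_SYNC }
  enum Action { SYNCHRONIZE, INDEX_INTEGRITY_VALIDATION_COMPLETE, PRE_SYNC_COMPLETE, SYNC_COMPLETE }

  private State current = State.IDLE;

  /** External entry point: only a SYNCHRONIZE request received in IDLE starts a run. */
  void performAction(Action action) {
    if (current == State.IDLE && action == Action.SYNCHRONIZE) {
      changeState(State.TEST_INDEX_INTEGRITY, action);
    } else {
      System.out.println("ignoring " + action + " while in " + current);
    }
  }

  /** Each transition records its cause and immediately performs the next state's work. */
  private void changeState(State next, Action cause) {
    System.out.println(current + " -> " + next + " (caused by " + cause + ")");
    current = next;
    performStateWork();
  }

  private void performStateWork() {
    switch (current) {
      case TEST_INDEX_INTEGRITY:
        changeState(State.PRE_SYNC, Action.INDEX_INTEGRITY_VALIDATION_COMPLETE);
        break;
      case PRE_SYNC:
        changeState(State.SYNC_OPERATION, Action.PRE_SYNC_COMPLETE);
        break;
      case SYNC_OPERATION:
        changeState(State.POST_SYNC, Action.SYNC_COMPLETE);
        break;
      default:
        break;
    }
  }

  public static void main(String[] args) {
    new StateMachineSketch().performAction(Action.SYNCHRONIZE);
  }
}
```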
- */ - private void performStateAction() { - - try { - switch (currentInternalState) { - - case TEST_INDEX_INTEGRITY: - performIndexIntegrityValidation(); - break; - - case PRE_SYNC: - performPreSyncCleanupCollection(); - break; - - case SYNC_OPERATION: - performSynchronization(); - break; - - case POST_SYNC: - performIndexSyncPostCollection(); - changeInternalState(InternalState.SELECTIVE_DELETE, SyncActions.POST_SYNC_COMPLETE); - break; - - case SELECTIVE_DELETE: - performIndexCleanup(); - changeInternalState(InternalState.GENERATE_FINAL_REPORT, SyncActions.PURGE_COMPLETE); - break; - - case GENERATE_FINAL_REPORT: - - dumpStatReport(true); - clearCaches(); - changeInternalState(InternalState.IDLE, SyncActions.REPORT_COMPLETE); - break; - - case ABORTING_SYNC: - performSyncAbort(); - break; - - default: - break; - } - } catch (Exception exc) { - String message = "Caught an error which performing action. Error = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - /** - * Register entity synchronizer. - * - * @param entitySynchronizer the entity synchronizer - */ - public void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer) { - - String indexName = entitySynchronizer.getIndexName(); - - if (indexName != null) { - registeredSynchronizers.add(entitySynchronizer); - } else { - String message = "Failed to register entity synchronizer because index name is null"; - LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); - } - - } - - /** - * Register index validator. - * - * @param indexValidator the index validator - */ - public void registerIndexValidator(IndexValidator indexValidator) { - - String indexName = indexValidator.getIndexName(); - - if (indexName != null) { - registeredIndexValidators.add(indexValidator); - } else { - String message = "Failed to register index validator because index name is null"; - LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); - } - - } - - /** - * Register index cleaner. - * - * @param indexCleaner the index cleaner - */ - public void registerIndexCleaner(IndexCleaner indexCleaner) { - - String indexName = indexCleaner.getIndexName(); - - if (indexName != null) { - registeredIndexCleaners.add(indexCleaner); - } else { - String message = "Failed to register index cleaner because index name is null"; - LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); - } - } - - /* - * State machine should drive our flow dosync just dispatches an action and the state machine - * determines what is in play and what is next - */ - - /** - * Dump stat report. - * - * @param showFinalReport the show final report - */ - private void dumpStatReport(boolean showFinalReport) { - - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - - String statReport = synchronizer.getStatReport(showFinalReport); - - if (statReport != null) { - LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); - } - } - } - - /** - * Clear caches. - */ - private void clearCaches() { - - /* - * Any entity caches that were built as part of the sync operation should be cleared to save - * memory. The original intent of the caching was to provide a short-lived cache to satisfy - * entity requests from multiple synchronizers yet minimizing interactions with the AAI. - */ - - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - synchronizer.clearCache(); - } - } - - /** - * Perform pre sync cleanup collection. 
- */ - private void performPreSyncCleanupCollection() { - - /* - * ask the index cleaners to collect the their pre-sync object id collections - */ - - for (IndexCleaner cleaner : registeredIndexCleaners) { - cleaner.populatePreOperationCollection(); - } - - changeInternalState(InternalState.SYNC_OPERATION, SyncActions.PRE_SYNC_COMPLETE); - - } - - /** - * Perform index sync post collection. - */ - private void performIndexSyncPostCollection() { - - /* - * ask the entity purgers to collect the their pre-sync object id collections - */ - - for (IndexCleaner cleaner : registeredIndexCleaners) { - cleaner.populatePostOperationCollection(); - } - - } - - /** - * Perform index cleanup. - */ - private void performIndexCleanup() { - - /* - * ask the entity purgers to collect the their pre-sync object id collections - */ - - for (IndexCleaner cleaner : registeredIndexCleaners) { - cleaner.performCleanup(); - } - - } - - /** - * Perform sync abort. - */ - private void performSyncAbort() { - changeInternalState(InternalState.IDLE, SyncActions.SYNC_ABORTED); - } - - /** - * Perform index integrity validation. - */ - private void performIndexIntegrityValidation() { - - /* - * loop through registered index validators and test and fix, if needed - */ - - for (IndexValidator validator : registeredIndexValidators) { - try { - if (!validator.exists()) { - validator.createOrRepair(); - } - } catch (Exception exc) { - String message = "Index validator caused an error = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - changeInternalState(InternalState.PRE_SYNC, SyncActions.INDEX_INTEGRITY_VALIDATION_COMPLETE); - - } - - /** - * Shutdown. - */ - public void shutdown() { - - this.syncControllerExecutor.shutdown(); - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - - try { - synchronizer.shutdown(); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "Synchronizer shutdown caused an error = " + exc.getMessage()); - } - - } - this.statReporterExecutor.shutdown(); - } - - /* - * Need some kind of task running that responds to a transient boolean to kill it or we just stop - * the executor that it is in? - */ - - - - /** - * Perform synchronization. - */ - private void performSynchronization() { - - /* - * Get all the synchronizers running in parallel - */ - - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - supplyAsync(new Supplier() { - - @Override - public Void get() { - - synchronizer.doSync(); - return null; - } - - }, this.syncControllerExecutor).whenComplete((result, error) -> { - - /* - * We don't bother checking the result, because it will always be null as the doSync() is - * non-blocking. 
- */ - - if (error != null) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "doSync operation failed with an error = " + error.getMessage()); - } - }); - } - - boolean allDone = false; - long nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; - boolean dumpPeriodicStatReport = false; - - while (!allDone) { - - int totalFinished = 0; - - for (IndexSynchronizer synchronizer : registeredSynchronizers) { - if (dumpPeriodicStatReport) { - if (synchronizer.getState() != SynchronizerState.IDLE) { - String statReport = synchronizer.getStatReport(false); - if (statReport != null) { - LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); - } - } - if (synchronizer.getState() == SynchronizerState.IDLE) { - totalFinished++; - } - } - } - if (System.currentTimeMillis() > nextReportTimeStampInMs) { - dumpPeriodicStatReport = true; - nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; - } else { - dumpPeriodicStatReport = false; - } - allDone = (totalFinished == registeredSynchronizers.size()); - - try { - Thread.sleep(250); - } catch (InterruptedException exc) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, - "An error occurred while waiting for sync to complete. Error = " + exc.getMessage()); - } - - } - - changeInternalState(InternalState.POST_SYNC, SyncActions.SYNC_COMPLETE); - - } - - public SynchronizerState getState() { - - switch (currentInternalState) { - - case IDLE: { - return SynchronizerState.IDLE; - } - - default: { - return SynchronizerState.PERFORMING_SYNCHRONIZATION; - - } - } - - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/SyncHelper.java b/src/main/java/org/onap/aai/sparky/synchronizer/SyncHelper.java deleted file mode 100644 index 9081d41..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/SyncHelper.java +++ /dev/null @@ -1,568 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.synchronizer; - -import com.google.common.util.concurrent.ThreadFactoryBuilder; - -import java.lang.Thread.UncaughtExceptionHandler; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.aai.ActiveInventoryAdapter; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryRestConfig; -import org.onap.aai.sparky.dal.cache.EntityCache; -import org.onap.aai.sparky.dal.cache.InMemoryEntityCache; -import org.onap.aai.sparky.dal.cache.PersistentEntityCache; -import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchAdapter; -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.RestClientBuilder; -import org.onap.aai.sparky.dal.rest.RestfulDataAccessor; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.SyncController.SyncActions; -import org.onap.aai.sparky.synchronizer.config.SynchronizerConfiguration; -import org.onap.aai.sparky.synchronizer.config.SynchronizerConstants; -import org.onap.aai.sparky.synchronizer.enumeration.SynchronizerState; -import org.onap.aai.sparky.util.ErrorUtil; -import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.slf4j.MDC; - -/** - * The Class SyncHelper. - * - * @author davea. - */ -public class SyncHelper { - - private final Logger LOG = LoggerFactory.getInstance().getLogger(SyncHelper.class); - private SyncController syncController = null; - private SyncController entityCounterHistorySummarizer = null; - - private ScheduledExecutorService oneShotExecutor = Executors.newSingleThreadScheduledExecutor(); - private ScheduledExecutorService periodicExecutor = null; - private ScheduledExecutorService historicalExecutor = - Executors.newSingleThreadScheduledExecutor(); - - private SynchronizerConfiguration syncConfig; - private ElasticSearchConfig esConfig; - private OxmModelLoader oxmModelLoader; - - private Boolean initialSyncRunning = false; - private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z"); - private AtomicLong timeNextSync = new AtomicLong(); - Map contextMap; - - /** - * The Class SyncTask. - */ - private class SyncTask implements Runnable { - - private boolean isInitialSync; - - public boolean isInitialSync() { - return isInitialSync; - } - - public void setInitialSync(boolean isInitialSync) { - this.isInitialSync = isInitialSync; - } - - /** - * Instantiates a new sync task. 
- * - * @param initialSync the initial sync - */ - public SyncTask(boolean initialSync) { - this.isInitialSync = initialSync; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Runnable#run() - */ - @Override - public void run() { - long opStartTime = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - - LOG.info(AaiUiMsgs.SEARCH_ENGINE_SYNC_STARTED, sdf.format(opStartTime) - .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD)); - - try { - - if (syncController == null) { - LOG.error(AaiUiMsgs.SYNC_SKIPPED_SYNCCONTROLLER_NOT_INITIALIZED); - return; - } - - int taskFrequencyInDays = SynchronizerConfiguration.getConfig().getSyncTaskFrequencyInDay(); - - /* - * Do nothing if the initial start-up sync hasn't finished yet, but the regular sync - * scheduler fired up a regular sync. - */ - if (!initialSyncRunning) { - if (isInitialSync) { - initialSyncRunning = true; - } else { - // update 'timeNextSync' for periodic sync - timeNextSync.getAndAdd(taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); - - } - - LOG.info(AaiUiMsgs.INFO_GENERIC, "SyncTask, starting synchronization"); - - syncController.performAction(SyncActions.SYNCHRONIZE); - - while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { - Thread.sleep(1000); - } - - } else { - LOG.info(AaiUiMsgs.SKIP_PERIODIC_SYNC_AS_SYNC_DIDNT_FINISH, sdf.format(opStartTime) - .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD)); - - return; - } - - long opEndTime = System.currentTimeMillis(); - - if (isInitialSync) { - /* - * Handle corner case when start-up sync operation overlapped with a scheduled - * sync-start-time. Note that the scheduled sync does nothing if 'initialSyncRunning' is - * TRUE. So the actual next-sync is one more sync-cycle away. - */ - long knownNextSyncTime = timeNextSync.get(); - if (knownNextSyncTime != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS - && opEndTime > knownNextSyncTime) { - timeNextSync.compareAndSet(knownNextSyncTime, - knownNextSyncTime + taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); - initialSyncRunning = false; - } - } - - String durationMessage = - String.format(syncController.getControllerName() + " synchronization took '%d' ms.", - (opEndTime - opStartTime)); - - LOG.info(AaiUiMsgs.SYNC_DURATION, durationMessage); - - // Provide log about the time for next synchronization - if (syncConfig.isConfigOkForPeriodicSync() - && timeNextSync.get() != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) { - TimeZone tz = TimeZone.getTimeZone(syncConfig.getSyncTaskStartTimeTimeZone()); - sdf.setTimeZone(tz); - if (opEndTime - opStartTime > taskFrequencyInDays - * SynchronizerConstants.MILLISEC_IN_A_DAY) { - String durationWasLongerMessage = String.format( - syncController.getControllerName() - + " synchronization took '%d' ms which is longer than" - + " the synchronization interval of '%d' ms.", - (opEndTime - opStartTime), - taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); - - LOG.info(AaiUiMsgs.SYNC_DURATION, durationWasLongerMessage); - } - - LOG.info(AaiUiMsgs.SYNC_TO_BEGIN, syncController.getControllerName(), - sdf.format(timeNextSync).replaceAll(SynchronizerConstants.TIME_STD, - SynchronizerConstants.TIME_CONFIG_STD)); - } - - } catch (Exception exc) { - String message = "Caught an exception while attempting to synchronize elastic search " - + "with an error cause = " + ErrorUtil.extractStackTraceElements(5, exc); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - - } 
- - } - - - /** - * Gets the first sync time. - * - * @param calendar the calendar - * @param timeNow the time now - * @param taskFreqInDay the task freq in day - * @return the first sync time - */ - public long getFirstSyncTime(Calendar calendar, long timeNow, int taskFreqInDay) { - if (taskFreqInDay == SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) { - return SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS; - } else if (timeNow > calendar.getTimeInMillis()) { - calendar.add(Calendar.DAY_OF_MONTH, taskFreqInDay); - } - return calendar.getTimeInMillis(); - } - - /** - * Boot strap and configure the moving pieces of the Sync Controller. - */ - - private void initializeSyncController() { - - try { - - /* - * TODO: it would be nice to have XML IoC / dependency injection kind of thing for these - * pieces maybe Spring? - */ - - /* - * Sync Controller itself - */ - - syncController = new SyncController("entitySyncController"); - - /* - * Create common elements - */ - - ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new RestClientBuilder()); - ActiveInventoryRestConfig aaiRestConfig = - ActiveInventoryConfig.getConfig().getAaiRestConfig(); - - - EntityCache cache = null; - - if (aaiRestConfig.isCacheEnabled()) { - cache = new PersistentEntityCache(aaiRestConfig.getStorageFolderOverride(), - aaiRestConfig.getNumCacheWorkers()); - } else { - cache = new InMemoryEntityCache(); - } - - RestClientBuilder clientBuilder = new RestClientBuilder(); - - aaiAdapter.setCacheEnabled(true); - aaiAdapter.setEntityCache(cache); - - clientBuilder.setUseHttps(false); - - RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); - - ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); - ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider, esConfig); - - /* - * Register Index Validators - */ - - IndexIntegrityValidator entitySearchIndexValidator = - new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getIndexName(), - esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), - esConfig.buildElasticSearchTableConfig()); - - syncController.registerIndexValidator(entitySearchIndexValidator); - - // TODO: Insert IndexValidator for TopographicalEntityIndex - // we should have one, but one isn't 100% required as none of the fields are analyzed - - /* - * Register Synchronizers - */ - - SearchableEntitySynchronizer ses = new SearchableEntitySynchronizer(esConfig.getIndexName()); - ses.setAaiDataProvider(aaiAdapter); - ses.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(ses); - - CrossEntityReferenceSynchronizer cers = new CrossEntityReferenceSynchronizer( - esConfig.getIndexName(), ActiveInventoryConfig.getConfig()); - cers.setAaiDataProvider(aaiAdapter); - cers.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(cers); - - if (syncConfig.isAutosuggestSynchronizationEnabled()) { - initAutoSuggestionSynchronizer(esConfig, aaiAdapter, esAdapter, nonCachingRestProvider); - initAggregationSynchronizer(esConfig, aaiAdapter, esAdapter, nonCachingRestProvider); - } - - /* - * Register Cleaners - */ - - IndexCleaner searchableIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, - esConfig.getIndexName(), esConfig.getType(), esConfig.getIpAddress(), - esConfig.getHttpPort(), syncConfig.getScrollContextTimeToLiveInMinutes(), - syncConfig.getNumScrollContextItemsToRetrievePerRequest()); - - syncController.registerIndexCleaner(searchableIndexCleaner); - - } 
catch (Exception exc) { - String message = "Error: failed to sync with message = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - - } - - private List getAutosuggestableEntitiesFromOXM() { - Map map = oxmModelLoader.getSuggestionSearchEntityDescriptors(); - List suggestableEntities = new ArrayList(); - - for (String entity : map.keySet()) { - suggestableEntities.add(entity); - } - return suggestableEntities; - } - - /** - * Initialize the AutosuggestionSynchronizer and AggregationSuggestionSynchronizer - * - * @param esConfig - * @param aaiAdapter - * @param esAdapter - * @param nonCachingRestProvider - */ - private void initAutoSuggestionSynchronizer(ElasticSearchConfig esConfig, - ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, - RestfulDataAccessor nonCachingRestProvider) { - LOG.info(AaiUiMsgs.INFO_GENERIC, "initAutoSuggestionSynchronizer"); - - // Initialize for entityautosuggestindex - try { - IndexIntegrityValidator autoSuggestionIndexValidator = - new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getAutosuggestIndexname(), - esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), - esConfig.buildAutosuggestionTableConfig()); - - syncController.registerIndexValidator(autoSuggestionIndexValidator); - - AutosuggestionSynchronizer suggestionSynchronizer = - new AutosuggestionSynchronizer(esConfig.getAutosuggestIndexname()); - suggestionSynchronizer.setAaiDataProvider(aaiAdapter); - suggestionSynchronizer.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(suggestionSynchronizer); - - AggregationSuggestionSynchronizer aggregationSuggestionSynchronizer = - new AggregationSuggestionSynchronizer(esConfig.getAutosuggestIndexname()); - aggregationSuggestionSynchronizer.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(aggregationSuggestionSynchronizer); - - IndexCleaner autosuggestIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, - esConfig.getAutosuggestIndexname(), esConfig.getType(), esConfig.getIpAddress(), - esConfig.getHttpPort(), syncConfig.getScrollContextTimeToLiveInMinutes(), - syncConfig.getNumScrollContextItemsToRetrievePerRequest()); - - syncController.registerIndexCleaner(autosuggestIndexCleaner); - } catch (Exception exc) { - String message = "Error: failed to sync with message = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - /** - * Initialize the AggregationSynchronizer - * - * @param esConfig - * @param aaiAdapter - * @param esAdapter - * @param nonCachingRestProvider - */ - private void initAggregationSynchronizer(ElasticSearchConfig esConfig, - ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, - RestfulDataAccessor nonCachingRestProvider) { - LOG.info(AaiUiMsgs.INFO_GENERIC, "initAggregationSynchronizer"); - - List aggregationEntities = getAutosuggestableEntitiesFromOXM(); - - // For each index: create an IndexValidator, a Synchronizer, and an IndexCleaner - for (String entity : aggregationEntities) { - try { - String indexName = TierSupportUiConstants.getAggregationIndexName(entity); - - IndexIntegrityValidator aggregationIndexValidator = new IndexIntegrityValidator( - nonCachingRestProvider, indexName, esConfig.getType(), esConfig.getIpAddress(), - esConfig.getHttpPort(), esConfig.buildAggregationTableConfig()); - - syncController.registerIndexValidator(aggregationIndexValidator); - - /* - * TODO: This per-entity-synchronizer approach will eventually result in AAI / ES overload - * 
because of the existing dedicated thread pools for ES + AAI operations within the - * synchronizer. If we had 50 types to sync then the thread pools within each Synchronizer - * would cause some heartburn as there would be hundreds of threads trying to talk to AAI. - * Given that we are running out of time, let's make sure we can get it functional and then - * we'll re-visit. - */ - AggregationSynchronizer aggSynchronizer = new AggregationSynchronizer(entity, indexName); - aggSynchronizer.setAaiDataProvider(aaiAdapter); - aggSynchronizer.setEsDataProvider(esAdapter); - syncController.registerEntitySynchronizer(aggSynchronizer); - - IndexCleaner entityDataIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, - indexName, esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), - syncConfig.getScrollContextTimeToLiveInMinutes(), - syncConfig.getNumScrollContextItemsToRetrievePerRequest()); - - syncController.registerIndexCleaner(entityDataIndexCleaner); - - } catch (Exception exc) { - String message = "Error: failed to sync with message = " + exc.getMessage(); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - } - - /** - * Instantiates a new sync helper. - * - * @param loader the loader - */ - public SyncHelper(OxmModelLoader loader) { - try { - this.contextMap = MDC.getCopyOfContextMap(); - this.syncConfig = SynchronizerConfiguration.getConfig(); - this.esConfig = ElasticSearchConfig.getConfig(); - this.oxmModelLoader = loader; - - UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() { - - @Override - public void uncaughtException(Thread thread, Throwable exc) { - LOG.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc); - } - }; - - ThreadFactory namedThreadFactory = new ThreadFactoryBuilder().setNameFormat("SyncHelper-%d") - .setUncaughtExceptionHandler(uncaughtExceptionHandler).build(); - - periodicExecutor = Executors.newScheduledThreadPool(3, namedThreadFactory); - - /* - * We only want to initialize the synchronizer if sync has been configured to start - */ - if (syncConfig.isConfigOkForStartupSync() || syncConfig.isConfigOkForPeriodicSync()) { - initializeSyncController(); - } - - // schedule startup synchronization - if (syncConfig.isConfigOkForStartupSync()) { - - long taskInitialDelayInMs = syncConfig.getSyncTaskInitialDelayInMs(); - if (taskInitialDelayInMs != SynchronizerConstants.DELAY_NO_STARTUP_SYNC_IN_MS) { - oneShotExecutor.schedule(new SyncTask(true), taskInitialDelayInMs, TimeUnit.MILLISECONDS); - LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine startup synchronization is enabled."); - } else { - LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine startup synchronization is disabled."); - } - } - - // schedule periodic synchronization - if (syncConfig.isConfigOkForPeriodicSync()) { - - TimeZone tz = TimeZone.getTimeZone(syncConfig.getSyncTaskStartTimeTimeZone()); - Calendar calendar = Calendar.getInstance(tz); - sdf.setTimeZone(tz); - - calendar.set(Calendar.HOUR_OF_DAY, syncConfig.getSyncTaskStartTimeHr()); - calendar.set(Calendar.MINUTE, syncConfig.getSyncTaskStartTimeMin()); - calendar.set(Calendar.SECOND, syncConfig.getSyncTaskStartTimeSec()); - - long timeCurrent = calendar.getTimeInMillis(); - int taskFrequencyInDay = syncConfig.getSyncTaskFrequencyInDay(); - timeNextSync.getAndSet(getFirstSyncTime(calendar, timeCurrent, taskFrequencyInDay)); - - long delayUntilFirstRegSyncInMs = 0; - delayUntilFirstRegSyncInMs = timeNextSync.get() - timeCurrent; - - // Do all calculations in milliseconds - 
long taskFreqencyInMs = taskFrequencyInDay * SynchronizerConstants.MILLISEC_IN_A_DAY; - - if (taskFreqencyInMs != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) { - periodicExecutor.scheduleAtFixedRate(new SyncTask(false), delayUntilFirstRegSyncInMs, - taskFreqencyInMs, TimeUnit.MILLISECONDS); - LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine periodic synchronization is enabled."); - // case: when - startup sync is misconfigured or is disabled - // - give a clue to user when is the next periodic sync - if (!syncConfig.isConfigOkForStartupSync() - || syncConfig.isConfigDisabledForInitialSync()) { - LOG.info(AaiUiMsgs.SYNC_TO_BEGIN, syncController.getControllerName(), - sdf.format(timeNextSync).replaceAll(SynchronizerConstants.TIME_STD, - SynchronizerConstants.TIME_CONFIG_STD)); - } - } else { - LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine periodic synchronization is disabled."); - } - } - - } catch (Exception exc) { - String message = "Caught an exception while starting up the SyncHelper. Error cause = \n" - + ErrorUtil.extractStackTraceElements(5, exc); - LOG.error(AaiUiMsgs.ERROR_GENERIC, message); - } - } - - - /** - * Shutdown. - */ - public void shutdown() { - - if (oneShotExecutor != null) { - oneShotExecutor.shutdown(); - } - - if (periodicExecutor != null) { - periodicExecutor.shutdown(); - } - - if (historicalExecutor != null) { - historicalExecutor.shutdown(); - } - - if (syncController != null) { - syncController.shutdown(); - } - - if (entityCounterHistorySummarizer != null) { - entityCounterHistorySummarizer.shutdown(); - } - - } - - public OxmModelLoader getOxmModelLoader() { - return oxmModelLoader; - } - - public void setOxmModelLoader(OxmModelLoader oxmModelLoader) { - this.oxmModelLoader = oxmModelLoader; - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/TaskProcessingStats.java b/src/main/java/org/onap/aai/sparky/synchronizer/TaskProcessingStats.java deleted file mode 100644 index ef53a75..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/TaskProcessingStats.java +++ /dev/null @@ -1,189 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer; - -import org.onap.aai.sparky.analytics.AbstractStatistics; -import org.onap.aai.sparky.synchronizer.config.TaskProcessorConfig; - -/** - * The Class TaskProcessingStats. 
- */ -public class TaskProcessingStats extends AbstractStatistics { - - private static String TASK_AGE_STATS = "taskAgeStats"; - private static String TASK_RESPONSE_STATS = "taskResponseStats"; - private static String RESPONSE_SIZE_IN_BYTES = "taskResponseSizeInBytes"; - // private static String QUEUE_ITEM_LENGTH = "queueItemLength"; - private static String TPS = "transactionsPerSecond"; - - /** - * Instantiates a new task processing stats. - * - * @param config the config - */ - public TaskProcessingStats(TaskProcessorConfig config) { - - addHistogram(TASK_AGE_STATS, config.getTaskAgeHistogramLabel(), - config.getTaskAgeHistogramMaxYAxis(), config.getTaskAgeHistogramNumBins(), - config.getTaskAgeHistogramNumDecimalPoints()); - - addHistogram(TASK_RESPONSE_STATS, config.getResponseTimeHistogramLabel(), - config.getResponseTimeHistogramMaxYAxis(), config.getResponseTimeHistogramNumBins(), - config.getResponseTimeHistogramNumDecimalPoints()); - - addHistogram(RESPONSE_SIZE_IN_BYTES, config.getBytesHistogramLabel(), - config.getBytesHistogramMaxYAxis(), config.getBytesHistogramNumBins(), - config.getBytesHistogramNumDecimalPoints()); - - /* - * addHistogram(QUEUE_ITEM_LENGTH, config.getQueueLengthHistogramLabel(), - * config.getQueueLengthHistogramMaxYAxis(), config.getQueueLengthHistogramNumBins(), - * config.getQueueLengthHistogramNumDecimalPoints()); - */ - - addHistogram(TPS, config.getTpsHistogramLabel(), config.getTpsHistogramMaxYAxis(), - config.getTpsHistogramNumBins(), config.getTpsHistogramNumDecimalPoints()); - - } - - /* - * public void updateQueueItemLengthHistogram(long value) { updateHistogram(QUEUE_ITEM_LENGTH, - * value); } - */ - - /** - * Update task age stats histogram. - * - * @param value the value - */ - public void updateTaskAgeStatsHistogram(long value) { - updateHistogram(TASK_AGE_STATS, value); - } - - /** - * Update task response stats histogram. - * - * @param value the value - */ - public void updateTaskResponseStatsHistogram(long value) { - updateHistogram(TASK_RESPONSE_STATS, value); - } - - /** - * Update response size in bytes histogram. - * - * @param value the value - */ - public void updateResponseSizeInBytesHistogram(long value) { - updateHistogram(RESPONSE_SIZE_IN_BYTES, value); - } - - /** - * Update transactions per second histogram. - * - * @param value the value - */ - public void updateTransactionsPerSecondHistogram(long value) { - updateHistogram(TPS, value); - } - - /** - * Gets the statistics report. 
- * - * @param verboseEnabled the verbose enabled - * @param indentPadding the indent padding - * @return the statistics report - */ - public String getStatisticsReport(boolean verboseEnabled, String indentPadding) { - - StringBuilder sb = new StringBuilder(); - - sb.append("\n").append(getHistogramStats(TASK_AGE_STATS, verboseEnabled, indentPadding)); - // sb.append("\n").append(getHistogramStats(QUEUE_ITEM_LENGTH, verboseEnabled, indentPadding)); - sb.append("\n").append(getHistogramStats(TASK_RESPONSE_STATS, verboseEnabled, indentPadding)); - sb.append("\n") - .append(getHistogramStats(RESPONSE_SIZE_IN_BYTES, verboseEnabled, indentPadding)); - sb.append("\n").append(getHistogramStats(TPS, verboseEnabled, indentPadding)); - - return sb.toString(); - - } - - /** - * @return the tASK_AGE_STATS - */ - public static String getTASK_AGE_STATS() { - return TASK_AGE_STATS; - } - - /** - * @param tASK_AGE_STATS the tASK_AGE_STATS to set - */ - public static void setTASK_AGE_STATS(String tASK_AGE_STATS) { - TASK_AGE_STATS = tASK_AGE_STATS; - } - - /** - * @return the tASK_RESPONSE_STATS - */ - public static String getTASK_RESPONSE_STATS() { - return TASK_RESPONSE_STATS; - } - - /** - * @param tASK_RESPONSE_STATS the tASK_RESPONSE_STATS to set - */ - public static void setTASK_RESPONSE_STATS(String tASK_RESPONSE_STATS) { - TASK_RESPONSE_STATS = tASK_RESPONSE_STATS; - } - - /** - * @return the rESPONSE_SIZE_IN_BYTES - */ - public static String getRESPONSE_SIZE_IN_BYTES() { - return RESPONSE_SIZE_IN_BYTES; - } - - /** - * @param rESPONSE_SIZE_IN_BYTES the rESPONSE_SIZE_IN_BYTES to set - */ - public static void setRESPONSE_SIZE_IN_BYTES(String rESPONSE_SIZE_IN_BYTES) { - RESPONSE_SIZE_IN_BYTES = rESPONSE_SIZE_IN_BYTES; - } - - /** - * @return the tPS - */ - public static String getTPS() { - return TPS; - } - - /** - * @param tPS the tPS to set - */ - public static void setTPS(String tPS) { - TPS = tPS; - } - - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/TransactionRateController.java b/src/main/java/org/onap/aai/sparky/synchronizer/TransactionRateController.java deleted file mode 100644 index 76deef3..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/TransactionRateController.java +++ /dev/null @@ -1,110 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.synchronizer; - -import java.util.concurrent.atomic.AtomicInteger; - -import org.onap.aai.sparky.analytics.AveragingRingBuffer; -import org.onap.aai.sparky.synchronizer.config.TaskProcessorConfig; - -/** - * TODO: Fill in description. - * - * @author davea. - */ -public class TransactionRateController { - - private AveragingRingBuffer responseTimeTracker; - private double msPerTransaction; - private int numThreads; - private TaskProcessorConfig config; - private long startTimeInMs; - private AtomicInteger numTransactions; - - /** - * Instantiates a new transaction rate controller. - * - * @param config the config - */ - public TransactionRateController(TaskProcessorConfig config) { - - this.config = config; - this.responseTimeTracker = new AveragingRingBuffer( - config.getNumSamplesPerThreadForRunningAverage() * config.getMaxConcurrentWorkers()); - this.msPerTransaction = 1000 / config.getTargetTps(); - this.numThreads = config.getMaxConcurrentWorkers(); - this.startTimeInMs = System.currentTimeMillis(); - this.numTransactions = new AtomicInteger(0); - } - - /** - * Track response time. - * - * @param responseTimeInMs the response time in ms - */ - public void trackResponseTime(long responseTimeInMs) { - this.numTransactions.incrementAndGet(); - responseTimeTracker.addSample(responseTimeInMs); - } - - public long getFixedDelayInMs() { - - /* - * The math here is pretty simple: - * - * 1. Target TPS is 10. Then the msPerTxn = 1000/10 = 100ms - * - * 2. If the calculated avgResponseTime = 40 ms, then the proposed delay is 60ms per thread. - * - * 3. If the calculated avgResponseTime = 200ms, then the proposed delay is -100 ms, which is - * not possible, we can't speed it up, so we don't propose any further delay. - */ - - double proposedDelay = 0; - - if (config.isTransactionRateControllerEnabled()) { - proposedDelay = ((msPerTransaction - responseTimeTracker.getAvg()) * this.numThreads); - - if (proposedDelay > 0) { - return (long) (proposedDelay); - } - } - - return (long) proposedDelay; - } - - public long getAvg() { - return responseTimeTracker.getAvg(); - } - - public double getCurrentTps() { - if (numTransactions.get() > 0) { - double timeDelta = System.currentTimeMillis() - startTimeInMs; - double numTxns = numTransactions.get(); - return (numTxns / timeDelta) * 1000.0; - } - - return 0.0; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/config/NetworkStatisticsConfig.java b/src/main/java/org/onap/aai/sparky/synchronizer/config/NetworkStatisticsConfig.java deleted file mode 100644 index 31c8acd..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/config/NetworkStatisticsConfig.java +++ /dev/null @@ -1,237 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.config; - -public class NetworkStatisticsConfig { - - private int numSamplesPerThreadForRunningAverage; - - private String bytesHistogramLabel; - - private long bytesHistogramMaxYAxis; - - private int bytesHistogramNumBins; - - private int bytesHistogramNumDecimalPoints; - - private String queueLengthHistogramLabel; - - private long queueLengthHistogramMaxYAxis; - - private int queueLengthHistogramNumBins; - - private int queueLengthHistogramNumDecimalPoints; - - private String taskAgeHistogramLabel; - - private long taskAgeHistogramMaxYAxis; - - private int taskAgeHistogramNumBins; - - private int taskAgeHistogramNumDecimalPoints; - - private String responseTimeHistogramLabel; - - private long responseTimeHistogramMaxYAxis; - - private int responseTimeHistogramNumBins; - - private int responseTimeHistogramNumDecimalPoints; - - private String tpsHistogramLabel; - - private long tpsHistogramMaxYAxis; - - private int tpsHistogramNumBins; - - private int tpsHistogramNumDecimalPoints; - - public int getNumSamplesPerThreadForRunningAverage() { - return numSamplesPerThreadForRunningAverage; - } - - public void setNumSamplesPerThreadForRunningAverage(int numSamplesPerThreadForRunningAverage) { - this.numSamplesPerThreadForRunningAverage = numSamplesPerThreadForRunningAverage; - } - - public String getBytesHistogramLabel() { - return bytesHistogramLabel; - } - - public void setBytesHistogramLabel(String bytesHistogramLabel) { - this.bytesHistogramLabel = bytesHistogramLabel; - } - - public long getBytesHistogramMaxYAxis() { - return bytesHistogramMaxYAxis; - } - - public void setBytesHistogramMaxYAxis(long bytesHistogramMaxYAxis) { - this.bytesHistogramMaxYAxis = bytesHistogramMaxYAxis; - } - - public int getBytesHistogramNumBins() { - return bytesHistogramNumBins; - } - - public void setBytesHistogramNumBins(int bytesHistogramNumBins) { - this.bytesHistogramNumBins = bytesHistogramNumBins; - } - - public int getBytesHistogramNumDecimalPoints() { - return bytesHistogramNumDecimalPoints; - } - - public void setBytesHistogramNumDecimalPoints(int bytesHistogramNumDecimalPoints) { - this.bytesHistogramNumDecimalPoints = bytesHistogramNumDecimalPoints; - } - - public String getQueueLengthHistogramLabel() { - return queueLengthHistogramLabel; - } - - public void setQueueLengthHistogramLabel(String queueLengthHistogramLabel) { - this.queueLengthHistogramLabel = queueLengthHistogramLabel; - } - - public long getQueueLengthHistogramMaxYAxis() { - return queueLengthHistogramMaxYAxis; - } - - public void setQueueLengthHistogramMaxYAxis(long queueLengthHistogramMaxYAxis) { - this.queueLengthHistogramMaxYAxis = queueLengthHistogramMaxYAxis; - } - - public int getQueueLengthHistogramNumBins() { - return queueLengthHistogramNumBins; - } - - public void setQueueLengthHistogramNumBins(int queueLengthHistogramNumBins) { - this.queueLengthHistogramNumBins = queueLengthHistogramNumBins; - } - - 
public int getQueueLengthHistogramNumDecimalPoints() { - return queueLengthHistogramNumDecimalPoints; - } - - public void setQueueLengthHistogramNumDecimalPoints(int queueLengthHistogramNumDecimalPoints) { - this.queueLengthHistogramNumDecimalPoints = queueLengthHistogramNumDecimalPoints; - } - - public String getTaskAgeHistogramLabel() { - return taskAgeHistogramLabel; - } - - public void setTaskAgeHistogramLabel(String taskAgeHistogramLabel) { - this.taskAgeHistogramLabel = taskAgeHistogramLabel; - } - - public long getTaskAgeHistogramMaxYAxis() { - return taskAgeHistogramMaxYAxis; - } - - public void setTaskAgeHistogramMaxYAxis(long taskAgeHistogramMaxYAxis) { - this.taskAgeHistogramMaxYAxis = taskAgeHistogramMaxYAxis; - } - - public int getTaskAgeHistogramNumBins() { - return taskAgeHistogramNumBins; - } - - public void setTaskAgeHistogramNumBins(int taskAgeHistogramNumBins) { - this.taskAgeHistogramNumBins = taskAgeHistogramNumBins; - } - - public int getTaskAgeHistogramNumDecimalPoints() { - return taskAgeHistogramNumDecimalPoints; - } - - public void setTaskAgeHistogramNumDecimalPoints(int taskAgeHistogramNumDecimalPoints) { - this.taskAgeHistogramNumDecimalPoints = taskAgeHistogramNumDecimalPoints; - } - - public String getResponseTimeHistogramLabel() { - return responseTimeHistogramLabel; - } - - public void setResponseTimeHistogramLabel(String responseTimeHistogramLabel) { - this.responseTimeHistogramLabel = responseTimeHistogramLabel; - } - - public long getResponseTimeHistogramMaxYAxis() { - return responseTimeHistogramMaxYAxis; - } - - public void setResponseTimeHistogramMaxYAxis(long responseTimeHistogramMaxYAxis) { - this.responseTimeHistogramMaxYAxis = responseTimeHistogramMaxYAxis; - } - - public int getResponseTimeHistogramNumBins() { - return responseTimeHistogramNumBins; - } - - public void setResponseTimeHistogramNumBins(int responseTimeHistogramNumBins) { - this.responseTimeHistogramNumBins = responseTimeHistogramNumBins; - } - - public int getResponseTimeHistogramNumDecimalPoints() { - return responseTimeHistogramNumDecimalPoints; - } - - public void setResponseTimeHistogramNumDecimalPoints(int responseTimeHistogramNumDecimalPoints) { - this.responseTimeHistogramNumDecimalPoints = responseTimeHistogramNumDecimalPoints; - } - - public String getTpsHistogramLabel() { - return tpsHistogramLabel; - } - - public void setTpsHistogramLabel(String tpsHistogramLabel) { - this.tpsHistogramLabel = tpsHistogramLabel; - } - - public long getTpsHistogramMaxYAxis() { - return tpsHistogramMaxYAxis; - } - - public void setTpsHistogramMaxYAxis(long tpsHistogramMaxYAxis) { - this.tpsHistogramMaxYAxis = tpsHistogramMaxYAxis; - } - - public int getTpsHistogramNumBins() { - return tpsHistogramNumBins; - } - - public void setTpsHistogramNumBins(int tpsHistogramNumBins) { - this.tpsHistogramNumBins = tpsHistogramNumBins; - } - - public int getTpsHistogramNumDecimalPoints() { - return tpsHistogramNumDecimalPoints; - } - - public void setTpsHistogramNumDecimalPoints(int tpsHistogramNumDecimalPoints) { - this.tpsHistogramNumDecimalPoints = tpsHistogramNumDecimalPoints; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/config/SynchronizerConfiguration.java b/src/main/java/org/onap/aai/sparky/synchronizer/config/SynchronizerConfiguration.java deleted file mode 100644 index 8762a0f..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/config/SynchronizerConfiguration.java +++ /dev/null @@ -1,544 +0,0 @@ -/** - * 
============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.config; - -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.text.ParseException; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Date; -import java.util.List; -import java.util.Properties; -import java.util.TimeZone; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.regex.PatternSyntaxException; - -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.util.ConfigHelper; -import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - - -/** - * The Class SynchronizerConfiguration. - */ -public class SynchronizerConfiguration { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(SynchronizerConfiguration.class); - - public static final String CONFIG_FILE = - TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "synchronizer.properties"; - - private static SynchronizerConfiguration instance; - - public static final String DEPTH_MODIFIER = "?depth=0"; - public static final String DEPTH_ALL_MODIFIER = "?depth=all"; - public static final String DEPTH_AND_NODES_ONLY_MODIFIER = "?depth=0&nodes-only"; - public static final String NODES_ONLY_MODIFIER = "?nodes-only"; - - public static SynchronizerConfiguration getConfig() throws Exception { - - if (instance == null) { - instance = new SynchronizerConfiguration(); - } - - return instance; - } - - /** - * Instantiates a new synchronizer configuration. - */ - public SynchronizerConfiguration() - throws NumberFormatException, PatternSyntaxException, ParseException { - Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE); - initialize(props); - } - - public SynchronizerConfiguration(Properties props) - throws NumberFormatException, PatternSyntaxException, ParseException { - initialize(props); - } - - /** - * Initialize. - * - * @throws Exception the exception - */ - protected void initialize(Properties props) - throws NumberFormatException, PatternSyntaxException, ParseException { - - // parse config for startup sync - try { - syncTaskInitialDelayInMs = - Integer.parseInt(props.getProperty("synchronizer.syncTask.initialDelayInMs", - SynchronizerConstants.DEFAULT_INITIAL_DELAY_IN_MS)); - if (syncTaskInitialDelayInMs < 0) { - throw new NumberFormatException("Error. 
Sync Task Delay has to be positive"); - } - } catch (NumberFormatException exc) { - this.setConfigOkForStartupSync(false); - syncTaskInitialDelayInMs = SynchronizerConstants.DEFAULT_CONFIG_ERROR_INT_VALUE; - String message = "Invalid configuration for synchronizer parameter:" - + " 'synchronizer.syncTask.initialDelayInMs'"; - LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message); - } - - // parse config for periodic sync - try { - syncTaskFrequencyInDay = - Integer.parseInt(props.getProperty("synchronizer.syncTask.taskFrequencyInDay", - SynchronizerConstants.DEFAULT_TASK_FREQUENCY_IN_DAY)); - if (syncTaskFrequencyInDay < 0) { - throw new NumberFormatException("Error. Sync Task Frequency has to be positive"); - } - } catch (NumberFormatException exc) { - this.setConfigOkForPeriodicSync(false); - syncTaskFrequencyInDay = SynchronizerConstants.DEFAULT_CONFIG_ERROR_INT_VALUE; - String message = "Invalid configuration for synchronizer parameter:" - + " 'synchronizer.syncTask.taskFrequencyInDay'"; - LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message); - } - - try { - syncTaskStartTime = props.getProperty("synchronizer.syncTask.startTimestamp", - SynchronizerConstants.DEFAULT_START_TIMESTAMP); // Default 05:00:00 UTC - Pattern pattern = Pattern.compile(SynchronizerConstants.TIMESTAMP24HOURS_PATTERN); - Matcher matcher = pattern.matcher(syncTaskStartTime); - if (!matcher.matches()) { - throw new PatternSyntaxException("Pattern Mismatch", - "The erroneous pattern is not available", -1); - } - - List timestampVal = Arrays.asList(syncTaskStartTime.split(" ")); - - if (timestampVal.size() == SynchronizerConstants.COMPONENTS_IN_TIMESTAMP) { - // Need both time and timezone offset - syncTaskStartTimeTimeZone = timestampVal - .get(SynchronizerConstants.IDX_TIMEZONE_IN_TIMESTAMP).replaceAll("UTC", "GMT"); - - String time = timestampVal.get(SynchronizerConstants.IDX_TIME_IN_TIMESTAMP); - DateFormat format = new SimpleDateFormat("HH:mm:ss"); - Date date = format.parse(time); - Calendar calendar = Calendar.getInstance(); - calendar.setTime(date); - - syncTaskStartTimeHr = calendar.get(Calendar.HOUR_OF_DAY); - syncTaskStartTimeMin = calendar.get(Calendar.MINUTE); - syncTaskStartTimeSec = calendar.get(Calendar.SECOND); - } else { - LOG.info(AaiUiMsgs.SYNC_START_TIME); - } - } catch (ParseException exc) { - this.setConfigOkForPeriodicSync(false); - String message = "Invalid configuration for synchronizer parameter:" - + " 'synchronizer.syncTask.startTimestamp'"; - LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message); - } - - scrollContextTimeToLiveInMinutes = - Integer.parseInt(props.getProperty("synchronizer.scrollContextTimeToLiveInMinutes", "5")); - numScrollContextItemsToRetrievePerRequest = Integer.parseInt( - props.getProperty("synchronizer.numScrollContextItemsToRetrievePerRequest", "5000")); - - resolverProgressLogFrequencyInMs = Long - .parseLong(props.getProperty("synchronizer.resolver.progressLogFrequencyInMs", "60000")); - resolverQueueMonitorFrequencyInMs = Long - .parseLong(props.getProperty("synchronizer.resolver.queueMonitorFrequencyInMs", "1000")); - - indexIntegrityValidatorEnabled = Boolean - .parseBoolean(props.getProperty("synchronizer.indexIntegrityValidator.enabled", "false")); - indexIntegrityValidatorFrequencyInMs = Long.parseLong( - props.getProperty("synchronizer.indexIntegrityValidatorFrequencyInMs", "300000")); - - displayVerboseQueueManagerStats = Boolean - .parseBoolean(props.getProperty("synchronizer.resolver.displayVerboseQueueManagerStats")); - - 
resourceNotFoundErrorsSupressed = - Boolean.parseBoolean(props.getProperty("synchronizer.suppressResourceNotFoundErrors")); - - nodesOnlyModifierEnabled = - Boolean.parseBoolean(props.getProperty("synchronizer.applyNodesOnlyModifier")); - - autosuggestSynchronizationEnabled = Boolean - .parseBoolean(props.getProperty("synchronizer.autosuggestSynchronizationEnabled", "true")); - - if (LOG.isDebugEnabled()) { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, this.toString()); - } - } - - public boolean isNodesOnlyModifierEnabled() { - return nodesOnlyModifierEnabled; - } - - public void setNodesOnlyModifierEnabled(boolean nodesOnlyModifierEnabled) { - this.nodesOnlyModifierEnabled = nodesOnlyModifierEnabled; - } - - public int getSyncTaskInitialDelayInMs() { - return syncTaskInitialDelayInMs; - } - - public void setSyncTaskInitialDelayInMs(int syncTaskInitialDelayInMs) { - this.syncTaskInitialDelayInMs = syncTaskInitialDelayInMs; - } - - public boolean isDisplayVerboseQueueManagerStats() { - return displayVerboseQueueManagerStats; - } - - public void setDisplayVerboseQueueManagerStats(boolean displayVerboseQueueManagerStats) { - this.displayVerboseQueueManagerStats = displayVerboseQueueManagerStats; - } - - private int syncTaskInitialDelayInMs; - - private int syncTaskFrequencyInMs; - - private int scrollContextTimeToLiveInMinutes; - - private int numScrollContextItemsToRetrievePerRequest; - - private long resolverProgressLogFrequencyInMs; - - private long resolverQueueMonitorFrequencyInMs; - - private boolean indexIntegrityValidatorEnabled; - - private long indexIntegrityValidatorFrequencyInMs; - - private int syncTaskFrequencyInDay; - - private String syncTaskStartTime; - - private int syncTaskStartTimeHr = 5; // for default sync start time - - private int syncTaskStartTimeMin; - - private int syncTaskStartTimeSec; - - private String syncTaskStartTimeTimeZone; - - private boolean displayVerboseQueueManagerStats; - - private boolean resourceNotFoundErrorsSupressed; - - private boolean nodesOnlyModifierEnabled; - - private boolean autosuggestSynchronizationEnabled; - - private boolean configOkForStartupSync = true; - - private boolean configOkForPeriodicSync = true; - - public boolean isResourceNotFoundErrorsSupressed() { - return resourceNotFoundErrorsSupressed; - } - - public void setResourceNotFoundErrorsSupressed(boolean resourceNotFoundErrorsSupressed) { - this.resourceNotFoundErrorsSupressed = resourceNotFoundErrorsSupressed; - } - - public int getScrollContextTimeToLiveInMinutes() { - return scrollContextTimeToLiveInMinutes; - } - - public void setScrollContextTimeToLiveInMinutes(int scrollContextTimeToLiveInMinutes) { - this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes; - } - - public int getNumScrollContextItemsToRetrievePerRequest() { - return numScrollContextItemsToRetrievePerRequest; - } - - public void setNumScrollContextItemsToRetrievePerRequest( - int numScrollContextItemsToRetrievePerRequest) { - this.numScrollContextItemsToRetrievePerRequest = numScrollContextItemsToRetrievePerRequest; - } - - public int getSyncTaskFrequencyInDay() { - return syncTaskFrequencyInDay; - } - - public void setSyncTaskFrequencyInDay(int syncTaskFrequencyInDay) { - this.syncTaskFrequencyInDay = syncTaskFrequencyInDay; - } - - public String getSyncTaskStartTime() { - return syncTaskStartTime; - } - - public void setSyncTaskStartTime(String syncTaskStartTime) { - this.syncTaskStartTime = syncTaskStartTime; - } - - public int getSyncTaskStartTimeHr() { - return syncTaskStartTimeHr; - } - - 
public void setSyncTaskStartTimeHr(int syncTaskStartTimeHr) { - this.syncTaskStartTimeHr = syncTaskStartTimeHr; - } - - public int getSyncTaskStartTimeMin() { - return syncTaskStartTimeMin; - } - - public void setSyncTaskStartTimeMin(int syncTaskStartTimeMin) { - this.syncTaskStartTimeMin = syncTaskStartTimeMin; - } - - public int getSyncTaskStartTimeSec() { - return syncTaskStartTimeSec; - } - - public void setSyncTaskStartTimeSec(int syncTaskStartTimeSec) { - this.syncTaskStartTimeSec = syncTaskStartTimeSec; - } - - public String getSyncTaskStartTimeTimeZone() { - return syncTaskStartTimeTimeZone; - } - - public void setSyncTaskStartTimeTimeZone(String syncTaskStartTimeTimeZone) { - this.syncTaskStartTimeTimeZone = syncTaskStartTimeTimeZone; - } - - public int getSyncTaskFrequencyInMs() { - return syncTaskFrequencyInMs; - } - - public void setSyncTaskFrequencyInMs(int syncTaskFrequencyInMs) { - this.syncTaskFrequencyInMs = syncTaskFrequencyInMs; - } - - public long getResolverProgressLogFrequencyInMs() { - return resolverProgressLogFrequencyInMs; - } - - public void setResolverProgressLogFrequencyInMs(long resolverProgressLogFrequencyInMs) { - this.resolverProgressLogFrequencyInMs = resolverProgressLogFrequencyInMs; - } - - public long getResolverQueueMonitorFrequencyInMs() { - return resolverQueueMonitorFrequencyInMs; - } - - public void setResolverQueueMonitorFrequencyInMs(long resolverQueueMonitorFrequencyInMs) { - this.resolverQueueMonitorFrequencyInMs = resolverQueueMonitorFrequencyInMs; - } - - public boolean isIndexIntegrityValidatorEnabled() { - return indexIntegrityValidatorEnabled; - } - - public void setIndexIntegrityValidatorEnabled(boolean indexIntegrityValidatorEnabled) { - this.indexIntegrityValidatorEnabled = indexIntegrityValidatorEnabled; - } - - public long getIndexIntegrityValidatorFrequencyInMs() { - return indexIntegrityValidatorFrequencyInMs; - } - - public void setIndexIntegrityValidatorFrequencyInMs(long indexIntegrityValidatorFrequencyInMs) { - this.indexIntegrityValidatorFrequencyInMs = indexIntegrityValidatorFrequencyInMs; - } - - public boolean isConfigOkForStartupSync() { - return configOkForStartupSync; - } - - public void setConfigOkForStartupSync(boolean configOkForStartupSync) { - this.configOkForStartupSync = configOkForStartupSync; - } - - public boolean isConfigOkForPeriodicSync() { - return configOkForPeriodicSync; - } - - public void setConfigOkForPeriodicSync(boolean configOkForPeriodicSync) { - this.configOkForPeriodicSync = configOkForPeriodicSync; - } - - public boolean isConfigDisabledForInitialSync() { - return syncTaskInitialDelayInMs == SynchronizerConstants.DELAY_NO_STARTUP_SYNC_IN_MS; - } - - public boolean isAutosuggestSynchronizationEnabled() { - return autosuggestSynchronizationEnabled; - } - - public void setAutosuggestSynchronizationEnabled(boolean autosuggestSynchronizationEnabled) { - this.autosuggestSynchronizationEnabled = autosuggestSynchronizationEnabled; - } - - public Calendar getTargetSyncTime() { - - TimeZone tz = TimeZone.getTimeZone(getSyncTaskStartTimeTimeZone()); - Calendar targetSyncTime = Calendar.getInstance(tz); - - targetSyncTime.set(Calendar.HOUR_OF_DAY, getSyncTaskStartTimeHr()); - targetSyncTime.set(Calendar.MINUTE, getSyncTaskStartTimeMin()); - targetSyncTime.set(Calendar.SECOND, getSyncTaskStartTimeSec()); - - return targetSyncTime; - - } - - public long getDefaultInitialSyncDelayInMs(Calendar timeNow) { - - int taskFrequencyInDays = getSyncTaskFrequencyInDay(); - - long nextSyncTimeInMs = 
getNextSyncTime(getTargetSyncTime(), timeNow.getTimeInMillis(), - taskFrequencyInDays * 86400); - - /* - * If the current time is after the scheduled start time, then delay by the initial task - * delay configuration value - */ - long delayUntilNextSyncInMs = - Math.max(getSyncTaskInitialDelayInMs(), nextSyncTimeInMs - timeNow.getTimeInMillis()); - - return delayUntilNextSyncInMs; - - } - - public long getNextSyncTime(Calendar syncTime, int taskFrequencyInSeconds) { - - TimeZone tz = TimeZone.getTimeZone(getSyncTaskStartTimeTimeZone()); - Calendar timeNow = Calendar.getInstance(tz); - - return getNextSyncTime(syncTime, timeNow.getTimeInMillis(), taskFrequencyInSeconds); - } - - /** - * Gets the next sync time. - * - * @param syncTime the scheduled sync time - * @param timeNowInMs the time now in ms - * @param taskFrequencyInSeconds the task period in seconds - * @return the next sync time - */ - public long getNextSyncTime(Calendar syncTime, long timeNowInMs, int taskFrequencyInSeconds) { - if (taskFrequencyInSeconds == 0) { - return 0; - } else if (timeNowInMs > syncTime.getTimeInMillis()) { - - /* - * If current time is after the scheduled sync start time, then we'll skip ahead to the next - * sync time period - */ - - syncTime.add(Calendar.SECOND, taskFrequencyInSeconds); - } - - return syncTime.getTimeInMillis(); - } - - /** - * @return the instance - */ - public static SynchronizerConfiguration getInstance() { - return instance; - } - - /** - * @param instance the instance to set - */ - public static void setInstance(SynchronizerConfiguration instance) { - SynchronizerConfiguration.instance = instance; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - - /** - * @return the configFile - */ - public static String getConfigFile() { - return CONFIG_FILE; - } - - /** - * @return the depthModifier - */ - public static String getDepthModifier() { - return DEPTH_MODIFIER; - } - - /** - * @return the depthAllModifier - */ - public static String getDepthAllModifier() { - return DEPTH_ALL_MODIFIER; - } - - /** - * @return the depthAndNodesOnlyModifier - */ - public static String getDepthAndNodesOnlyModifier() { - return DEPTH_AND_NODES_ONLY_MODIFIER; - } - - /** - * @return the nodesOnlyModifier - */ - public static String getNodesOnlyModifier() { - return NODES_ONLY_MODIFIER; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "SynchronizerConfiguration [syncTaskInitialDelayInMs=" + syncTaskInitialDelayInMs - + ", syncTaskFrequencyInMs=" + syncTaskFrequencyInMs + ", scrollContextTimeToLiveInMinutes=" - + scrollContextTimeToLiveInMinutes + ", numScrollContextItemsToRetrievePerRequest=" - + numScrollContextItemsToRetrievePerRequest + ", resolverProgressLogFrequencyInMs=" - + resolverProgressLogFrequencyInMs + ", resolverQueueMonitorFrequencyInMs=" - + resolverQueueMonitorFrequencyInMs + ", indexIntegrityValidatorEnabled=" - + indexIntegrityValidatorEnabled + ", indexIntegrityValidatorFrequencyInMs=" - + indexIntegrityValidatorFrequencyInMs + ", syncTaskFrequencyInDay=" - + syncTaskFrequencyInDay + ", syncTaskStartTime=" + syncTaskStartTime - + ", syncTaskStartTimeHr=" + syncTaskStartTimeHr + ", syncTaskStartTimeMin=" - + syncTaskStartTimeMin + ", syncTaskStartTimeSec=" + syncTaskStartTimeSec - + ", syncTaskStartTimeTimeZone=" + syncTaskStartTimeTimeZone - + ", displayVerboseQueueManagerStats=" + displayVerboseQueueManagerStats - + ", resourceNotFoundErrorsSupressed=" + resourceNotFoundErrorsSupressed - + 
", nodesOnlyModifierEnabled=" + nodesOnlyModifierEnabled + ", configOKForStartupSync=" - + configOkForStartupSync + ", configOKForPeriodicSync=" + configOkForPeriodicSync - + ", autosuggestSynchronizationEnabled=" + autosuggestSynchronizationEnabled + "]"; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/config/SynchronizerConstants.java b/src/main/java/org/onap/aai/sparky/synchronizer/config/SynchronizerConstants.java deleted file mode 100644 index a548c30..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/config/SynchronizerConstants.java +++ /dev/null @@ -1,60 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.config; - -import java.util.Date; - -/** - * The Class SynchronizerConstants. - */ -public final class SynchronizerConstants { - // Error values for invalid user input - public static final int DEFAULT_CONFIG_ERROR_INT_VALUE = Integer.MAX_VALUE; - public static final Date DEFAULT_CONFIG_ERROR_DATE_VALUE = new Date(Long.MAX_VALUE); - - // constants for scheduling synchronizer - public static final int COMPONENTS_IN_TIMESTAMP = 2; - public static final String DEFAULT_INITIAL_DELAY_IN_MS = "0"; - public static final String DEFAULT_TASK_FREQUENCY_IN_DAY = "0"; - public static final String DEFAULT_START_TIMESTAMP = "05:00:00 UTC"; - public static final long DELAY_NO_STARTUP_SYNC_IN_MS = 0; - public static final long DELAY_NO_PERIODIC_SYNC_IN_MS = 0; - public static final int IDX_TIME_IN_TIMESTAMP = 0; - public static final int IDX_TIMEZONE_IN_TIMESTAMP = 1; - public static final long MILLISEC_IN_A_MIN = 60000; - public static final long MILLISEC_IN_A_DAY = 24 * 60 * 60 * 1000; - public static final String TIME_STD = "GMT"; - public static final String TIME_CONFIG_STD = "UTC"; - public static final String TIMESTAMP24HOURS_PATTERN = - "([01]?[0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9] UTC[+|-][0-5][0-9]:[0-5][0-9]"; - - - - public static final String DEFAULT_SCROLL_CTX_TIME_TO_LIVE_IN_MIN = "5"; - public static final String DEFAULT_NUM_SCROLL_CTX_ITEMS_TO_RETRIEVE_PER_REQ = "5000"; - - /** - * Instantiates a new synchronizer constants. 
- */ - private SynchronizerConstants() {} -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/config/TaskProcessorConfig.java b/src/main/java/org/onap/aai/sparky/synchronizer/config/TaskProcessorConfig.java deleted file mode 100644 index 73f4f77..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/config/TaskProcessorConfig.java +++ /dev/null @@ -1,325 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.config; - -import java.util.Properties; - -/** - * TODO: Fill in description. - * - * @author davea. - */ -public class TaskProcessorConfig { - /** - * Initialize from properties. - * - * @param props the props - */ - public void initializeFromProperties(Properties props) { - - if (props == null) { - return; - } - - maxConcurrentWorkers = Integer.parseInt(props.getProperty("maxConcurrentWorkers")); - transactionRateControllerEnabled = - Boolean.parseBoolean(props.getProperty("transactionRateControllerEnabled")); - numSamplesPerThreadForRunningAverage = - Integer.parseInt(props.getProperty("numSamplesPerThreadForRunningAverage")); - targetTps = Double.parseDouble(props.getProperty("targetTPS")); - bytesHistogramLabel = props.getProperty("bytesHistogramLabel"); - bytesHistogramMaxYAxis = Long.parseLong(props.getProperty("bytesHistogramMaxYAxis")); - bytesHistogramNumBins = Integer.parseInt(props.getProperty("bytesHistogramNumBins")); - bytesHistogramNumDecimalPoints = - Integer.parseInt(props.getProperty("bytesHistogramNumDecimalPoints")); - queueLengthHistogramLabel = props.getProperty("queueLengthHistogramLabel"); - queueLengthHistogramMaxYAxis = - Long.parseLong(props.getProperty("queueLengthHistogramMaxYAxis")); - queueLengthHistogramNumBins = - Integer.parseInt(props.getProperty("queueLengthHistogramNumBins")); - queueLengthHistogramNumDecimalPoints = - Integer.parseInt(props.getProperty("queueLengthHistogramNumDecimalPoints")); - - taskAgeHistogramLabel = props.getProperty("taskAgeHistogramLabel"); - taskAgeHistogramMaxYAxis = Long.parseLong(props.getProperty("taskAgeHistogramMaxYAxis")); - taskAgeHistogramNumBins = Integer.parseInt(props.getProperty("taskAgeHistogramNumBins")); - taskAgeHistogramNumDecimalPoints = - Integer.parseInt(props.getProperty("taskAgeHistogramNumDecimalPoints")); - - responseTimeHistogramLabel = props.getProperty("responseTimeHistogramLabel"); - responseTimeHistogramMaxYAxis = - Long.parseLong(props.getProperty("responseTimeHistogramMaxYAxis")); - 
responseTimeHistogramNumBins = - Integer.parseInt(props.getProperty("responseTimeHistogramNumBins")); - responseTimeHistogramNumDecimalPoints = - Integer.parseInt(props.getProperty("responseTimeHistogramNumDecimalPoints")); - - tpsHistogramLabel = props.getProperty("tpsHistogramLabel"); - tpsHistogramMaxYAxis = Long.parseLong(props.getProperty("tpsHistogramMaxYAxis")); - tpsHistogramNumBins = Integer.parseInt(props.getProperty("tpsHistogramNumBins")); - tpsHistogramNumDecimalPoints = - Integer.parseInt(props.getProperty("tpsHistogramNumDecimalPoints")); - - } - - private int maxConcurrentWorkers; - - private boolean transactionRateControllerEnabled; - - private int numSamplesPerThreadForRunningAverage; - - private double targetTps; - - private String bytesHistogramLabel; - - private long bytesHistogramMaxYAxis; - - private int bytesHistogramNumBins; - - private int bytesHistogramNumDecimalPoints; - - private String queueLengthHistogramLabel; - - private long queueLengthHistogramMaxYAxis; - - private int queueLengthHistogramNumBins; - - private int queueLengthHistogramNumDecimalPoints; - - private String taskAgeHistogramLabel; - - private long taskAgeHistogramMaxYAxis; - - private int taskAgeHistogramNumBins; - - private int taskAgeHistogramNumDecimalPoints; - - private String responseTimeHistogramLabel; - - private long responseTimeHistogramMaxYAxis; - - private int responseTimeHistogramNumBins; - - private int responseTimeHistogramNumDecimalPoints; - - private String tpsHistogramLabel; - - private long tpsHistogramMaxYAxis; - - private int tpsHistogramNumBins; - - private int tpsHistogramNumDecimalPoints; - - public String getBytesHistogramLabel() { - return bytesHistogramLabel; - } - - public void setBytesHistogramLabel(String bytesHistogramLabel) { - this.bytesHistogramLabel = bytesHistogramLabel; - } - - public long getBytesHistogramMaxYAxis() { - return bytesHistogramMaxYAxis; - } - - public void setBytesHistogramMaxYAxis(long bytesHistogramMaxYAxis) { - this.bytesHistogramMaxYAxis = bytesHistogramMaxYAxis; - } - - public int getBytesHistogramNumBins() { - return bytesHistogramNumBins; - } - - public void setBytesHistogramNumBins(int bytesHistogramNumBins) { - this.bytesHistogramNumBins = bytesHistogramNumBins; - } - - public int getBytesHistogramNumDecimalPoints() { - return bytesHistogramNumDecimalPoints; - } - - public void setBytesHistogramNumDecimalPoints(int bytesHistogramNumDecimalPoints) { - this.bytesHistogramNumDecimalPoints = bytesHistogramNumDecimalPoints; - } - - public String getQueueLengthHistogramLabel() { - return queueLengthHistogramLabel; - } - - public void setQueueLengthHistogramLabel(String queueLengthHistogramLabel) { - this.queueLengthHistogramLabel = queueLengthHistogramLabel; - } - - public long getQueueLengthHistogramMaxYAxis() { - return queueLengthHistogramMaxYAxis; - } - - public void setQueueLengthHistogramMaxYAxis(long queueLengthHistogramMaxYAxis) { - this.queueLengthHistogramMaxYAxis = queueLengthHistogramMaxYAxis; - } - - public int getQueueLengthHistogramNumBins() { - return queueLengthHistogramNumBins; - } - - public void setQueueLengthHistogramNumBins(int queueLengthHistogramNumBins) { - this.queueLengthHistogramNumBins = queueLengthHistogramNumBins; - } - - public int getQueueLengthHistogramNumDecimalPoints() { - return queueLengthHistogramNumDecimalPoints; - } - - public void setQueueLengthHistogramNumDecimalPoints(int queueLengthHistogramNumDecimalPoints) { - this.queueLengthHistogramNumDecimalPoints = queueLengthHistogramNumDecimalPoints; - } 
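The property keys read by the removed initializeFromProperties(...) above can be exercised as in the sketch below. The key names come from the deleted parser; the values are invented examples. Because the parser applies Integer/Long/Double parsing with no defaults, every key it reads must be present in the Properties object:

    import java.util.Properties;
    import org.onap.aai.sparky.synchronizer.config.TaskProcessorConfig; // class removed by this patch

    public class TaskProcessorConfigExample {
      public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("maxConcurrentWorkers", "5");
        props.setProperty("transactionRateControllerEnabled", "false");
        props.setProperty("numSamplesPerThreadForRunningAverage", "100");
        props.setProperty("targetTPS", "100");
        // Each histogram family (bytes, queueLength, taskAge, responseTime, tps)
        // uses the same four keys.
        for (String family : new String[] {"bytes", "queueLength", "taskAge", "responseTime", "tps"}) {
          props.setProperty(family + "HistogramLabel", family);
          props.setProperty(family + "HistogramMaxYAxis", "1000000");
          props.setProperty(family + "HistogramNumBins", "20");
          props.setProperty(family + "HistogramNumDecimalPoints", "2");
        }
        new TaskProcessorConfig().initializeFromProperties(props);
      }
    }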
- - public boolean isTransactionRateControllerEnabled() { - return transactionRateControllerEnabled; - } - - public void setTransactionRateControllerEnabled(boolean transactionRateControllerEnabled) { - this.transactionRateControllerEnabled = transactionRateControllerEnabled; - } - - public int getNumSamplesPerThreadForRunningAverage() { - return numSamplesPerThreadForRunningAverage; - } - - public void setNumSamplesPerThreadForRunningAverage(int numSamplesPerThreadForRunningAverage) { - this.numSamplesPerThreadForRunningAverage = numSamplesPerThreadForRunningAverage; - } - - public double getTargetTps() { - return targetTps; - } - - public void setTargetTps(double targetTps) { - this.targetTps = targetTps; - } - - public int getMaxConcurrentWorkers() { - return maxConcurrentWorkers; - } - - public void setMaxConcurrentWorkers(int maxConcurrentWorkers) { - this.maxConcurrentWorkers = maxConcurrentWorkers; - } - - public String getTaskAgeHistogramLabel() { - return taskAgeHistogramLabel; - } - - public void setTaskAgeHistogramLabel(String taskAgeHistogramLabel) { - this.taskAgeHistogramLabel = taskAgeHistogramLabel; - } - - public long getTaskAgeHistogramMaxYAxis() { - return taskAgeHistogramMaxYAxis; - } - - public void setTaskAgeHistogramMaxYAxis(long taskAgeHistogramMaxYAxis) { - this.taskAgeHistogramMaxYAxis = taskAgeHistogramMaxYAxis; - } - - public int getTaskAgeHistogramNumBins() { - return taskAgeHistogramNumBins; - } - - public void setTaskAgeHistogramNumBins(int taskAgeHistogramNumBins) { - this.taskAgeHistogramNumBins = taskAgeHistogramNumBins; - } - - public int getTaskAgeHistogramNumDecimalPoints() { - return taskAgeHistogramNumDecimalPoints; - } - - public void setTaskAgeHistogramNumDecimalPoints(int taskAgeHistogramNumDecimalPoints) { - this.taskAgeHistogramNumDecimalPoints = taskAgeHistogramNumDecimalPoints; - } - - public String getResponseTimeHistogramLabel() { - return responseTimeHistogramLabel; - } - - public void setResponseTimeHistogramLabel(String responseTimeHistogramLabel) { - this.responseTimeHistogramLabel = responseTimeHistogramLabel; - } - - public long getResponseTimeHistogramMaxYAxis() { - return responseTimeHistogramMaxYAxis; - } - - public void setResponseTimeHistogramMaxYAxis(long responseTimeHistogramMaxYAxis) { - this.responseTimeHistogramMaxYAxis = responseTimeHistogramMaxYAxis; - } - - public int getResponseTimeHistogramNumBins() { - return responseTimeHistogramNumBins; - } - - public void setResponseTimeHistogramNumBins(int responseTimeHistogramNumBins) { - this.responseTimeHistogramNumBins = responseTimeHistogramNumBins; - } - - public int getResponseTimeHistogramNumDecimalPoints() { - return responseTimeHistogramNumDecimalPoints; - } - - public void setResponseTimeHistogramNumDecimalPoints(int responseTimeHistogramNumDecimalPoints) { - this.responseTimeHistogramNumDecimalPoints = responseTimeHistogramNumDecimalPoints; - } - - public String getTpsHistogramLabel() { - return tpsHistogramLabel; - } - - public void setTpsHistogramLabel(String tpsHistogramLabel) { - this.tpsHistogramLabel = tpsHistogramLabel; - } - - public long getTpsHistogramMaxYAxis() { - return tpsHistogramMaxYAxis; - } - - public void setTpsHistogramMaxYAxis(long tpsHistogramMaxYAxis) { - this.tpsHistogramMaxYAxis = tpsHistogramMaxYAxis; - } - - public int getTpsHistogramNumBins() { - return tpsHistogramNumBins; - } - - public void setTpsHistogramNumBins(int tpsHistogramNumBins) { - this.tpsHistogramNumBins = tpsHistogramNumBins; - } - - public int getTpsHistogramNumDecimalPoints() 
{ - return tpsHistogramNumDecimalPoints; - } - - public void setTpsHistogramNumDecimalPoints(int tpsHistogramNumDecimalPoints) { - this.tpsHistogramNumDecimalPoints = tpsHistogramNumDecimalPoints; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/AggregationEntity.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/AggregationEntity.java deleted file mode 100644 index b2958b1..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/AggregationEntity.java +++ /dev/null @@ -1,146 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.entity; - -import java.util.HashMap; -import java.util.Map; - -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.util.NodeUtils; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - -/** - * The Class AggregationEntity. - */ -public class AggregationEntity extends IndexableEntity implements IndexDocument { - private Map attributes = new HashMap(); - protected ObjectMapper mapper = new ObjectMapper(); - - /** - * Instantiates a new aggregation entity. - */ - public AggregationEntity() { - super(); - } - - /** - * Instantiates a new aggregation entity. - * - * @param loader the loader - */ - public AggregationEntity(OxmModelLoader loader) { - super(loader); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.entity.IndexDocument#deriveFields() - */ - @Override - public void deriveFields() { - - /* - * We'll try and create a unique identity key that we can use for differencing the previously - * imported record sets as we won't have granular control of what is created/removed and when. - * The best we can hope for is identification of resources by generated Id until the - * Identity-Service UUID is tagged against all resources, then we can use that instead. 
- */ - this.id = NodeUtils.generateUniqueShaDigest(link); - } - - public void copyAttributeKeyValuePair(Map map) { - for (String key : map.keySet()) { - if (!key.equalsIgnoreCase("relationship-list")) { // ignore relationship data which is not - // required in aggregation - this.attributes.put(key, map.get(key).toString()); // not sure if entity attribute can - // contain an object as value - } - } - } - - public void addAttributeKeyValuePair(String key, String value) { - this.attributes.put(key, value); - } - - @Override - public String getIndexDocumentJson() { - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("link", this.getLink()); - rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); - for (String key : this.attributes.keySet()) { - rootNode.put(key, this.attributes.get(key)); - } - return rootNode.toString(); - } - - /** - * @return the attributes - */ - public Map getAttributes() { - return attributes; - } - - /** - * @param attributes the attributes to set - */ - public void setAttributes(Map attributes) { - this.attributes = attributes; - } - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "") - + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " - : "") - + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") - + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + "]"; - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/AggregationSuggestionEntity.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/AggregationSuggestionEntity.java deleted file mode 100644 index 412798e..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/AggregationSuggestionEntity.java +++ /dev/null @@ -1,119 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.synchronizer.entity; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.util.ArrayList; -import java.util.List; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.onap.aai.sparky.util.NodeUtils; - -public class AggregationSuggestionEntity extends IndexableEntity implements IndexDocument { - - private List inputs = new ArrayList(); - - /** - * @return the inputs - */ - public List getInputs() { - return inputs; - } - - /** - * @param inputs the inputs to set - */ - public void setInputs(List inputs) { - this.inputs = inputs; - } - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - /** - * @return the outputString - */ - public String getOutputString() { - return outputString; - } - - private final String outputString = "VNFs"; - protected ObjectMapper mapper = new ObjectMapper(); - - public AggregationSuggestionEntity() { - super(); - inputs.add("VNFs"); - inputs.add("generic-vnfs"); - } - - @Override - public void deriveFields() { - this.id = NodeUtils.generateUniqueShaDigest(this.outputString); - } - - @Override - public String getIndexDocumentJson() { - - JSONArray inputArray = new JSONArray(); - for (String input : inputs) { - input = input.replace(",", ""); - input = input.replace("[", ""); - input = input.replace("]", ""); - inputArray.put(input); - } - - JSONObject entitySuggest = new JSONObject(); - entitySuggest.put("input", inputArray); - entitySuggest.put("output", this.outputString); - entitySuggest.put("weight", 100); - - JSONObject payloadNode = new JSONObject(); - entitySuggest.put("payload", payloadNode); - - JSONObject rootNode = new JSONObject(); - rootNode.put("entity_suggest", entitySuggest); - - return rootNode.toString(); - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexDocument.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexDocument.java deleted file mode 100644 index 0633da4..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexDocument.java +++ /dev/null @@ -1,42 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
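For reference, getIndexDocumentJson() in the removed AggregationSuggestionEntity above always emits the same fixed document (key order may vary, since org.json does not preserve insertion order):

    {
      "entity_suggest": {
        "input": ["VNFs", "generic-vnfs"],
        "output": "VNFs",
        "weight": 100,
        "payload": {}
      }
    }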
- */ -package org.onap.aai.sparky.synchronizer.entity; - -import com.fasterxml.jackson.databind.node.ObjectNode; - -/** - * The Interface IndexDocument. - */ -public interface IndexDocument { - - /** - * Derive fields. - */ - public void deriveFields(); - - public String getIndexDocumentJson(); - - public String getId(); - - public ObjectNode getBulkImportEntity(); -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexableCrossEntityReference.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexableCrossEntityReference.java deleted file mode 100644 index 3c454f6..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexableCrossEntityReference.java +++ /dev/null @@ -1,163 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.entity; - -import java.util.ArrayList; - -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.util.NodeUtils; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - - -/** - * The Class IndexableCrossEntityReference. 
- */ - -public class IndexableCrossEntityReference extends IndexableEntity implements IndexDocument { - - protected String crossReferenceEntityValues; - - /** - * @return the crossReferenceEntityValues - */ - public String getCrossReferenceEntityValues() { - return crossReferenceEntityValues; - } - - /** - * @param crossReferenceEntityValues the crossReferenceEntityValues to set - */ - public void setCrossReferenceEntityValues(String crossReferenceEntityValues) { - this.crossReferenceEntityValues = crossReferenceEntityValues; - } - - /** - * @return the crossEntityReferenceCollection - */ - public ArrayList getCrossEntityReferenceCollection() { - return crossEntityReferenceCollection; - } - - /** - * @param crossEntityReferenceCollection the crossEntityReferenceCollection to set - */ - public void setCrossEntityReferenceCollection(ArrayList crossEntityReferenceCollection) { - this.crossEntityReferenceCollection = crossEntityReferenceCollection; - } - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - protected ArrayList crossEntityReferenceCollection = new ArrayList(); - protected ObjectMapper mapper = new ObjectMapper(); - - /** - * Instantiates a new indexable cross entity reference. - */ - public IndexableCrossEntityReference() { - super(); - } - - /** - * Instantiates a new indexable cross entity reference. - * - * @param loader the loader - */ - public IndexableCrossEntityReference(OxmModelLoader loader) { - super(loader); - } - - /** - * Adds the cross entity reference value. - * - * @param crossEntityReferenceValue the cross entity reference value - */ - public void addCrossEntityReferenceValue(String crossEntityReferenceValue) { - if (!crossEntityReferenceCollection.contains(crossEntityReferenceValue)) { - crossEntityReferenceCollection.add(crossEntityReferenceValue); - } - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.entity.IndexDocument#deriveFields() - */ - @Override - public void deriveFields() { - this.id = NodeUtils.generateUniqueShaDigest(link); - this.crossReferenceEntityValues = NodeUtils.concatArray(crossEntityReferenceCollection, ";"); - } - - @Override - public String getIndexDocumentJson() { - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("entityType", this.getEntityType()); - rootNode.put("entityPrimaryKeyValue", this.getEntityPrimaryKeyValue()); - rootNode.put("crossEntityReferenceValues", crossReferenceEntityValues); - rootNode.put("link", link); - rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); - return rootNode.toString(); - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "IndexableCrossEntityReference [" - + (crossReferenceEntityValues != null - ? "crossReferenceEntityValues=" + crossReferenceEntityValues + ", " : "") - + (crossEntityReferenceCollection != null - ? "crossEntityReferenceCollection=" + crossEntityReferenceCollection + ", " : "") - + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") - + (entityType != null ? "entityType=" + entityType + ", " : "") - + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " - : "") - + (lastmodTimestamp != null ? 
"lastmodTimestamp=" + lastmodTimestamp + ", " : "") - + (link != null ? "link=" + link + ", " : "") + (loader != null ? "loader=" + loader : "") - + "]"; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexableEntity.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexableEntity.java deleted file mode 100644 index deeac35..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/IndexableEntity.java +++ /dev/null @@ -1,139 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.entity; - -import java.sql.Timestamp; -import java.text.SimpleDateFormat; - -import org.onap.aai.sparky.config.oxm.OxmModelLoader; - -/** - * The Class IndexableEntity. - */ -public abstract class IndexableEntity { - protected String id; // generated, SHA-256 digest - protected String entityType; - protected String entityPrimaryKeyValue; - protected String lastmodTimestamp; - - /** - * @return the lastmodTimestamp - */ - public String getLastmodTimestamp() { - return lastmodTimestamp; - } - - /** - * @param lastmodTimestamp the lastmodTimestamp to set - */ - public void setLastmodTimestamp(String lastmodTimestamp) { - this.lastmodTimestamp = lastmodTimestamp; - } - - /** - * @return the loader - */ - public OxmModelLoader getLoader() { - return loader; - } - - /** - * @param loader the loader to set - */ - public void setLoader(OxmModelLoader loader) { - this.loader = loader; - } - - /** - * @return the timestampFormat - */ - public static String getTimestampFormat() { - return TIMESTAMP_FORMAT; - } - - protected String link; - protected OxmModelLoader loader; - - private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; - - /** - * Instantiates a new indexable entity. - */ - public IndexableEntity() { - SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); - Timestamp timestamp = new Timestamp(System.currentTimeMillis()); - String currentFormattedTimeStamp = dateFormat.format(timestamp); - this.setEntityTimeStamp(currentFormattedTimeStamp); - } - - /** - * Instantiates a new indexable entity. 
- * - * @param loader the loader - */ - public IndexableEntity(OxmModelLoader loader) { - this(); - this.loader = loader; - } - - public String getId() { - return id; - } - - public String getEntityType() { - return entityType; - } - - public String getEntityPrimaryKeyValue() { - return entityPrimaryKeyValue; - } - - public String getEntityTimeStamp() { - return lastmodTimestamp; - } - - public void setId(String id) { - this.id = id; - } - - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - public void setEntityPrimaryKeyValue(String fieldValue) { - this.entityPrimaryKeyValue = fieldValue; - } - - public void setEntityTimeStamp(String lastmodTimestamp) { - this.lastmodTimestamp = lastmodTimestamp; - } - - public String getLink() { - return link; - } - - public void setLink(String link) { - this.link = link; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/MergableEntity.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/MergableEntity.java deleted file mode 100644 index f998872..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/MergableEntity.java +++ /dev/null @@ -1,68 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.entity; - -import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.annotation.JsonAnySetter; - -import java.util.HashMap; -import java.util.Map; - -/** - * The Class MergableEntity. - */ -public class MergableEntity { - private Map other = new HashMap(); - - /** - * @param other the other to set - */ - public void setOther(Map other) { - this.other = other; - } - - /** - * Any. - * - * @return the map - */ - @JsonAnyGetter - public Map any() { - return other; - } - - public Map getOther() { - return other; - } - - /** - * Sets the. 
- * - * @param name the name - * @param value the value - */ - @JsonAnySetter - public void set(String name, String value) { - other.put(name, value); - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/ObjectIdCollection.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/ObjectIdCollection.java deleted file mode 100644 index 158cb1d..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/ObjectIdCollection.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.entity; - -import java.util.Collection; -import java.util.List; -import java.util.concurrent.ConcurrentHashMap; - -/** - * The Class ObjectIdCollection. - */ -public class ObjectIdCollection { - - protected ConcurrentHashMap importedObjectIds = - new ConcurrentHashMap(); - - /** - * @return the importedObjectIds - */ - public ConcurrentHashMap getImportedObjectIds() { - return importedObjectIds; - } - - /** - * @param importedObjectIds the importedObjectIds to set - */ - public void setImportedObjectIds(ConcurrentHashMap importedObjectIds) { - this.importedObjectIds = importedObjectIds; - } - - public Collection getImportedObjectIdsAsValues() { - return importedObjectIds.values(); - } - - /** - * Adds the object id. - * - * @param id the id - */ - public void addObjectId(String id) { - importedObjectIds.putIfAbsent(id, id); - } - - public int getSize() { - return importedObjectIds.values().size(); - } - - /** - * Adds the all. - * - * @param items the items - */ - public void addAll(List items) { - if (items == null) { - return; - } - - items.stream().forEach((item) -> { - importedObjectIds.putIfAbsent(item, item); - }); - - } - - /** - * Clear. - */ - public void clear() { - importedObjectIds.clear(); - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/SearchableEntity.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/SearchableEntity.java deleted file mode 100644 index 08a80ea..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/SearchableEntity.java +++ /dev/null @@ -1,196 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.entity; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.util.ArrayList; -import java.util.List; - -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.util.NodeUtils; - -/** - * The Class SearchableEntity. - */ -public class SearchableEntity extends IndexableEntity implements IndexDocument { - protected List searchTagCollection = new ArrayList(); - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - /** - * @param searchTagCollection the searchTagCollection to set - */ - public void setSearchTagCollection(List searchTagCollection) { - this.searchTagCollection = searchTagCollection; - } - - /** - * @param searchTagIdCollection the searchTagIdCollection to set - */ - public void setSearchTagIdCollection(List searchTagIdCollection) { - this.searchTagIdCollection = searchTagIdCollection; - } - - /** - * @param searchTags the searchTags to set - */ - public void setSearchTags(String searchTags) { - this.searchTags = searchTags; - } - - /** - * @param searchTagIDs the searchTagIDs to set - */ - public void setSearchTagIDs(String searchTagIDs) { - this.searchTagIDs = searchTagIDs; - } - - protected List searchTagIdCollection = new ArrayList(); - protected ObjectMapper mapper = new ObjectMapper(); - - /** - * Instantiates a new searchable entity. - */ - public SearchableEntity() { - super(); - } - - /** - * Instantiates a new searchable entity. - * - * @param loader the loader - */ - public SearchableEntity(OxmModelLoader loader) { - super(loader); - } - - /* - * Generated fields, leave the settings for junit overrides - */ - protected String searchTags; // generated based on searchTagCollection values - protected String searchTagIDs; - - /** - * Generates the sha based id. - */ - public void generateId() { - this.id = NodeUtils.generateUniqueShaDigest(link); - } - - /* - * (non-Javadoc) - * - * @see org.onap.aai.sparky.synchronizer.entity.IndexDocument#deriveFields() - */ - @Override - public void deriveFields() { - - /* - * We'll try and create a unique identity key that we can use for differencing the previously - * imported record sets as we won't have granular control of what is created/removed and when. - * The best we can hope for is identification of resources by generated Id until the - * Identity-Service UUID is tagged against all resources, then we can use that instead. 
- */ - generateId(); - this.searchTags = NodeUtils.concatArray(searchTagCollection, ";"); - this.searchTagIDs = NodeUtils.concatArray(this.searchTagIdCollection, ";"); - } - - /** - * Adds the search tag with key. - * - * @param searchTag the search tag - * @param searchTagKey the key associated with the search tag (key:value) - */ - public void addSearchTagWithKey(String searchTag, String searchTagKey) { - searchTagIdCollection.add(searchTagKey); - searchTagCollection.add(searchTag); - } - - public List getSearchTagCollection() { - return searchTagCollection; - } - - public String getSearchTags() { - return searchTags; - } - - public String getSearchTagIDs() { - return searchTagIDs; - } - - public List getSearchTagIdCollection() { - return searchTagIdCollection; - } - - @Override - public String getIndexDocumentJson() { - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("entityType", this.getEntityType()); - rootNode.put("entityPrimaryKeyValue", this.getEntityPrimaryKeyValue()); - rootNode.put("searchTagIDs", this.getSearchTagIDs()); - rootNode.put("searchTags", this.getSearchTags()); - rootNode.put("link", this.getLink()); - rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); - return rootNode.toString(); - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "") - + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " - : "") - + (searchTagCollection != null ? "searchTagCollection=" + searchTagCollection + ", " : "") - + (searchTagIdCollection != null ? "searchTagIDCollection=" + searchTagIdCollection + ", " - : "") - + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") - + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") - + (searchTags != null ? "searchTags=" + searchTags + ", " : "") - + (searchTagIDs != null ? "searchTagIDs=" + searchTagIDs : "") + "]"; - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/SelfLinkDescriptor.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/SelfLinkDescriptor.java deleted file mode 100644 index 20e59ef..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/SelfLinkDescriptor.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
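Concretely, the removed SearchableEntity.getIndexDocumentJson() above produces a flat document of this shape; the field names come from the deleted code, while the values here are invented examples:

    {
      "entityType": "generic-vnf",
      "entityPrimaryKeyValue": "example-vnf-1",
      "searchTagIDs": "0;1",
      "searchTags": "example-vnf-1;ACTIVE",
      "link": "https://<aai-host>:<port>/aai/<version>/network/generic-vnfs/generic-vnf/example-vnf-1",
      "lastmodTimestamp": "2017-12-11T15:42:52.000-0500"
    }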
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.entity; - -/** - * The Class SelfLinkDescriptor. - */ -public class SelfLinkDescriptor { - private String selfLink; - private String entityType; - private String depthModifier; - - public String getDepthModifier() { - return depthModifier; - } - - public void setDepthModifier(String depthModifier) { - this.depthModifier = depthModifier; - } - - public String getSelfLink() { - return selfLink; - } - - public void setSelfLink(String selfLink) { - this.selfLink = selfLink; - } - - public String getEntityType() { - return entityType; - } - - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - public SelfLinkDescriptor(String selfLink) { - this(selfLink, null, null); - } - - /** - * Instantiates a new self link descriptor. - * - * @param selfLink the self link - * @param entityType the entity type - */ - public SelfLinkDescriptor(String selfLink, String entityType) { - this(selfLink, null, entityType); - } - - public SelfLinkDescriptor(String selfLink, String depthModifier, String entityType) { - this.selfLink = selfLink; - this.entityType = entityType; - this.depthModifier = depthModifier; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "SelfLinkDescriptor [" + (selfLink != null ? "selfLink=" + selfLink + ", " : "") - + (entityType != null ? "entityType=" + entityType + ", " : "") - + (depthModifier != null ? "depthModifier=" + depthModifier : "") + "]"; - } - -} - diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/SuggestionSearchEntity.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/SuggestionSearchEntity.java deleted file mode 100644 index d699031..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/SuggestionSearchEntity.java +++ /dev/null @@ -1,349 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.synchronizer.entity; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ObjectNode; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.json.JSONArray; -import org.json.JSONObject; -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.util.NodeUtils; - -public class SuggestionSearchEntity extends IndexableEntity implements IndexDocument { - - private String entityType; - private List suggestionConnectorWords = new ArrayList(); - private List suggestionAttributeTypes = new ArrayList(); - - /** - * @return the suggestionAttributeTypes - */ - public List getSuggestionAttributeTypes() { - return suggestionAttributeTypes; - } - - /** - * @param suggestionAttributeTypes the suggestionAttributeTypes to set - */ - public void setSuggestionAttributeTypes(List suggestionAttributeTypes) { - this.suggestionAttributeTypes = suggestionAttributeTypes; - } - - /** - * @return the suggestionTypeAliases - */ - public List getSuggestionTypeAliases() { - return suggestionTypeAliases; - } - - /** - * @param suggestionTypeAliases the suggestionTypeAliases to set - */ - public void setSuggestionTypeAliases(List suggestionTypeAliases) { - this.suggestionTypeAliases = suggestionTypeAliases; - } - - /** - * @return the suggestableAttr - */ - public List getSuggestableAttr() { - return suggestableAttr; - } - - /** - * @param suggestableAttr the suggestableAttr to set - */ - public void setSuggestableAttr(List suggestableAttr) { - this.suggestableAttr = suggestableAttr; - } - - /** - * @return the outputString - */ - public StringBuffer getOutputString() { - return outputString; - } - - /** - * @param outputString the outputString to set - */ - public void setOutputString(StringBuffer outputString) { - this.outputString = outputString; - } - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - - private List suggestionAttributeValues = new ArrayList(); - private List suggestionTypeAliases = new ArrayList(); - private List suggestionInputPermutations = new ArrayList(); - private List suggestableAttr = new ArrayList(); - private Map payload = new HashMap(); - private JSONObject payloadJsonNode = new JSONObject(); - private StringBuffer outputString = new StringBuffer(); - private String aliasToUse; - - public Map getPayload() { - return payload; - } - - public void setPayload(Map payload) { - this.payload = payload; - } - - - public JSONObject getPayloadJsonNode() { - return payloadJsonNode; - } - - public void setPayloadJsonNode(JSONObject payloadJsonNode) { - this.payloadJsonNode = payloadJsonNode; - } - - - protected ObjectMapper mapper = new ObjectMapper(); - - public SuggestionSearchEntity() { - super(); - } - - public void setSuggestableAttr(ArrayList attributes) { - for (String attribute : attributes) { - this.suggestableAttr.add(attribute); - } - } - - public void setPayloadFromResponse(JsonNode node) { - Map nodePayload = new HashMap(); - if (suggestableAttr != null) { - for (String attribute : suggestableAttr) { - if (node.get(attribute) != null) { - nodePayload.put(attribute, node.get(attribute).asText()); - } - } - this.setPayload(nodePayload); - } - } - - - public 
SuggestionSearchEntity(OxmModelLoader loader) { - super(loader); - } - - @Override - public String getEntityType() { - return entityType; - } - - @Override - public void setEntityType(String entityType) { - this.entityType = entityType; - } - - public List getSuggestionConnectorWords() { - return suggestionConnectorWords; - } - - public void setSuggestionConnectorWords(List suggestionConnectorWords) { - this.suggestionConnectorWords = suggestionConnectorWords; - } - - public List getSuggestionPropertyTypes() { - return this.suggestionAttributeTypes; - } - - public void setSuggestionPropertyTypes(List suggestionPropertyTypes) { - this.suggestionAttributeTypes = suggestionPropertyTypes; - } - - public List getSuggestionAttributeValues() { - return this.suggestionAttributeValues; - } - - public void setSuggestionAttributeValues(List suggestionAttributeValues) { - this.suggestionAttributeValues = suggestionAttributeValues; - } - - public List getSuggestionAliases() { - return this.suggestionTypeAliases; - } - - public void setSuggestionAliases(List suggestionAliases) { - this.suggestionTypeAliases = suggestionAliases; - } - - public List getSuggestionInputPermutations() { - return this.suggestionInputPermutations; - } - - public void setSuggestionInputPermutations(List permutations) { - this.suggestionInputPermutations = permutations; - } - - public void generateSuggestionInputPermutations() { - - - List entityNames = new ArrayList<>(); - entityNames.add(entityType); - HashMap desc = loader.getOxmModel().get(this.entityType); - String attr = desc.get("suggestionAliases"); - String[] suggestionAliasesArray = attr.split(","); - suggestionTypeAliases = Arrays.asList(suggestionAliasesArray); - this.setAliasToUse(suggestionAliasesArray[suggestionAliasesArray.length - 1]); - for (String alias : suggestionTypeAliases) { - entityNames.add(alias); - } - ArrayList listOfSearchSuggestionPermutations = new ArrayList<>(); - - ArrayList listToPermutate = new ArrayList<>(payload.values()); - - for (String entityName : entityNames) { - listToPermutate.add(entityName); - permutateList(listToPermutate, new ArrayList(), listToPermutate.size(), - listOfSearchSuggestionPermutations); - listToPermutate.remove(entityName); - } - suggestionInputPermutations = listOfSearchSuggestionPermutations; - } - - /** - * Generate all permutations of a list of Strings - * - * @param list - * @param permutation - * @param size - */ - private void permutateList(List list, List permutation, int size, - List listOfSearchSuggestionPermutationList) { - if (permutation.size() == size) { - StringBuilder newPermutation = new StringBuilder(); - - for (int i = 0; i < permutation.size(); i++) { - newPermutation.append(permutation.get(i)).append(" "); - } - - listOfSearchSuggestionPermutationList.add(newPermutation.toString().trim()); - - return; - } - - String[] availableItems = list.toArray(new String[0]); - - for (String i : availableItems) { - permutation.add(i); - list.remove(i); - permutateList(list, permutation, size, listOfSearchSuggestionPermutationList); - list.add(i); - permutation.remove(i); - } - } - - public boolean isSuggestableDoc() { - return this.getPayload().size() != 0; - } - - - @Override - public void deriveFields() { - - int payloadEntryCounter = 1; - for (Map.Entry payload : getPayload().entrySet()) { - // Add the payload(status) only if a valid value is present - if (payload.getValue() != null && payload.getValue().length() > 0) { - this.getPayloadJsonNode().put(payload.getKey(), payload.getValue()); - 
this.outputString.append(payload.getValue()); - if (payloadEntryCounter < getPayload().entrySet().size()) { - this.outputString.append(" and "); - } else { - this.outputString.append(" "); - } - } - payloadEntryCounter++; - } - - this.outputString.append(this.getAliasToUse()); - this.id = NodeUtils.generateUniqueShaDigest(outputString.toString()); - } - - @Override - public String getIndexDocumentJson() { - // TODO Auto-generated method stub - JSONObject rootNode = new JSONObject(); - - JSONArray suggestionsArray = new JSONArray(); - for (String suggestion : suggestionInputPermutations) { - suggestionsArray.put(suggestion); - } - - JSONObject entitySuggest = new JSONObject(); - - entitySuggest.put("input", suggestionsArray); - entitySuggest.put("output", this.outputString); - entitySuggest.put("payload", this.payloadJsonNode); - rootNode.put("entity_suggest", entitySuggest); - - return rootNode.toString(); - } - - @Override - public ObjectNode getBulkImportEntity() { - // TODO Auto-generated method stub - return null; - } - - public String getAliasToUse() { - return aliasToUse; - } - - public void setAliasToUse(String aliasToUse) { - this.aliasToUse = aliasToUse; - } - - @Override - public String toString() { - return "SuggestionSearchEntity [entityType=" + entityType + ", suggestionConnectorWords=" - + suggestionConnectorWords + ", suggestionAttributeTypes=" + suggestionAttributeTypes - + ", suggestionAttributeValues=" + suggestionAttributeValues + ", suggestionTypeAliases=" - + suggestionTypeAliases + ", mapper=" + mapper + "]"; - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/entity/TransactionStorageType.java b/src/main/java/org/onap/aai/sparky/synchronizer/entity/TransactionStorageType.java deleted file mode 100644 index 635281e..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/entity/TransactionStorageType.java +++ /dev/null @@ -1,69 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.entity; - -/** - * The Enum TransactionStorageType. 
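To make the permutation and output-string logic above concrete with an assumed example (the attribute name, value and alias are illustrative, not read from this patch): for an entity of type generic-vnf whose only suggestable payload entry is prov-status=PROV and whose configured alias is VNFs, the entity-name list is [generic-vnf, VNFs], so the generated input permutations are "PROV generic-vnf", "generic-vnf PROV", "PROV VNFs" and "VNFs PROV"; the output string becomes "PROV VNFs" (payload values joined with " and ", followed by the alias in use), and the document id is the SHA digest of that output string.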
- */ -public enum TransactionStorageType { - EDGE_TAG_QUERY(0, "aaiOffline/edge-tag-query"), ACTIVE_INVENTORY_QUERY(1, - "aaiOffline/active-inventory-query"); - - private Integer index; - - /** - * @param index the index to set - */ - public void setIndex(Integer index) { - this.index = index; - } - - /** - * @param outputFolder the outputFolder to set - */ - public void setOutputFolder(String outputFolder) { - this.outputFolder = outputFolder; - } - - private String outputFolder; - - /** - * Instantiates a new transaction storage type. - * - * @param index the index - * @param outputFolder the output folder - */ - TransactionStorageType(Integer index, String outputFolder) { - this.index = index; - this.outputFolder = outputFolder; - } - - public Integer getIndex() { - return index; - } - - public String getOutputFolder() { - return outputFolder; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/enumeration/OperationState.java b/src/main/java/org/onap/aai/sparky/synchronizer/enumeration/OperationState.java deleted file mode 100644 index 87d1b88..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/enumeration/OperationState.java +++ /dev/null @@ -1,30 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.enumeration; - -/** - * The Enum OperationState. - */ -public enum OperationState { - INIT, OK, ERROR, ABORT, PENDING -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/enumeration/SynchronizerState.java b/src/main/java/org/onap/aai/sparky/synchronizer/enumeration/SynchronizerState.java deleted file mode 100644 index 0ce5f70..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/enumeration/SynchronizerState.java +++ /dev/null @@ -1,30 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.enumeration; - -/** - * The Enum SynchronizerState. - */ -public enum SynchronizerState { - IDLE, PERFORMING_SYNCHRONIZATION -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java b/src/main/java/org/onap/aai/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java deleted file mode 100644 index ef199f7..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java +++ /dev/null @@ -1,136 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.filter; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; - -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.SyncHelper; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.onap.aai.cl.mdc.MdcContext; - -/* - * This is a wire-frame for an experiment to get the jetty filter-lifecyle initialization method to - * setup a scheduled thread executor with an ElasticSearchSynchronization task, which (I'm hoping) - * will allow us to do periodic ES <=> AAI synchronization. - * - * Alternatively, if the embedded java approach doesn't work we could try instead to do a - * System.exec( "perl refreshElasticSearchInstance.pl"). We have two options, I'm hoping the - * embedded options will work for us. - */ - -/** - * The Class ElasticSearchSynchronizerFilter. 
- */ -public class ElasticSearchSynchronizerFilter implements Filter { - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(ElasticSearchSynchronizerFilter.class); - - private SyncHelper syncHelper; - - /* - * (non-Javadoc) - * - * @see javax.servlet.Filter#destroy() - */ - @Override - public void destroy() { - - if (syncHelper != null) { - syncHelper.shutdown(); - } - } - - /* - * (non-Javadoc) - * - * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, - * javax.servlet.FilterChain) - */ - @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) - throws IOException, ServletException { - - /* - * However, we will setup the filtermap with a url that should never get it, so we shouldn't - * ever be in here. - */ - - chain.doFilter(request, response); - } - - /* - * (non-Javadoc) - * - * @see javax.servlet.Filter#init(javax.servlet.FilterConfig) - */ - @Override - public void init(FilterConfig filterConfig) throws ServletException { - String txnID = NodeUtils.getRandomTxnId(); - MdcContext.initialize(txnID, "ElasticSearchSynchronizerFilter", "", "Init", ""); - - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "init()"); - - try { - new SyncHelper(OxmModelLoader.getInstance()); - } catch (Exception exc) { - throw new ServletException("Caught an exception while initializing filter", exc); - } - - } - - /** - * @return the syncHelper - */ - public SyncHelper getSyncHelper() { - return syncHelper; - } - - /** - * @param syncHelper the syncHelper to set - */ - public void setSyncHelper(SyncHelper syncHelper) { - this.syncHelper = syncHelper; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/CollectEntitySelfLinkTask.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/CollectEntitySelfLinkTask.java deleted file mode 100644 index b12a1d9..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/task/CollectEntitySelfLinkTask.java +++ /dev/null @@ -1,104 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider; -import org.onap.aai.sparky.dal.rest.OperationResult; - -/** - * The Class CollectEntitySelfLinkTask. 
- */ -public class CollectEntitySelfLinkTask implements Supplier { - - private NetworkTransaction txn; - - private ActiveInventoryDataProvider provider; - - /** - * Instantiates a new collect entity self link task. - * - * @param txn the txn - * @param provider the provider - */ - public CollectEntitySelfLinkTask(NetworkTransaction txn, ActiveInventoryDataProvider provider) { - this.txn = txn; - this.provider = provider; - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - - txn.setTaskAgeInMs(); - - long startTimeInMs = System.currentTimeMillis(); - OperationResult result = null; - try { - result = provider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5); - } catch (Exception exc) { - result = new OperationResult(500, - "Caught an exception while trying to resolve link = " + exc.getMessage()); - } finally { - result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - txn.setOperationResult(result); - } - - return txn; - } - - /** - * @return the txn - */ - public NetworkTransaction getTxn() { - return txn; - } - - /** - * @param txn the txn to set - */ - public void setTxn(NetworkTransaction txn) { - this.txn = txn; - } - - /** - * @return the provider - */ - public ActiveInventoryDataProvider getProvider() { - return provider; - } - - /** - * @param provider the provider to set - */ - public void setProvider(ActiveInventoryDataProvider provider) { - this.provider = provider; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java deleted file mode 100644 index 712a2e3..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java +++ /dev/null @@ -1,105 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider; -import org.onap.aai.sparky.dal.rest.OperationResult; - -/** - * The Class CollectEntityTypeSelfLinksTask. - */ -public class CollectEntityTypeSelfLinksTask implements Supplier { - - private ActiveInventoryDataProvider aaiProvider; - - private NetworkTransaction txn; - - /** - * Instantiates a new collect entity type self links task. 
- * - * @param txn the txn - * @param provider the provider - */ - public CollectEntityTypeSelfLinksTask(NetworkTransaction txn, - ActiveInventoryDataProvider provider) { - this.aaiProvider = provider; - this.txn = txn; - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - - txn.setTaskAgeInMs(); - - long startTimeInMs = System.currentTimeMillis(); - OperationResult result = null; - try { - result = aaiProvider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5); - } catch (Exception exc) { - result = new OperationResult(500, - "Caught an exception while trying to resolve link = " + exc.getMessage()); - } finally { - result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - txn.setOperationResult(result); - } - - return txn; - } - - /** - * @return the aaiProvider - */ - public ActiveInventoryDataProvider getAaiProvider() { - return aaiProvider; - } - - /** - * @param aaiProvider the aaiProvider to set - */ - public void setAaiProvider(ActiveInventoryDataProvider aaiProvider) { - this.aaiProvider = aaiProvider; - } - - /** - * @return the txn - */ - public NetworkTransaction getTxn() { - return txn; - } - - /** - * @param txn the txn to set - */ - public void setTxn(NetworkTransaction txn) { - this.txn = txn; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java deleted file mode 100644 index 8c1e0b7..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java +++ /dev/null @@ -1,105 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider; -import org.onap.aai.sparky.dal.rest.OperationResult; - -/** - * The Class GetCrossEntityReferenceEntityTask. - */ -public class GetCrossEntityReferenceEntityTask implements Supplier { - - private NetworkTransaction txn; - - private ActiveInventoryDataProvider provider; - - /** - * Instantiates a new gets the cross entity reference entity task. 
- * - * @param txn the txn - * @param provider the provider - */ - public GetCrossEntityReferenceEntityTask(NetworkTransaction txn, - ActiveInventoryDataProvider provider) { - this.txn = txn; - this.provider = provider; - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - - txn.setTaskAgeInMs(); - - long startTimeInMs = System.currentTimeMillis(); - OperationResult result = null; - try { - result = provider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5); - } catch (Exception exc) { - result = new OperationResult(500, - "Caught an exception while trying to resolve link = " + exc.getMessage()); - } finally { - result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - txn.setOperationResult(result); - } - - return txn; - } - - /** - * @return the txn - */ - public NetworkTransaction getTxn() { - return txn; - } - - /** - * @param txn the txn to set - */ - public void setTxn(NetworkTransaction txn) { - this.txn = txn; - } - - /** - * @return the provider - */ - public ActiveInventoryDataProvider getProvider() { - return provider; - } - - /** - * @param provider the provider to set - */ - public void setProvider(ActiveInventoryDataProvider provider) { - this.provider = provider; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformActiveInventoryRetrieval.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformActiveInventoryRetrieval.java deleted file mode 100644 index 33d3610..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformActiveInventoryRetrieval.java +++ /dev/null @@ -1,151 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.util.Map; -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.slf4j.MDC; - -/* - * Consider abstraction the tasks into common elemnts, because most of them repeat a generic call - * flow pattern - */ - -/** - * The Class PerformActiveInventoryRetrieval. 
- */ -public class PerformActiveInventoryRetrieval implements Supplier { - - private static Logger logger = LoggerFactory.getLogger(PerformActiveInventoryRetrieval.class); - - private NetworkTransaction txn; - private ActiveInventoryDataProvider aaiProvider; - private Map contextMap; - - /** - * Instantiates a new perform active inventory retrieval. - * - * @param txn the txn - * @param aaiProvider the aai provider - */ - public PerformActiveInventoryRetrieval(NetworkTransaction txn, - ActiveInventoryDataProvider aaiProvider) { - this.txn = txn; - this.aaiProvider = aaiProvider; - this.contextMap = MDC.getCopyOfContextMap(); - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - - txn.setTaskAgeInMs(); - - long startTimeInMs = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - OperationResult result = null; - try { - // todo: use proper config instead of hard-coding parameters - final String absoluteSelfLink = - ActiveInventoryConfig.getConfig().repairSelfLink(txn.getLink()); - result = aaiProvider.queryActiveInventoryWithRetries(absoluteSelfLink, "application/json", 5); - } catch (Exception exc) { - logger.error("Failure to resolve self link from AAI. Error = ", exc); - result = new OperationResult(500, - "Caught an exception while trying to resolve link = " + exc.getMessage()); - } finally { - result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - txn.setOperationResult(result); - } - - return txn; - } - - /** - * @return the logger - */ - public static Logger getLogger() { - return logger; - } - - /** - * @param logger the logger to set - */ - public static void setLogger(Logger logger) { - PerformActiveInventoryRetrieval.logger = logger; - } - - /** - * @return the txn - */ - public NetworkTransaction getTxn() { - return txn; - } - - /** - * @param txn the txn to set - */ - public void setTxn(NetworkTransaction txn) { - this.txn = txn; - } - - /** - * @return the aaiProvider - */ - public ActiveInventoryDataProvider getAaiProvider() { - return aaiProvider; - } - - /** - * @param aaiProvider the aaiProvider to set - */ - public void setAaiProvider(ActiveInventoryDataProvider aaiProvider) { - this.aaiProvider = aaiProvider; - } - - /** - * @return the contextMap - */ - public Map getContextMap() { - return contextMap; - } - - /** - * @param contextMap the contextMap to set - */ - public void setContextMap(Map contextMap) { - this.contextMap = contextMap; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchPut.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchPut.java deleted file mode 100644 index e1bbd56..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchPut.java +++ /dev/null @@ -1,140 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.util.Map; -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestDataProvider; -import org.slf4j.MDC; - -/** - * The Class PerformElasticSearchPut. - */ -public class PerformElasticSearchPut implements Supplier { - - private RestDataProvider restDataProvider; - private String jsonPayload; - private NetworkTransaction txn; - private Map contextMap; - - /** - * Instantiates a new perform elastic search put. - * - * @param jsonPayload the json payload - * @param txn the txn - * @param restDataProvider the rest data provider - */ - public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, - RestDataProvider restDataProvider) { - this.jsonPayload = jsonPayload; - this.txn = txn; - this.restDataProvider = restDataProvider; - this.contextMap = MDC.getCopyOfContextMap(); - } - - public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, - RestDataProvider restDataProvider, Map contextMap) { - this.jsonPayload = jsonPayload; - this.txn = txn; - this.restDataProvider = restDataProvider; - this.contextMap = contextMap; - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - txn.setTaskAgeInMs(); - long startTimeInMs = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - - OperationResult or = restDataProvider.doPut(txn.getLink(), jsonPayload, "application/json"); - - or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - txn.setOperationResult(or); - - return txn; - } - - /** - * @return the restDataProvider - */ - public RestDataProvider getRestDataProvider() { - return restDataProvider; - } - - /** - * @param restDataProvider the restDataProvider to set - */ - public void setRestDataProvider(RestDataProvider restDataProvider) { - this.restDataProvider = restDataProvider; - } - - /** - * @return the jsonPayload - */ - public String getJsonPayload() { - return jsonPayload; - } - - /** - * @param jsonPayload the jsonPayload to set - */ - public void setJsonPayload(String jsonPayload) { - this.jsonPayload = jsonPayload; - } - - /** - * @return the txn - */ - public NetworkTransaction getTxn() { - return txn; - } - - /** - * @param txn the txn to set - */ - public void setTxn(NetworkTransaction txn) { - this.txn = txn; - } - - /** - * @return the contextMap - */ - public Map getContextMap() { - return contextMap; - } - - /** - * @param contextMap the contextMap to set - */ - public void setContextMap(Map contextMap) { - this.contextMap = contextMap; - } -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchRetrieval.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchRetrieval.java deleted file mode 100644 index f3f3c16..0000000 --- 
a/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchRetrieval.java +++ /dev/null @@ -1,110 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.util.Map; -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestDataProvider; -import org.slf4j.MDC; - -/** - * The Class PerformElasticSearchRetrieval. - */ -public class PerformElasticSearchRetrieval implements Supplier { - - private NetworkTransaction txn; - private RestDataProvider restDataProvider; - private Map contextMap; - - /** - * Instantiates a new perform elastic search retrieval. 
- * - * @param elasticSearchTxn the elastic search txn - * @param restDataProvider the rest data provider - */ - public PerformElasticSearchRetrieval(NetworkTransaction elasticSearchTxn, - RestDataProvider restDataProvider) { - this.txn = elasticSearchTxn; - this.restDataProvider = restDataProvider; - this.contextMap = MDC.getCopyOfContextMap(); - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - MDC.setContextMap(contextMap); - OperationResult or = restDataProvider.doGet(txn.getLink(), "application/json"); - txn.setOperationResult(or); - return txn; - } - - /** - * @return the txn - */ - public NetworkTransaction getTxn() { - return txn; - } - - /** - * @param txn the txn to set - */ - public void setTxn(NetworkTransaction txn) { - this.txn = txn; - } - - /** - * @return the restDataProvider - */ - public RestDataProvider getRestDataProvider() { - return restDataProvider; - } - - /** - * @param restDataProvider the restDataProvider to set - */ - public void setRestDataProvider(RestDataProvider restDataProvider) { - this.restDataProvider = restDataProvider; - } - - /** - * @return the contextMap - */ - public Map getContextMap() { - return contextMap; - } - - /** - * @param contextMap the contextMap to set - */ - public void setContextMap(Map contextMap) { - this.contextMap = contextMap; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchUpdate.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchUpdate.java deleted file mode 100644 index 72b48c9..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/task/PerformElasticSearchUpdate.java +++ /dev/null @@ -1,152 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.util.Map; -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchDataProvider; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.slf4j.MDC; - -/** - * The Class PerformElasticSearchUpdate. - */ -public class PerformElasticSearchUpdate implements Supplier { - - private ElasticSearchDataProvider esDataProvider; - private NetworkTransaction operationTracker; - private String updatePayload; - private String updateUrl; - private Map contextMap; - - /** - * Instantiates a new perform elastic search update. 
- * - * @param updateUrl the update url - * @param updatePayload the update payload - * @param esDataProvider the es data provider - * @param transactionTracker the transaction tracker - */ - public PerformElasticSearchUpdate(String updateUrl, String updatePayload, - ElasticSearchDataProvider esDataProvider, NetworkTransaction transactionTracker) { - this.updateUrl = updateUrl; - this.updatePayload = updatePayload; - this.esDataProvider = esDataProvider; - this.contextMap = MDC.getCopyOfContextMap(); - this.operationTracker = new NetworkTransaction(); - operationTracker.setEntityType(transactionTracker.getEntityType()); - operationTracker.setDescriptor(transactionTracker.getDescriptor()); - operationTracker.setOperationType(transactionTracker.getOperationType()); - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - operationTracker.setTaskAgeInMs(); - long startTimeInMs = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - OperationResult or = esDataProvider.doBulkOperation(updateUrl, updatePayload); - - or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - operationTracker.setOperationResult(or); - - return operationTracker; - } - - /** - * @return the esDataProvider - */ - public ElasticSearchDataProvider getEsDataProvider() { - return esDataProvider; - } - - /** - * @param esDataProvider the esDataProvider to set - */ - public void setEsDataProvider(ElasticSearchDataProvider esDataProvider) { - this.esDataProvider = esDataProvider; - } - - /** - * @return the operationTracker - */ - public NetworkTransaction getOperationTracker() { - return operationTracker; - } - - /** - * @param operationTracker the operationTracker to set - */ - public void setOperationTracker(NetworkTransaction operationTracker) { - this.operationTracker = operationTracker; - } - - /** - * @return the updatePayload - */ - public String getUpdatePayload() { - return updatePayload; - } - - /** - * @param updatePayload the updatePayload to set - */ - public void setUpdatePayload(String updatePayload) { - this.updatePayload = updatePayload; - } - - /** - * @return the updateUrl - */ - public String getUpdateUrl() { - return updateUrl; - } - - /** - * @param updateUrl the updateUrl to set - */ - public void setUpdateUrl(String updateUrl) { - this.updateUrl = updateUrl; - } - - /** - * @return the contextMap - */ - public Map getContextMap() { - return contextMap; - } - - /** - * @param contextMap the contextMap to set - */ - public void setContextMap(Map contextMap) { - this.contextMap = contextMap; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/PersistOperationResultToDisk.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/PersistOperationResultToDisk.java deleted file mode 100644 index 0ab331e..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/task/PersistOperationResultToDisk.java +++ /dev/null @@ -1,157 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.io.File; -import java.util.Map; -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.cl.api.Logger; -import org.slf4j.MDC; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * The Class PersistOperationResultToDisk. - */ -public class PersistOperationResultToDisk implements Supplier { - - private String fullPath; - private OperationResult dataToStore; - private ObjectMapper mapper; - private Logger logger; - private Map contextMap; - - /** - * Instantiates a new persist operation result to disk. - * - * @param fullPath the full path - * @param dataToStore the data to store - * @param mapper the mapper - * @param logger the logger - */ - public PersistOperationResultToDisk(String fullPath, OperationResult dataToStore, - ObjectMapper mapper, Logger logger) { - - this.fullPath = fullPath; - this.mapper = mapper; - this.dataToStore = dataToStore; - this.logger = logger; - this.contextMap = MDC.getCopyOfContextMap(); - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public Void get() { - MDC.setContextMap(contextMap); - File file = new File(fullPath); - if (!file.exists()) { - try { - mapper.writeValue(new File(fullPath), dataToStore); - } catch (Exception exc) { - logger.error(AaiUiMsgs.DISK_DATA_WRITE_IO_ERROR, exc.toString()); - } - } - - return null; - } - - /** - * @return the fullPath - */ - public String getFullPath() { - return fullPath; - } - - /** - * @param fullPath the fullPath to set - */ - public void setFullPath(String fullPath) { - this.fullPath = fullPath; - } - - /** - * @return the dataToStore - */ - public OperationResult getDataToStore() { - return dataToStore; - } - - /** - * @param dataToStore the dataToStore to set - */ - public void setDataToStore(OperationResult dataToStore) { - this.dataToStore = dataToStore; - } - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - /** - * @return the logger - */ - public Logger getLogger() { - return logger; - } - - /** - * @param logger the logger to set - */ - public void setLogger(Logger logger) { - this.logger = logger; - } - - /** - * @return the contextMap - */ - public Map getContextMap() { - return contextMap; - } - - /** - * @param contextMap the contextMap to set - */ - public void setContextMap(Map contextMap) { - this.contextMap = contextMap; - } - - - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java deleted file mode 100644 index 0e11319..0000000 --- 
a/src/main/java/org/onap/aai/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java +++ /dev/null @@ -1,133 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.cl.api.Logger; - -import com.fasterxml.jackson.databind.ObjectMapper; - -/** - * The Class RetrieveOperationResultFromDisk. - */ -public class RetrieveOperationResultFromDisk implements Supplier { - - private String fullPath; - private ObjectMapper mapper; - private Logger logger; - - /** - * Instantiates a new retrieve operation result from disk. 
- * - * @param fullPath the full path - * @param mapper the mapper - * @param logger the logger - */ - public RetrieveOperationResultFromDisk(String fullPath, ObjectMapper mapper, Logger logger) { - - this.fullPath = fullPath; - this.mapper = mapper; - this.logger = logger; - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public OperationResult get() { - - try { - File file = new File(fullPath); - if (file.exists()) { - if (logger.isDebugEnabled()) { - logger.debug(AaiUiMsgs.WILL_RETRIEVE_TXN, fullPath); - } - - Path path = Paths.get(fullPath); - byte[] byteBuffer = Files.readAllBytes(path); - - OperationResult opResult = mapper.readValue(byteBuffer, OperationResult.class); - - return opResult; - } else { - logger.debug(AaiUiMsgs.FAILED_TO_RESTORE_TXN_FILE_MISSING, fullPath); - } - } catch (IOException exc) { - logger.error(AaiUiMsgs.DISK_CACHE_READ_IO_ERROR, exc.getLocalizedMessage()); - } - return null; - } - - /** - * @return the fullPath - */ - public String getFullPath() { - return fullPath; - } - - /** - * @param fullPath the fullPath to set - */ - public void setFullPath(String fullPath) { - this.fullPath = fullPath; - } - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - /** - * @return the logger - */ - public Logger getLogger() { - return logger; - } - - /** - * @param logger the logger to set - */ - public void setLogger(Logger logger) { - this.logger = logger; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/synchronizer/task/StoreDocumentTask.java b/src/main/java/org/onap/aai/sparky/synchronizer/task/StoreDocumentTask.java deleted file mode 100644 index 3e31d12..0000000 --- a/src/main/java/org/onap/aai/sparky/synchronizer/task/StoreDocumentTask.java +++ /dev/null @@ -1,137 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.synchronizer.task; - -import java.util.Map; -import java.util.function.Supplier; - -import org.onap.aai.sparky.dal.NetworkTransaction; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestDataProvider; -import org.onap.aai.sparky.synchronizer.entity.IndexDocument; -import org.slf4j.MDC; - -/** - * The Class StoreDocumentTask. 
- */ -public class StoreDocumentTask implements Supplier { - - private IndexDocument doc; - - /** - * @return the doc - */ - public IndexDocument getDoc() { - return doc; - } - - /** - * @param doc the doc to set - */ - public void setDoc(IndexDocument doc) { - this.doc = doc; - } - - /** - * @return the txn - */ - public NetworkTransaction getTxn() { - return txn; - } - - /** - * @param txn the txn to set - */ - public void setTxn(NetworkTransaction txn) { - this.txn = txn; - } - - /** - * @return the esDataProvider - */ - public RestDataProvider getEsDataProvider() { - return esDataProvider; - } - - /** - * @param esDataProvider the esDataProvider to set - */ - public void setEsDataProvider(RestDataProvider esDataProvider) { - this.esDataProvider = esDataProvider; - } - - /** - * @return the contextMap - */ - public Map getContextMap() { - return contextMap; - } - - /** - * @param contextMap the contextMap to set - */ - public void setContextMap(Map contextMap) { - this.contextMap = contextMap; - } - - private NetworkTransaction txn; - - private RestDataProvider esDataProvider; - private Map contextMap; - - /** - * Instantiates a new store document task. - * - * @param doc the doc - * @param txn the txn - * @param esDataProvider the es data provider - */ - public StoreDocumentTask(IndexDocument doc, NetworkTransaction txn, - RestDataProvider esDataProvider) { - this.doc = doc; - this.txn = txn; - this.esDataProvider = esDataProvider; - this.contextMap = MDC.getCopyOfContextMap(); - } - - /* - * (non-Javadoc) - * - * @see java.util.function.Supplier#get() - */ - @Override - public NetworkTransaction get() { - txn.setTaskAgeInMs(); - - long startTimeInMs = System.currentTimeMillis(); - MDC.setContextMap(contextMap); - OperationResult or = - esDataProvider.doPut(txn.getLink(), doc.getIndexDocumentJson(), "application/json"); - or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); - - txn.setOperationResult(or); - - return txn; - } - -} diff --git a/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java b/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java new file mode 100644 index 0000000..a2acc06 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java @@ -0,0 +1,95 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.topology.sync; + +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class GeoSyncController extends SyncControllerImpl implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + + public GeoSyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig) + throws Exception { + super(syncControllerConfig); + + // final String controllerName = "Inventory Geo Synchronizer"; + + IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig, + endpointConfig, ElasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(indexValidator); + + GeoSynchronizer synchronizer = + new GeoSynchronizer(schemaConfig, syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig); + + synchronizer.setAaiAdapter(aaiAdapter); + synchronizer.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(synchronizer); + + + IndexCleaner indexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(indexCleaner); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + @Override + public void registerController() { + + if (syncControllerRegistry != null) { + if (syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + } + + + +} diff --git a/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java b/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java new file mode 100644 index 0000000..f075ff8 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java @@ -0,0 +1,497 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.topology.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.GeoEntityLookup; +import org.onap.aai.sparky.config.oxm.GeoOxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.inventory.entity.GeoIndexDocument; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.StoreDocumentTask; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; + + +/** + * The Class GeoSynchronizer. + */ +public class GeoSynchronizer extends AbstractEntitySynchronizer implements IndexSynchronizer { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(GeoSynchronizer.class); + + private boolean allWorkEnumerated; + private Deque selflinks; + + private ElasticSearchConfig elasticConfig = null; + private Map geoDescriptorMap = null; + + /** + * Instantiates a new geo synchronizer. 
+   *
+   * @param schemaConfig the Elasticsearch schema config
+   * @param internalSyncWorkers the number of internal sync workers
+   * @param aaiWorkers the number of AAI retrieval workers
+   * @param esWorkers the number of Elasticsearch workers
+   * @param aaiStatConfig the AAI network statistics config
+   * @param esStatConfig the Elasticsearch network statistics config
+   * @throws Exception the exception
+   */
+  public GeoSynchronizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers,
+      int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
+      NetworkStatisticsConfig esStatConfig) throws Exception {
+
+    super(LOG, "GEO", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(),
+        aaiStatConfig, esStatConfig);
+    this.allWorkEnumerated = false;
+    this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+    this.synchronizerName = "Geo Synchronizer";
+    this.geoDescriptorMap = GeoEntityLookup.getInstance().getGeoEntityDescriptors();
+    this.aaiEntityStats.intializeEntityCounters(geoDescriptorMap.keySet());
+    this.esEntityStats.intializeEntityCounters(geoDescriptorMap.keySet());
+    this.syncDurationInMs = -1;
+  }
+
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+   */
+  @Override
+  public OperationState doSync() {
+    this.syncDurationInMs = -1;
+    resetCounters();
+    setShouldSkipSync(false);
+    allWorkEnumerated = false;
+    syncStartedTimeStampInMs = System.currentTimeMillis();
+    String txnID = NodeUtils.getRandomTxnId();
+    MdcContext.initialize(txnID, "GeoSynchronizer", "", "Sync", "");
+
+    collectAllTheWork();
+    return OperationState.OK;
+  }
+
+
+  /**
+   * Collect all the work.
+   *
+   * @return the operation state
+   */
+  public OperationState collectAllTheWork() {
+    final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+    if (elasticConfig == null) {
+      try {
+        elasticConfig = ElasticSearchConfig.getConfig();
+      } catch (Exception exc) {
+        LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, "Search");
+      }
+    }
+
+    if (geoDescriptorMap.isEmpty()) {
+      setShouldSkipSync(true);
+      LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "geo entities");
+      return OperationState.ERROR;
+    }
+
+    Collection<String> syncTypes = geoDescriptorMap.keySet();
+
+    try {
+
+      /*
+       * Launch a parallel async thread to process the documents for each entity type (up to the
+       * max of the configured executor).
+       */
+
+      aaiWorkOnHand.set(syncTypes.size());
+
+      for (String key : syncTypes) {
+
+        supplyAsync(new Supplier<Void>() {
+
+          @Override
+          public Void get() {
+            MDC.setContextMap(contextMap);
+            OperationResult typeLinksResult = null;
+            try {
+              typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key);
+              aaiWorkOnHand.decrementAndGet();
+              processEntityTypeSelfLinks(typeLinksResult);
+            } catch (Exception exc) {
+              LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc);
+            }
+
+            return null;
+          }
+
+        }, aaiExecutor).whenComplete((result, error) -> {
+
+          if (error != null) {
+            LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage());
+          }
+        });
+
+      }
+
+      while (aaiWorkOnHand.get() != 0) {
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+        }
+
+        Thread.sleep(1000);
+      }
+
+      aaiWorkOnHand.set(selflinks.size());
+      allWorkEnumerated = true;
+      syncEntityTypes();
+
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc);
+    }
+    return OperationState.OK;
+  }
+
+  /**
+   * Sync entity types.
+ */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = OxmEntityLookup.getInstance().getEntityDescriptors() + .get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.SELF_LINK_GET_NO_RESPONSE, linkDescriptor.getSelfLink()); + } else { + processEntityTypeSelfLinkResult(result); + } + } + }); + } + } + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, exc); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + if (resourceType != null && resourceLink != null) { + + if (geoDescriptorMap.containsKey(resourceType)) { + selflinks.add(new SelfLinkDescriptor(resourceLink + "?nodes-only", resourceType)); + } else { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + + } + } + } + } + + } + + /** + * Process entity type self link result. 
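+   *
+   * <p>Summary (added for clarity): updates the A&AI counters, converts a successful response
+   * payload into a {@code GeoIndexDocument}, and issues an asynchronous PUT to the geo index via
+   * {@code StoreDocumentTask}; documents without usable latitude/longitude are logged and skipped.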
+ * + * @param txn the txn + */ + private void processEntityTypeSelfLinkResult(NetworkTransaction txn) { + + updateActiveInventoryCounters(txn); + + if (!txn.getOperationResult().wasSuccessful()) { + return; + } + + GeoOxmEntityDescriptor descriptor = geoDescriptorMap.get(txn.getEntityType()); + + if (descriptor == null) { + return; + } + + try { + if (descriptor.hasGeoEntity()) { + + GeoIndexDocument geoDoc = new GeoIndexDocument(); + + final String jsonResult = txn.getOperationResult().getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + populateGeoDocument(geoDoc, jsonResult, txn.getDescriptor(), txn.getLink()); + + if (!geoDoc.isValidGeoDocument()) { + + LOG.info(AaiUiMsgs.GEO_SYNC_IGNORING_ENTITY, geoDoc.getEntityType(), geoDoc.toString()); + + } else { + + String link = null; + try { + link = getElasticFullUrl("/" + geoDoc.getId(), getIndexName(), "default"); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc); + } + + if (link != null) { + + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new StoreDocumentTask(geoDoc, n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, error.getMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result); + } + }); + } + } + } + } + } catch (JsonProcessingException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc); + } + + return; + } + + + /** + * Process store document result. + * + * @param txn the txn + */ + private void processStoreDocumentResult(NetworkTransaction txn) { + + OperationResult or = txn.getOperationResult(); + + if (!or.wasSuccessful()) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, or.toString()); + /* + * if(or.getResultCode() != 404 || (or.getResultCode() == 404 && + * !synchronizerConfig.isResourceNotFoundErrorsSupressed())) { logger.error( + * "Skipping failed resource = " + "link" + " RC=[" + or.getResultCode() + "]. Message: " + + * or.getResult()); } + */ + + } + + } + + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return this.getStatReport(syncDurationInMs, showFinalReport); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + /** + * Populate geo document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @param entityLink the entity link + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
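+   *
+   * <p>Summary (added for clarity): the entity's primary key values are concatenated into the
+   * document key, and latitude/longitude are read from the payload using the field names supplied
+   * by the matching {@link GeoOxmEntityDescriptor} before {@code deriveFields()} completes the
+   * document.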
+ */ + protected void populateGeoDocument(GeoIndexDocument doc, String result, + OxmEntityDescriptor resultDescriptor, String entityLink) + throws JsonProcessingException, IOException { + + doc.setSelfLink(entityLink); + doc.setEntityType(resultDescriptor.getEntityName()); + + JsonNode entityNode = mapper.readTree(result); + + List primaryKeyValues = new ArrayList(); + String pkeyValue = null; + + for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + + GeoOxmEntityDescriptor descriptor = geoDescriptorMap.get(resultDescriptor.getEntityName()); + + String geoLatKey = descriptor.getGeoLatName(); + String geoLongKey = descriptor.getGeoLongName(); + + doc.setLatitude(NodeUtils.getNodeFieldAsText(entityNode, geoLatKey)); + doc.setLongitude(NodeUtils.getNodeFieldAsText(entityNode, geoLongKey)); + doc.deriveFields(); + + } + + @Override + protected boolean isSyncDone() { + if (shouldSkipSync()) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return true; + } + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + return true; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java b/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java index bad916b..c9a2414 100644 --- a/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java +++ b/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java @@ -32,9 +32,9 @@ import java.io.InputStream; import java.util.Properties; import java.util.Set; -import org.onap.aai.sparky.logging.AaiUiMsgs; import org.onap.aai.cl.api.Logger; import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; /** * The Class ConfigHelper. diff --git a/src/main/java/org/onap/aai/sparky/util/Encryptor.java b/src/main/java/org/onap/aai/sparky/util/Encryptor.java index 80aefd0..15b735b 100644 --- a/src/main/java/org/onap/aai/sparky/util/Encryptor.java +++ b/src/main/java/org/onap/aai/sparky/util/Encryptor.java @@ -28,17 +28,38 @@ import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.eclipse.jetty.util.security.Password; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; /** * The Class Encryptor. */ public class Encryptor { + private static final Logger LOG = LoggerFactory.getInstance().getLogger(Encryptor.class); + /** * Instantiates a new encryptor. */ public Encryptor() {} + /** + * Encrypt value. + * + * @param value to encrypt + * @return the encrypted string + */ + public String encryptValue(String value) { + String encyptedValue = ""; + try { + encyptedValue = Password.obfuscate(value); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ENCRYPTION_ERROR, value, exc.toString()); + } + return encyptedValue; + } + /** * Decrypt value. 
   *
@@ -47,8 +68,11 @@ public class Encryptor {
    */
   public String decryptValue(String value) {
     String decyptedValue = "";
-
-    decyptedValue = Password.deobfuscate(value);
+    try {
+      decyptedValue = Password.deobfuscate(value);
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.DECRYPTION_ERROR, value, exc.toString());
+    }
 
     return decyptedValue;
   }
@@ -76,4 +100,63 @@ public class Encryptor {
     System.exit(1);
   }
 
+  /**
+   * The main method.
+   *
+   * @param args the arguments
+   */
+  public static void main(String[] args) {
+
+    Options options = new Options();
+    options.addOption("e", true, "value to encrypt");
+    options.addOption("d", true, "value to decrypt");
+    options.addOption("h", false, "show help");
+    options.addOption("?", false, "show help");
+
+    String value = null;
+    boolean encrypt = false;
+    boolean decrypt = false;
+
+    CommandLineParser parser = new BasicParser();
+    CommandLine cmd = null;
+
+    try {
+      cmd = parser.parse(options, args);
+
+      if (cmd.hasOption("e")) {
+        value = cmd.getOptionValue("e");
+        encrypt = true;
+      }
+
+      if (cmd.hasOption("d")) {
+        value = cmd.getOptionValue("d");
+        decrypt = true;
+      }
+
+      if (cmd.hasOption("?") || cmd.hasOption("h")) {
+        usage();
+        System.exit(0);
+      }
+
+      if ((encrypt && decrypt) || (!encrypt && !decrypt)) {
+        usage("Must specify one (and only one) of the -e or -d options");
+      }
+
+      Encryptor encryptor = new Encryptor();
+
+      if (encrypt) {
+        String out = encryptor.encryptValue(value);
+        System.out.println(out);
+      } else if (decrypt) {
+        String out = encryptor.decryptValue(value);
+        System.out.println(out);
+      }
+    } catch (ParseException exc) {
+      System.out.println("Failed to parse command line properties: " + exc.toString());
+    } catch (Exception exc) {
+      System.out.println("Failure: " + exc.toString());
+    }
+
+    System.exit(0);
+  }
 }
diff --git a/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java b/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java
index 0bf6d38..e661b73 100644
--- a/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java
+++ b/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java
@@ -20,7 +20,6 @@
  *
  * ECOMP is a trademark and service mark of AT&T Intellectual Property.
  */
-
 package org.onap.aai.sparky.util;
 
 /**
diff --git a/src/main/java/org/onap/aai/sparky/util/KeystoreBuilder.java b/src/main/java/org/onap/aai/sparky/util/KeystoreBuilder.java
index d3ae421..05ba3d2 100644
--- a/src/main/java/org/onap/aai/sparky/util/KeystoreBuilder.java
+++ b/src/main/java/org/onap/aai/sparky/util/KeystoreBuilder.java
@@ -110,20 +110,6 @@ public class KeystoreBuilder {
 
   private List endpoints = new ArrayList();
 
-  /**
-   * @return the endpoints
-   */
-  public List getEndpoints() {
-    return endpoints;
-  }
-
-  /**
-   * @param endpoints the endpoints to set
-   */
-  public void setEndpoints(List endpoints) {
-    this.endpoints = endpoints;
-  }
-
   /**
    * Initialize end points list.
* @@ -209,7 +195,7 @@ public class KeystoreBuilder { } else { System.out.println("keystore file doesn't exist, preloading new file with jssecacerts"); } - password = keystorePassword; + password = "changeit"; } @@ -260,8 +246,8 @@ public class KeystoreBuilder { private X509Certificate[] getCertificateChainForRemoteEndpoint(String hostname, int port) throws UnknownHostException, IOException { - System.out.println("Opening connection to " + hostname + ":" + port + ".."); - SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(hostname, port); + System.out.println("Opening connection to localhost:8442.."); + SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket("aai-int1.dev.att.com", 8440); socket.setSoTimeout(10000); try { @@ -272,8 +258,6 @@ public class KeystoreBuilder { System.exit(0); } catch (SSLException exc) { System.out.println("\nCaught SSL exception, we are not authorized to access this server yet"); - throw new SSLException( - "\nCaught SSL exception, we are not authorized to access this server yet"); // e.printStackTrace(System.out); } @@ -523,6 +507,8 @@ public class KeystoreBuilder { */ public static void main(String[] args) throws Exception { + // String endpointList = "aai-int1.test.att.com:8440;aai-int1.dev.att.com:8442"; + /* * Examples: localhost:8440;localhost:8442 d:\1\adhoc_keystore.jks aaiDomain2 false * localhost:8440;localhost:8442 d:\1\adhoc_keystore.jks aaiDomain2 true diff --git a/src/main/java/org/onap/aai/sparky/util/NodeUtils.java b/src/main/java/org/onap/aai/sparky/util/NodeUtils.java index 20e547f..a34c07d 100644 --- a/src/main/java/org/onap/aai/sparky/util/NodeUtils.java +++ b/src/main/java/org/onap/aai/sparky/util/NodeUtils.java @@ -27,8 +27,10 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.lang.Thread.UncaughtExceptionHandler; +import java.net.URI; import java.nio.ByteBuffer; import java.security.SecureRandom; +import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -50,15 +52,17 @@ import java.util.regex.Pattern; import javax.servlet.http.HttpServletRequest; import javax.xml.stream.XMLStreamConstants; +import org.onap.aai.cl.api.Logger; import org.onap.aai.sparky.logging.AaiUiMsgs; import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; -import org.onap.aai.cl.api.Logger; +import org.restlet.Request; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.ser.FilterProvider; import com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -68,33 +72,8 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; public class NodeUtils { private static SecureRandom sRandom = new SecureRandom(); - /** - * @return the sRandom - */ - public static SecureRandom getsRandom() { - return sRandom; - } - - /** - * @param sRandom the sRandom to set - */ - public static void setsRandom(SecureRandom sRandom) { - NodeUtils.sRandom = sRandom; - } - - /** - * @return the entityResourceKeyFormat - */ - public static String getEntityResourceKeyFormat() { - return ENTITY_RESOURCE_KEY_FORMAT; - } + private static final Pattern AAI_VERSION_PREFIX = Pattern.compile("/aai/v[0-9]+/(.*)"); - /** - * @return the timeBreakDownFormat - */ - public static String 
getTimeBreakDownFormat() { - return TIME_BREAK_DOWN_FORMAT; - } public static synchronized String getRandomTxnId() { byte bytes[] = new byte[6]; @@ -118,6 +97,31 @@ public class NodeUtils { return sb.toString(); } + + public static String extractRawPathWithoutVersion(String selfLinkUri) { + + try { + + String rawPath = new URI(selfLinkUri).getRawPath(); + + Matcher m = AAI_VERSION_PREFIX.matcher(rawPath); + + if (m.matches()) { + + // System.out.println(m.group(0)); + if (m.groupCount() >= 1) { + return m.group(1); + } + // System.out.println(m.group(2)); + + } + } catch (Exception e) { + } + + return null; + + } + /** * Checks if is numeric. * @@ -296,6 +300,14 @@ public class NodeUtils { return concatArray(list, " "); } + private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + + public static String getCurrentTimeStamp() { + SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + return dateFormat.format(timestamp); + } + /** * Concat array. * @@ -395,12 +407,12 @@ public class NodeUtils { String resourceId = null; if ("/".equals(link.substring(linkLength - 1))) { // Use-case: - // https://:9292/aai/v7/business/customers/customer/1607_20160524Func_Ak1_01/service-subscriptions/service-subscription/uCPE-VMS/ + // https://aai-ext1.test.att.com:9292/aai/v7/business/customers/customer/1607_20160524Func_Ak1_01/service-subscriptions/service-subscription/uCPE-VMS/ startIndex = link.lastIndexOf("/", linkLength - 2); resourceId = link.substring(startIndex + 1, linkLength - 1); } else { // Use-case: - // https://:9292/aai/v7/business/customers/customer/1607_20160524Func_Ak1_01/service-subscriptions/service-subscription/uCPE-VMS + // https://aai-ext1.test.att.com:9292/aai/v7/business/customers/customer/1607_20160524Func_Ak1_01/service-subscriptions/service-subscription/uCPE-VMS startIndex = link.lastIndexOf("/"); resourceId = link.substring(startIndex + 1, linkLength); } @@ -491,6 +503,33 @@ public class NodeUtils { return ow.writeValueAsString(object); } + /** + * Convert object to json by selectively choosing certain fields thru filters. Example use case: + * based on request type we might need to send different serialization of the UiViewFilterEntity + * + * @param object the object + * @param pretty the pretty + * @return the string + * @throws JsonProcessingException the json processing exception + */ + public static String convertObjectToJson(Object object, boolean pretty, FilterProvider filters) + throws JsonProcessingException { + ObjectWriter ow = null; + + ObjectMapper mapper = new ObjectMapper(); + mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + + if (pretty) { + ow = mapper.writer(filters).withDefaultPrettyPrinter(); + + } else { + ow = mapper.writer(filters); + } + + return ow.writeValueAsString(object); + } + + /** * Convert json str to json node. * @@ -687,13 +726,39 @@ public class NodeUtils { * @throws IOException Signals that an I/O exception has occurred. */ public static String getBody(HttpServletRequest request) throws IOException { + InputStream inputStream = request.getInputStream(); + return getBodyFromStream(inputStream); + } + + + + /** + * Gets the Restlet Request payload. + * + * @param request the request + * @return the body + * @throws IOException Signals that an I/O exception has occurred. 
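+   *
+   * <p>Summary (added for clarity): this Restlet overload unwraps the entity stream via
+   * {@code request.getEntity().getStream()} and delegates to {@link #getBodyFromStream(InputStream)},
+   * so servlet and Restlet callers share the same buffering logic. Illustrative usage (variable
+   * name is hypothetical): {@code String payload = NodeUtils.getBody(restletRequest);}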
+ */ + public static String getBody(Request request) throws IOException { + InputStream inputStream = request.getEntity().getStream(); + return getBodyFromStream(inputStream); + } + + + /** + * Gets the payload from the input stream of a request. + * + * @param request the request + * @return the body + * @throws IOException Signals that an I/O exception has occurred. + */ + public static String getBodyFromStream(InputStream inputStream) throws IOException { String body = null; StringBuilder stringBuilder = new StringBuilder(); BufferedReader bufferedReader = null; try { - InputStream inputStream = request.getInputStream(); if (inputStream != null) { bufferedReader = new BufferedReader(new InputStreamReader(inputStream)); char[] charBuffer = new char[128]; @@ -719,4 +784,23 @@ public class NodeUtils { body = stringBuilder.toString(); return body; } + + + /** + * The main method. + * + * @param args the arguments + * @throws ParseException the parse exception + */ + public static void main(String[] args) throws ParseException { + String date = "20170110T112312Z"; + SimpleDateFormat originalFormat = new SimpleDateFormat("yyyyMMdd'T'hhmmss'Z'"); + Date toDate = originalFormat.parse(date); + SimpleDateFormat newFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss'Z'"); + System.out.println(newFormat.format(toDate)); + + } + + + } diff --git a/src/main/java/org/onap/aai/sparky/util/RestletUtils.java b/src/main/java/org/onap/aai/sparky/util/RestletUtils.java new file mode 100644 index 0000000..06c8c05 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/util/RestletUtils.java @@ -0,0 +1,118 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.util; + +import javax.servlet.http.HttpServletResponse; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.restlet.Response; +import org.restlet.data.MediaType; +import org.restlet.data.Status; + +public class RestletUtils { + /** + * Returns an HttpServletResponse based on values from a Restlet Response + * + * @param restletResponse Restlet Response to be converted to an HttpServletResponse + * @return An HttpServletResponse object built from the values of a Restlet Response + */ + public HttpServletResponse convertRestletResponseToHttpServletResponse(Response restletResponse) { + return org.restlet.ext.servlet.ServletUtils.getResponse(restletResponse); + } + + /** + * Execute post query + * + * @param logger The logger + * @param search The searchAdapter + * @param response The response + * @param requestUrl The request URL + * @param requestJsonPayload The request JSON payload + * @return The operation result + */ + public OperationResult executePostQuery(Logger logger, SearchAdapter search, Response response, + String requestUrl, String requestJsonPayload) { + + OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json"); + + if (opResult.getResultCode() > 300) { + setRestletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); + } else { + response.setStatus(new Status(opResult.getResultCode())); + } + + return opResult; + } + + /** + * Generate JSON error response + * + * @param message The error message + * @return The error message formatted as a JSON string + */ + public String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : \"%s\" }", message); + } + + /** + * Log Restlet exceptions/errors & prepare Response object with exception/errors info + * + * @param logger The logger + * @param errorMsg The error message + * @param exc The exception + * @param response The response + */ + public void handleRestletErrors(Logger logger, String errorMsg, Exception exc, + Response response) { + String errorLogMsg = + (exc == null ? errorMsg : errorMsg + ". 
Error:" + exc.getLocalizedMessage()); + logger.error(AaiUiMsgs.ERROR_GENERIC, errorLogMsg); + response.setEntity(generateJsonErrorResponse(errorMsg), MediaType.APPLICATION_JSON); + } + + /** + * Sets the Restlet response + * + * @param logger The logger + * @param isError The error + * @param responseCode The response code + * @param response The response + * @param postPayload The post payload + */ + public void setRestletResponse(Logger logger, boolean isError, int responseCode, + Response response, String postPayload) { + + if (isError) { + logger.error(AaiUiMsgs.ERROR_GENERIC, postPayload); + } + + response.setStatus(new Status(responseCode)); + + if (postPayload != null) { + response.setEntity(postPayload, MediaType.APPLICATION_JSON); + } + } +} diff --git a/src/main/java/org/onap/aai/sparky/util/ServletUtils.java b/src/main/java/org/onap/aai/sparky/util/ServletUtils.java deleted file mode 100644 index 2a8159e..0000000 --- a/src/main/java/org/onap/aai/sparky/util/ServletUtils.java +++ /dev/null @@ -1,161 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.util; - -import java.io.IOException; -import java.io.PrintWriter; - -import javax.servlet.http.HttpServletResponse; - -import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.cl.api.Logger; - -/** - * The Class ServletUtils. - */ -public class ServletUtils { - - /** - * Execute get query. - * - * @param logger the logger - * @param search the search - * @param response the response - * @param requestUrl the request url - * @return the operation result - * @throws Exception the exception - */ - public static OperationResult executeGetQuery(Logger logger, SearchAdapter search, - HttpServletResponse response, String requestUrl) throws Exception { - - OperationResult opResult = search.doGet(requestUrl, "application/json"); - - if (opResult.getResultCode() > 300) { - setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); - } else { - response.setStatus(opResult.getResultCode()); - } - - return opResult; - - } - - /** - * Execute post query. 
- * - * @param logger the logger - * @param search the search - * @param response the response - * @param requestUrl the request url - * @param requestJsonPayload the request json payload - * @return the operation result - * @throws Exception the exception - */ - public static OperationResult executePostQuery(Logger logger, SearchAdapter search, - HttpServletResponse response, String requestUrl, String requestJsonPayload) throws Exception { - - OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json"); - - if (opResult.getResultCode() > 300) { - setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); - - } else { - response.setStatus(opResult.getResultCode()); - } - - return opResult; - } - - /** - * Handle search servlet errors. - * - * @param logger the logger - * @param errorMsg the error msg - * @param exc the exc - * @param response the response - * @throws IOException Signals that an I/O exception has occurred. - */ - public static void handleSearchServletErrors(Logger logger, String errorMsg, Exception exc, - HttpServletResponse response) throws IOException { - String errorLogMsg = - (exc == null ? errorMsg : errorMsg + ". Error:" + exc.getLocalizedMessage()); - logger.error(AaiUiMsgs.ERROR_GENERIC, errorLogMsg); - response.setContentType("application/json"); - PrintWriter out = response.getWriter(); - out.println(generateJsonErrorResponse(errorMsg)); - out.close(); - } - - /** - * Generate json error response. - * - * @param message the message - * @return the string - */ - public static String generateJsonErrorResponse(String message) { - return String.format("{ \"errorMessage\" : %s }", message); - } - - /** - * Sets the servlet response. - * - * @param logger the logger - * @param isError the is error - * @param responseCode the response code - * @param response the response - * @param postPayload the post payload - * @throws IOException Signals that an I/O exception has occurred. - */ - public static void setServletResponse(Logger logger, boolean isError, int responseCode, - HttpServletResponse response, String postPayload) throws IOException { - - if (isError) { - logger.error(AaiUiMsgs.ERROR_GENERIC, postPayload); - } - - response.setStatus(responseCode); - - if (postPayload != null) { - response.setContentType("application/json"); - PrintWriter out = response.getWriter(); - out.println(postPayload); - out.close(); - } - } - - /** - * Gets the full url. 
- * - * @param elasticConfig the elastic config - * @param resourceUrl the resource url - * @return the full url - */ - public static String getFullUrl(ElasticSearchConfig elasticConfig, String resourceUrl) { - final String host = elasticConfig.getIpAddress(); - final String port = elasticConfig.getHttpPort(); - return String.format("http://%s:%s%s", host, port, resourceUrl); - } -} diff --git a/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java b/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java index 463d122..ba51254 100644 --- a/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java +++ b/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java @@ -36,44 +36,63 @@ public class SuggestionsPermutation { * * @return A Arraylist which contains a array list of all possible combinations */ - @SuppressWarnings("serial") - public ArrayList> getSuggestionsPermutation(List list) { - List statusList = new ArrayList<>(list); - List dupStatusList; - ArrayList> uniqueList = new ArrayList>(); - int mainLoopIndexCounter = 0; - for (String status : statusList) { - // Add the single entity subset - uniqueList.add(new ArrayList() { - { - add(status); - } - }); - // Remove all the elements to left till the current index - dupStatusList = truncateListUntill(statusList, mainLoopIndexCounter); + public static ArrayList> getUniqueListForSuggestions( + List originalList) { + ArrayList> lists = new ArrayList>(); + if (originalList.isEmpty()) { + lists.add(new ArrayList()); + return lists; + } + List list = new ArrayList(originalList); + String head = list.get(0); + ArrayList rest = new ArrayList(list.subList(1, list.size())); - while (dupStatusList.size() > 0) { - ArrayList suggListInIterate = new ArrayList<>(); - suggListInIterate.add(status); - for (String dupStatus : dupStatusList) { - suggListInIterate.add(dupStatus); - } - uniqueList.add(suggListInIterate); - dupStatusList.remove(0); - } - mainLoopIndexCounter++; + for (ArrayList activeList : getUniqueListForSuggestions(rest)) { + ArrayList newList = new ArrayList(); + newList.add(head); + newList.addAll(activeList); + lists.add(newList); + lists.add(activeList); } - return uniqueList; + return lists; + } + public static ArrayList> getNonEmptyUniqueLists(List list) { + ArrayList> lists = getUniqueListForSuggestions(list); + // remove empty list from the power set + for (ArrayList emptyList : lists) { + if (emptyList.isEmpty()) { + lists.remove(emptyList); + break; + } + } + return lists; } - private List truncateListUntill(List lists, int index) { - List truncatedList = new ArrayList<>(lists); - int counter = 0; - while (counter <= index) { - truncatedList.remove(0); - counter++; + public static List> getListPermutations(List list) { + List inputList = new ArrayList(); + inputList.addAll(list); + if (inputList.size() == 0) { + List> result = new ArrayList>(); + result.add(new ArrayList()); + return result; } - return truncatedList; + + List> listOfLists = new ArrayList>(); + + String firstElement = inputList.remove(0); + + List> recursiveReturn = getListPermutations(inputList); + for (List li : recursiveReturn) { + + for (int index = 0; index <= li.size(); index++) { + List temp = new ArrayList(li); + temp.add(index, firstElement); + listOfLists.add(temp); + } + + } + return listOfLists; } + } diff --git a/src/main/java/org/onap/aai/sparky/util/TreeWalker.java b/src/main/java/org/onap/aai/sparky/util/TreeWalker.java index 6306a30..2221475 100644 --- 
a/src/main/java/org/onap/aai/sparky/util/TreeWalker.java +++ b/src/main/java/org/onap/aai/sparky/util/TreeWalker.java @@ -22,17 +22,17 @@ */ package org.onap.aai.sparky.util; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; - import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; + /** * The Class TreeWalker. */ diff --git a/src/main/java/org/onap/aai/sparky/util/test/Encryptor.java b/src/main/java/org/onap/aai/sparky/util/test/Encryptor.java deleted file mode 100644 index c24f2c2..0000000 --- a/src/main/java/org/onap/aai/sparky/util/test/Encryptor.java +++ /dev/null @@ -1,83 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.util.test; - -import org.apache.commons.cli.BasicParser; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.eclipse.jetty.util.security.Password; - -/** - * The Class Encryptor. - */ -public class Encryptor { - - /** - * Instantiates a new encryptor. - */ - public Encryptor() {} - - /** - * Decrypt value. - * - * @param value the value - * @return the string - */ - public String decryptValue(String value) { - String decyptedValue = ""; - - try { - decyptedValue = Password.deobfuscate(value); - } catch (Exception exc) { - System.err.println("Cannot decrypt '" + value + "': " + exc.toString()); - } - - return decyptedValue; - } - - /** - * Usage. - */ - public static void usage() { - usage(null); - } - - /** - * Usage. 
- * - * @param msg the msg - */ - public static void usage(String msg) { - if (msg != null) { - System.err.println(msg); - } - System.err.println("Usage: java Encryptor -e value"); - System.err.println("\tEncrypt the given value"); - System.err.println("Usage: java Encryptor -d value"); - System.err.println("\tDecrypt the given value"); - System.exit(1); - } - -} diff --git a/src/main/java/org/onap/aai/sparky/util/test/KeystoreBuilder.java b/src/main/java/org/onap/aai/sparky/util/test/KeystoreBuilder.java deleted file mode 100644 index e771066..0000000 --- a/src/main/java/org/onap/aai/sparky/util/test/KeystoreBuilder.java +++ /dev/null @@ -1,541 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.util.test; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.net.UnknownHostException; -import java.security.KeyManagementException; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.security.cert.CertificateEncodingException; -import java.security.cert.CertificateException; -import java.security.cert.CertificateParsingException; -import java.security.cert.X509Certificate; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLException; -import javax.net.ssl.SSLSocket; -import javax.net.ssl.SSLSocketFactory; -import javax.net.ssl.TrustManager; -import javax.net.ssl.TrustManagerFactory; -import javax.net.ssl.X509TrustManager; - -/** - * The Class KeystoreBuilder. - */ -public class KeystoreBuilder { - - /** - * The Class EndPoint. - */ - private class EndPoint { - private String hostname; - private int port; - - /** - * Instantiates a new end point. - */ - @SuppressWarnings("unused") - public EndPoint() {} - - /** - * Instantiates a new end point. 
- * - * @param host the host - * @param port the port - */ - public EndPoint(String host, int port) { - this.hostname = host; - this.port = port; - } - - public String getHostname() { - return hostname; - } - - @SuppressWarnings("unused") - public void setHostname(String hostname) { - this.hostname = hostname; - } - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return "EndPoint [hostname=" + hostname + ", port=" + port + "]"; - } - - } - - private List endpoints = new ArrayList(); - - /** - * @return the endpoints - */ - public List getEndpoints() { - return endpoints; - } - - /** - * @param endpoints the endpoints to set - */ - public void setEndpoints(List endpoints) { - this.endpoints = endpoints; - } - - /** - * Initialize end points list. - * - * @param endpointList the endpoint list - */ - private void initializeEndPointsList(String endpointList) { - String[] endpointUris = endpointList.split(";"); - - for (String endpointUri : endpointUris) { - - String ipAndPort = endpointUri.replaceAll("http://", ""); - ipAndPort = endpointUri.replaceAll("https://", ""); - - // System.out.println("ipAndPortUrl = " + ipAndPort); - - String[] hostAndPort = ipAndPort.split(":"); - - String hostname = hostAndPort[0]; - int port = Integer.parseInt(hostAndPort[1]); - - EndPoint ep = new EndPoint(hostname, port); - endpoints.add(ep); - } - - } - - /** - * Instantiates a new keystore builder. - * - * @param endpointList the endpoint list - * @throws NoSuchAlgorithmException the no such algorithm exception - */ - public KeystoreBuilder(String endpointList) throws NoSuchAlgorithmException { - initializeEndPointsList(endpointList); - sha1 = MessageDigest.getInstance("SHA1"); - md5 = MessageDigest.getInstance("MD5"); - } - - private static final String SEP = File.separator; - private SavingTrustManager savingTrustManager; - private SSLSocketFactory sslSocketFactory; - private MessageDigest sha1; - private MessageDigest md5; - private KeyStore ks; - private String keystoreFileName; - private String keystorePassword; - private boolean dumpCertDetails = false; - - public void setDumpCertDetails(boolean shouldSet) { - dumpCertDetails = shouldSet; - } - - /** - * Update keystore. - * - * @param keystoreFileName the keystore file name - * @param keystorePassword the keystore password - * @throws KeyStoreException the key store exception - * @throws NoSuchAlgorithmException the no such algorithm exception - * @throws CertificateException the certificate exception - * @throws IOException Signals that an I/O exception has occurred. 
- * @throws KeyManagementException the key management exception - */ - public void updateKeystore(String keystoreFileName, String keystorePassword) - throws KeyStoreException, NoSuchAlgorithmException, CertificateException, IOException, - KeyManagementException { - - this.keystoreFileName = keystoreFileName; - this.keystorePassword = keystorePassword; - - File file = new File(keystoreFileName); - String password = keystorePassword; - - if (file.isFile() == false) { - - File dir = new File(System.getProperty("java.home") + SEP + "lib" + SEP + "security"); - file = new File(dir, "jssecacerts"); - if (file.isFile() == false) { - - file = new File(dir, "cacerts"); - System.out.println("keystore file doesn't exist, preloading new file with cacerts"); - - } else { - System.out.println("keystore file doesn't exist, preloading new file with jssecacerts"); - } - password = "changeit"; - - } - - InputStream in = new FileInputStream(file); - ks = KeyStore.getInstance(KeyStore.getDefaultType()); - ks.load(in, password.toCharArray()); - in.close(); - - SSLContext context = SSLContext.getInstance("TLS"); - TrustManagerFactory tmf = - TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(ks); - X509TrustManager defaultTrustManager = (X509TrustManager) tmf.getTrustManagers()[0]; - savingTrustManager = new SavingTrustManager(defaultTrustManager); - context.init(null, new TrustManager[] {savingTrustManager}, null); - sslSocketFactory = context.getSocketFactory(); - - System.out.println("About to add the following endpoint server certificates to the keystore:"); - for (EndPoint ep : endpoints) { - System.out.println("\t--------------------------"); - System.out.println("\t" + ep.toString()); - - X509Certificate[] certChain = - getCertificateChainForRemoteEndpoint(ep.getHostname(), ep.getPort()); - - if (certChain == null) { - System.out.println("Could not obtain server certificate chain"); - return; - } - - dumpCertChainInfo(certChain); - - updateKeyStoreWithCertChain(certChain); - - } - - } - - /** - * Gets the certificate chain for remote endpoint. - * - * @param hostname the hostname - * @param port the port - * @return the certificate chain for remote endpoint - * @throws UnknownHostException the unknown host exception - * @throws IOException Signals that an I/O exception has occurred. - */ - private X509Certificate[] getCertificateChainForRemoteEndpoint(String hostname, int port) - throws UnknownHostException, IOException { - - System.out.println("Opening connection to " + hostname + ":" + port + ".."); - SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(hostname, port); - socket.setSoTimeout(10000); - - try { - System.out.println("Starting SSL handshake..."); - socket.startHandshake(); - socket.close(); - System.out.println("\nNo errors, certificate is already trusted"); - System.exit(0); - } catch (SSLException exc) { - System.out.println("\nCaught SSL exception, we are not authorized to access this server yet"); - // e.printStackTrace(System.out); - } - - return savingTrustManager.chain; - - } - - /** - * Dump cert chain info. 
- * - * @param chain the chain - * @throws NoSuchAlgorithmException the no such algorithm exception - * @throws CertificateEncodingException the certificate encoding exception - * @throws CertificateParsingException the certificate parsing exception - */ - private void dumpCertChainInfo(X509Certificate[] chain) - throws NoSuchAlgorithmException, CertificateEncodingException, CertificateParsingException { - - System.out.println(); - System.out.println("Server sent " + chain.length + " certificate(s):"); - System.out.println(); - - for (int i = 0; i < chain.length; i++) { - X509Certificate cert = chain[i]; - - if (dumpCertDetails) { - System.out.println("Full cert details @ index = " + i + " \n" + cert.toString()); - } - - System.out.println("Subject: " + cert.getSubjectDN()); - System.out.println("Issuer: " + cert.getIssuerDN()); - System.out.println("SubjectAlternativeNames: "); - - /* - * RFC-5280, pg. 38, section 4.2.1.6 ( Subject Alternative Names ) - * - * Finally, the semantics of subject alternative names that include wildcard characters (e.g., - * as a placeholder for a set of names) are not addressed by this specification. Applications - * with specific requirements MAY use such names, but they must define the semantics. - * - * id-ce-subjectAltName OBJECT IDENTIFIER ::= { id-ce 17 } - * - * SubjectAltName ::= GeneralNames - * - * GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName - * - * GeneralName ::= CHOICE { otherName [0] OtherName, rfc822Name [1] IA5String, dNSName [2] - * IA5String, <-- the 2 in the output is a type operand x400Address [3] ORAddress, - * directoryName [4] Name, ediPartyName [5] EDIPartyName, uniformResourceIdentifier [6] - * IA5String, iPAddress [7] OCTET STRING, registeredID [8] OBJECT IDENTIFIER } - * - * OtherName ::= SEQUENCE { type-id OBJECT IDENTIFIER, value [0] EXPLICIT ANY DEFINED BY - * type-id } - * - * EDIPartyName ::= SEQUENCE { nameAssigner [0] DirectoryString OPTIONAL, partyName [1] - * DirectoryString } - * - */ - - Collection> sans = cert.getSubjectAlternativeNames(); - - for (List san : sans) { - - /* - * It seems the structure of the array elements contained within the SAN is: [, - * ]* - * - */ - - int type = ((Integer) san.get(0)).intValue(); - String typeStr = getSanType(type); - String value = (String) san.get(1); - - System.out.println(String.format("\tType:'%s', Value: '%s'.", typeStr, value)); - - } - - } - - } - - /** - * Gets the subject alternative names. - * - * @param cert the cert - * @return the subject alternative names - * @throws CertificateParsingException the certificate parsing exception - */ - private List getSubjectAlternativeNames(X509Certificate cert) - throws CertificateParsingException { - - Collection> sans = cert.getSubjectAlternativeNames(); - List subjectAlternativeNames = new ArrayList(); - - for (List san : sans) { - - /* - * It seems the structure of the array elements contained within the SAN is: [, - * ]* - * - */ - - String value = (String) san.get(1); - subjectAlternativeNames.add(value); - } - - return subjectAlternativeNames; - } - - /** - * Update key store with cert chain. - * - * @param chain the chain - * @throws NoSuchAlgorithmException the no such algorithm exception - * @throws KeyStoreException the key store exception - * @throws CertificateException the certificate exception - * @throws IOException Signals that an I/O exception has occurred. 
- */ - private void updateKeyStoreWithCertChain(X509Certificate[] chain) - throws NoSuchAlgorithmException, KeyStoreException, CertificateException, IOException { - - for (X509Certificate cert : chain) { - - List sans = getSubjectAlternativeNames(cert); - - for (String san : sans) { - ks.setCertificateEntry(san, cert); - System.out.println( - "Added certificate to keystore '" + keystoreFileName + "' using alias '" + san + "'"); - } - } - - OutputStream out = new FileOutputStream(keystoreFileName); - ks.store(out, keystorePassword.toCharArray()); - out.close(); - - } - - - /** - * The Class SavingTrustManager. - */ - private static class SavingTrustManager implements X509TrustManager { - - private final X509TrustManager tm; - private X509Certificate[] chain; - - /** - * Instantiates a new saving trust manager. - * - * @param tm the tm - */ - SavingTrustManager(X509TrustManager tm) { - this.tm = tm; - } - - @Override - public X509Certificate[] getAcceptedIssuers() { - throw new UnsupportedOperationException(); - } - - /* - * (non-Javadoc) - * - * @see javax.net.ssl.X509TrustManager#checkClientTrusted(java.security.cert.X509Certificate[], - * java.lang.String) - */ - @Override - public void checkClientTrusted(X509Certificate[] chain, String authType) - throws CertificateException { - throw new UnsupportedOperationException(); - } - - /* - * (non-Javadoc) - * - * @see javax.net.ssl.X509TrustManager#checkServerTrusted(java.security.cert.X509Certificate[], - * java.lang.String) - */ - @Override - public void checkServerTrusted(X509Certificate[] chain, String authType) - throws CertificateException { - this.chain = chain; - tm.checkServerTrusted(chain, authType); - } - } - - private static final char[] HEXDIGITS = "0123456789abcdef".toCharArray(); - - /** - * Gets the san type. - * - * @param type the type - * @return the san type - */ - // TODO: convert to enum(int,string) - private String getSanType(int type) { - switch (type) { - case 0: - return "otherName"; - case 1: - return "rfc822Name"; - case 2: - return "dNSName"; - case 3: - return "x400Address"; - case 4: - return "directoryName"; - case 5: - return "ediPartyName"; - case 6: - return "uniformResourceIdentifier"; - case 7: - return "iPAddress"; - case 8: - return "registeredID"; - default: - return "unknownSanType"; - } - } - - - /** - * To hex string. - * - * @param bytes the bytes - * @return the string - */ - private static String toHexString(byte[] bytes) { - StringBuilder sb = new StringBuilder(bytes.length * 3); - for (int b : bytes) { - b &= 0xff; - sb.append(HEXDIGITS[b >> 4]); - sb.append(HEXDIGITS[b & 15]); - sb.append(' '); - } - return sb.toString(); - } - - - - /** - * The main method. 
- * - * @param args the arguments - * @throws Exception the exception - */ - public static void main(String[] args) throws Exception { - - /* - * Examples: localhost:8440;localhost:8442 d:\1\adhoc_keystore.jks aaiDomain2 false - * localhost:8440;localhost:8442 d:\1\adhoc_keystore.jks aaiDomain2 true - */ - - if (args.length != 4) { - System.out.println("Usage: KeyBuilder <[ip:port];*> " - + " "); - System.exit(1); - } - KeystoreBuilder kb = new KeystoreBuilder(args[0]); - kb.setDumpCertDetails(Boolean.parseBoolean(args[3])); - kb.updateKeystore(args[1], args[2]); - - } -} - - diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java b/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java index ff8d5d8..594b49f 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java @@ -22,13 +22,10 @@ */ package org.onap.aai.sparky.viewandinspect; -import com.fasterxml.jackson.core.JsonProcessingException; - import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.sparky.viewandinspect.EntityTypeAggregation; + /** * The Class EntityTypeAggregation. @@ -37,13 +34,6 @@ public class EntityTypeAggregation { private ConcurrentHashMap counters; - /** - * @param counters the counters to set - */ - public void setCounters(ConcurrentHashMap counters) { - this.counters = counters; - } - /** * Instantiates a new entity type aggregation. */ @@ -64,4 +54,5 @@ public class EntityTypeAggregation { public ConcurrentHashMap getCounters() { return counters; } + } diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java b/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java new file mode 100644 index 0000000..822b14b --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java @@ -0,0 +1,174 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.viewandinspect; + +import java.security.SecureRandom; +import java.util.concurrent.ExecutorService; + +import org.apache.camel.Exchange; +import org.apache.camel.component.restlet.RestletConstants; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.onap.aai.sparky.viewandinspect.entity.JsonNode; +import org.onap.aai.sparky.viewandinspect.entity.NodeMeta; +import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; +import org.onap.aai.sparky.viewandinspect.services.VisualizationContext; +import org.onap.aai.sparky.viewandinspect.services.VisualizationService; +import org.onap.aai.sparky.viewandinspect.services.VisualizationTransformer; +import org.onap.aai.sparky.viewinspect.sync.ViewInspectSyncController; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.MediaType; +import org.restlet.data.Status; + +public class SchemaVisualizationProcessor { + + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(SchemaVisualizationProcessor.class); + + private final VisualizationService visualizationService; + private VisualizationTransformer visualizationTransformer; + private VisualizationContext visualizationContext; + private NodeMeta nodeMeta; + private JsonNode jsonNode; + private ActiveInventoryNode activeInventoryNode; + private final ExecutorService tabularExecutorService; + private final ExecutorService aaiExecutorService; + private final SecureRandom secureRandom; + private ActiveInventoryAdapter aaiAdapter; + private ElasticSearchAdapter esAdapter; + private ElasticSearchEndpointConfig endpointConfig; + private ElasticSearchSchemaConfig schemaConfig; + private ActiveInventoryConfig aaiConfig; + + public SchemaVisualizationProcessor(VisualizationConfigs visualizationConfigs, + OxmModelLoader oxmModelLoader, ViewInspectSyncController viewInspectSynController) + throws Exception { + + this.aaiAdapter = viewInspectSynController.getAaiAdapter(); + this.esAdapter = viewInspectSynController.getElasticSearchAdapter(); + this.endpointConfig = viewInspectSynController.getendpointConfig(); + this.schemaConfig = viewInspectSynController.getschemaConfig(); + + this.visualizationService = new VisualizationService(oxmModelLoader, visualizationConfigs, + aaiAdapter, esAdapter, endpointConfig, schemaConfig); + this.activeInventoryNode = new ActiveInventoryNode(visualizationConfigs); + this.nodeMeta = new NodeMeta(visualizationConfigs); + secureRandom = new SecureRandom(); + this.tabularExecutorService = NodeUtils.createNamedExecutor("TABULAR-WORKER", + visualizationConfigs.getNumOfThreadsToFetchNodeIntegrity(), LOG); + /* + * Fix ActiveInvenotryConfig with properly wired in properties + */ + this.aaiConfig = ActiveInventoryConfig.getConfig(); + 
this.aaiExecutorService = NodeUtils.createNamedExecutor("SLNC-WORKER", + aaiConfig.getAaiRestConfig().getNumResolverWorkers(), LOG); + + this.visualizationContext = new VisualizationContext(secureRandom.nextLong(), aaiAdapter, + tabularExecutorService, aaiExecutorService, visualizationConfigs); + this.visualizationTransformer = new VisualizationTransformer(visualizationConfigs); + this.jsonNode = new JsonNode(activeInventoryNode, visualizationConfigs); + + } + + protected String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : %s }", message); + } + + public void processVisualizationRequest(Exchange exchange) { + + String visualizationPayload = ""; + QueryRequest hashId = null; + OperationResult operationResult = null; + Request request = null; + Response response = null; + Object xTransactionId = null; + Object partnerName = null; + + xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + xTransactionId = NodeUtils.getRandomTxnId(); + } + partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + + request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + response = exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + + /* + * Disables automatic Apache Camel Restlet component logging which prints out an undesirable log + * entry which includes client (e.g. browser) information + */ + request.setLoggable(false); + + ClientInfo clientInfo = request.getClientInfo(); + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, + clientInfo.getAddress() + ":" + clientInfo.getPort()); + + visualizationPayload = exchange.getIn().getBody(String.class); + hashId = visualizationService.analyzeQueryRequestBody(visualizationPayload); + + if (hashId != null) { + + operationResult = visualizationService.buildVisualizationUsingGenericQuery(hashId); + + if (operationResult.getResultCode() == Status.SUCCESS_OK.getCode()) { + + response.setStatus(Status.SUCCESS_OK); + } else { + response.setStatus(Status.SERVER_ERROR_INTERNAL); + LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, String + .format("Failed to process Visualization Schema Payload = '%s'", visualizationPayload)); + } + + } else { + operationResult = new OperationResult(); + operationResult.setResult(String + .format("Failed to analyze Visualization Schema Payload = '%s'", visualizationPayload)); + response.setStatus(Status.SERVER_ERROR_INTERNAL); + LOG.error(AaiUiMsgs.FAILED_TO_ANALYZE, String + .format("Failed to analyze Visualization Schema Payload = '%s'", visualizationPayload)); + + } + + + response.setEntity(operationResult.getResult(), MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + } +} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java b/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java index 77a34da..7050595 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java @@ -45,16 +45,28 @@ public class TierSupportUiConstants { public static String CONFIG_AUTH_LOCATION = CONFIG_HOME + "auth" + FILESEP; public static String HOST = "host"; + public static String IP_ADDRESS = "ipAddress"; public static String PORT = "port"; + public static String HTTP_PORT = "httpPort"; public static String RETRIES 
= "numRequestRetries"; public static String RESOURCE_VERSION = "resource-version"; public static String URI = "URI"; + public static String AUTHORIZED_USERS_FILE_LOCATION = + DYNAMIC_CONFIG_APP_LOCATION + "authorized-users.config"; public static String USERS_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "users.config"; public static String ROLES_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "roles.config"; public static String PORTAL_AUTHENTICATION_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "portal" + FILESEP + "portal-authentication.properties"; + // Related to data-router properties + public static String DR_URI_SUFFIX = "uriSuffix"; + public static String DR_CERT_NAME = "cert-name"; + public static String DR_KEYSTORE_PASSWORD = "keystore-password"; + public static String DR_KEYSTORE = "keystore"; + public static String DR_CONNECT_TIMEOUT = "connectTimeoutMs"; + public static String DR_READ_TIMEOUT = "readTimeoutMs"; + public static final String ES_SUGGEST_API = "_suggest"; public static final String ES_COUNT_API = "_count"; public static final String ES_SEARCH_API = "_search"; @@ -74,10 +86,7 @@ public class TierSupportUiConstants { public static final String FILTER_MAPPING_FILE_DEFAULT = CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_views.json"; - // JUnit testing synchronizer.properties file - public static String TEST_CONFIG_FILE = - System.getProperty("user.dir") + FILESEP + "bundleconfig-local" + FILESEP + "etc" + FILESEP - + "appprops" + FILESEP + "synchronizer.properties"; + public static final String SUGGESTION_TEXT_SEPARATOR = " -- "; // Injected Attributes public static String URI_ATTR_NAME = "uri"; @@ -88,339 +97,4 @@ public class TierSupportUiConstants { return AJSC_HOME + FILESEP + configFile; } - public static final String getAggregationIndexName(String entityType) { - return "aggregate_" + entityType + "_index"; - } - - /** - * @return the aPP_NAME - */ - public static String getAPP_NAME() { - return APP_NAME; - } - - /** - * @param aPP_NAME the aPP_NAME to set - */ - public static void setAPP_NAME(String aPP_NAME) { - APP_NAME = aPP_NAME; - } - - /** - * @return the cONFIG_HOME - */ - public static String getCONFIG_HOME() { - return CONFIG_HOME; - } - - /** - * @param cONFIG_HOME the cONFIG_HOME to set - */ - public static void setCONFIG_HOME(String cONFIG_HOME) { - CONFIG_HOME = cONFIG_HOME; - } - - /** - * @return the aJSC_HOME - */ - public static String getAJSC_HOME() { - return AJSC_HOME; - } - - /** - * @param aJSC_HOME the aJSC_HOME to set - */ - public static void setAJSC_HOME(String aJSC_HOME) { - AJSC_HOME = aJSC_HOME; - } - - /** - * @return the cONFIG_ROOT_LOCATION - */ - public static String getCONFIG_ROOT_LOCATION() { - return CONFIG_ROOT_LOCATION; - } - - /** - * @param cONFIG_ROOT_LOCATION the cONFIG_ROOT_LOCATION to set - */ - public static void setCONFIG_ROOT_LOCATION(String cONFIG_ROOT_LOCATION) { - CONFIG_ROOT_LOCATION = cONFIG_ROOT_LOCATION; - } - - /** - * @return the sTATIC_CONFIG_APP_LOCATION - */ - public static String getSTATIC_CONFIG_APP_LOCATION() { - return STATIC_CONFIG_APP_LOCATION; - } - - /** - * @param sTATIC_CONFIG_APP_LOCATION the sTATIC_CONFIG_APP_LOCATION to set - */ - public static void setSTATIC_CONFIG_APP_LOCATION(String sTATIC_CONFIG_APP_LOCATION) { - STATIC_CONFIG_APP_LOCATION = sTATIC_CONFIG_APP_LOCATION; - } - - /** - * @return the dYNAMIC_CONFIG_APP_LOCATION - */ - public static String getDYNAMIC_CONFIG_APP_LOCATION() { - return DYNAMIC_CONFIG_APP_LOCATION; - } - - /** - * @param dYNAMIC_CONFIG_APP_LOCATION 
the dYNAMIC_CONFIG_APP_LOCATION to set - */ - public static void setDYNAMIC_CONFIG_APP_LOCATION(String dYNAMIC_CONFIG_APP_LOCATION) { - DYNAMIC_CONFIG_APP_LOCATION = dYNAMIC_CONFIG_APP_LOCATION; - } - - /** - * @return the cONFIG_OXM_LOCATION - */ - public static String getCONFIG_OXM_LOCATION() { - return CONFIG_OXM_LOCATION; - } - - /** - * @param cONFIG_OXM_LOCATION the cONFIG_OXM_LOCATION to set - */ - public static void setCONFIG_OXM_LOCATION(String cONFIG_OXM_LOCATION) { - CONFIG_OXM_LOCATION = cONFIG_OXM_LOCATION; - } - - /** - * @return the cONFIG_FILTERS_BASE_LOCATION - */ - public static String getCONFIG_FILTERS_BASE_LOCATION() { - return CONFIG_FILTERS_BASE_LOCATION; - } - - /** - * @param cONFIG_FILTERS_BASE_LOCATION the cONFIG_FILTERS_BASE_LOCATION to set - */ - public static void setCONFIG_FILTERS_BASE_LOCATION(String cONFIG_FILTERS_BASE_LOCATION) { - CONFIG_FILTERS_BASE_LOCATION = cONFIG_FILTERS_BASE_LOCATION; - } - - - /** - * @return the cONFIG_AUTH_LOCATION - */ - public static String getCONFIG_AUTH_LOCATION() { - return CONFIG_AUTH_LOCATION; - } - - /** - * @param cONFIG_AUTH_LOCATION the cONFIG_AUTH_LOCATION to set - */ - public static void setCONFIG_AUTH_LOCATION(String cONFIG_AUTH_LOCATION) { - CONFIG_AUTH_LOCATION = cONFIG_AUTH_LOCATION; - } - - /** - * @return the hOST - */ - public static String getHOST() { - return HOST; - } - - /** - * @param hOST the hOST to set - */ - public static void setHOST(String hOST) { - HOST = hOST; - } - - /** - * @return the pORT - */ - public static String getPORT() { - return PORT; - } - - /** - * @param pORT the pORT to set - */ - public static void setPORT(String pORT) { - PORT = pORT; - } - - /** - * @return the rETRIES - */ - public static String getRETRIES() { - return RETRIES; - } - - /** - * @param rETRIES the rETRIES to set - */ - public static void setRETRIES(String rETRIES) { - RETRIES = rETRIES; - } - - /** - * @return the rESOURCE_VERSION - */ - public static String getRESOURCE_VERSION() { - return RESOURCE_VERSION; - } - - /** - * @param rESOURCE_VERSION the rESOURCE_VERSION to set - */ - public static void setRESOURCE_VERSION(String rESOURCE_VERSION) { - RESOURCE_VERSION = rESOURCE_VERSION; - } - - /** - * @return the uRI - */ - public static String getURI() { - return URI; - } - - /** - * @param uRI the uRI to set - */ - public static void setURI(String uRI) { - URI = uRI; - } - - /** - * @return the uSERS_FILE_LOCATION - */ - public static String getUSERS_FILE_LOCATION() { - return USERS_FILE_LOCATION; - } - - /** - * @param uSERS_FILE_LOCATION the uSERS_FILE_LOCATION to set - */ - public static void setUSERS_FILE_LOCATION(String uSERS_FILE_LOCATION) { - USERS_FILE_LOCATION = uSERS_FILE_LOCATION; - } - - /** - * @return the rOLES_FILE_LOCATION - */ - public static String getROLES_FILE_LOCATION() { - return ROLES_FILE_LOCATION; - } - - /** - * @param rOLES_FILE_LOCATION the rOLES_FILE_LOCATION to set - */ - public static void setROLES_FILE_LOCATION(String rOLES_FILE_LOCATION) { - ROLES_FILE_LOCATION = rOLES_FILE_LOCATION; - } - - /** - * @return the pORTAL_AUTHENTICATION_FILE_LOCATION - */ - public static String getPORTAL_AUTHENTICATION_FILE_LOCATION() { - return PORTAL_AUTHENTICATION_FILE_LOCATION; - } - - /** - * @param pORTAL_AUTHENTICATION_FILE_LOCATION the pORTAL_AUTHENTICATION_FILE_LOCATION to set - */ - public static void setPORTAL_AUTHENTICATION_FILE_LOCATION( - String pORTAL_AUTHENTICATION_FILE_LOCATION) { - PORTAL_AUTHENTICATION_FILE_LOCATION = pORTAL_AUTHENTICATION_FILE_LOCATION; - } - - /** - * @return the 
tEST_CONFIG_FILE - */ - public static String getTEST_CONFIG_FILE() { - return TEST_CONFIG_FILE; - } - - /** - * @param tEST_CONFIG_FILE the tEST_CONFIG_FILE to set - */ - public static void setTEST_CONFIG_FILE(String tEST_CONFIG_FILE) { - TEST_CONFIG_FILE = tEST_CONFIG_FILE; - } - - /** - * @return the uRI_ATTR_NAME - */ - public static String getURI_ATTR_NAME() { - return URI_ATTR_NAME; - } - - /** - * @param uRI_ATTR_NAME the uRI_ATTR_NAME to set - */ - public static void setURI_ATTR_NAME(String uRI_ATTR_NAME) { - URI_ATTR_NAME = uRI_ATTR_NAME; - } - - /** - * @return the filesep - */ - public static String getFilesep() { - return FILESEP; - } - - /** - * @return the esSuggestApi - */ - public static String getEsSuggestApi() { - return ES_SUGGEST_API; - } - - /** - * @return the esCountApi - */ - public static String getEsCountApi() { - return ES_COUNT_API; - } - - /** - * @return the esSearchApi - */ - public static String getEsSearchApi() { - return ES_SEARCH_API; - } - - /** - * @return the entityAutoSuggestIndexNameDefault - */ - public static String getEntityAutoSuggestIndexNameDefault() { - return ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT; - } - - /** - * @return the entityAutoSuggestSettingsFileDefault - */ - public static String getEntityAutoSuggestSettingsFileDefault() { - return ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT; - } - - /** - * @return the entityAutoSuggestMappingsFileDefault - */ - public static String getEntityAutoSuggestMappingsFileDefault() { - return ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT; - } - - /** - * @return the entityDynamicMappingsFileDefault - */ - public static String getEntityDynamicMappingsFileDefault() { - return ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT; - } - - /** - * @return the uriVersionRegexPattern - */ - public static String getUriVersionRegexPattern() { - return URI_VERSION_REGEX_PATTERN; - } - } diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfig.java b/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfig.java deleted file mode 100644 index 77f3d97..0000000 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfig.java +++ /dev/null @@ -1,219 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.viewandinspect.config; - -import java.util.Properties; - -import org.onap.aai.sparky.util.ConfigHelper; - -/** - * The Class VisualizationConfig. 
- */ -public class VisualizationConfig { - - private int maxSelfLinkTraversalDepth; - - private boolean visualizationDebugEnabled; - - private String aaiEntityNodeDescriptors; - - private String generalNodeClassName; - - private String searchNodeClassName; - - private String selectedSearchedNodeClassName; - - private String entityTypesToSummarize; - private String vnfEntityTypes; - - private boolean makeAllNeighborsBidirectional; - - private static VisualizationConfig instance; - - public static VisualizationConfig getConfig() { - - if (instance == null) { - instance = new VisualizationConfig(); - } - - return instance; - - } - - /** - * Instantiates a new visualization config. - */ - public VisualizationConfig() { - - Properties visualizationProps = - ConfigHelper.loadConfigFromExplicitPath(TierSupportUiConstants.STATIC_CONFIG_APP_LOCATION - + TierSupportUiConstants.FILESEP + "visualization.properties"); - - maxSelfLinkTraversalDepth = - Integer.parseInt(visualizationProps.getProperty("maxSelfLinkTraversalDepth", "2")); - visualizationDebugEnabled = - Boolean.parseBoolean(visualizationProps.getProperty("visualizationDebugEnabled", "false")); - aaiEntityNodeDescriptors = visualizationProps.getProperty("aaiEntityNodeDescriptors", null); - generalNodeClassName = - visualizationProps.getProperty("generalNodeClassName", "unknownClassName"); - searchNodeClassName = - visualizationProps.getProperty("searchedNodeClassName", "unknownClassName"); - selectedSearchedNodeClassName = - visualizationProps.getProperty("selectedSearchedNodeClassName", "unknownClassName"); - - entityTypesToSummarize = visualizationProps.getProperty("entityTypesToSummarize", - "customer,service-instance,complex,pserver,vserver,vnf"); - - vnfEntityTypes = visualizationProps.getProperty("vnfEntityTypes", "generic-vnf,newvce,vce,vpe"); - - makeAllNeighborsBidirectional = Boolean - .parseBoolean(visualizationProps.getProperty("makeAllNeighborsBidirectional", "false")); - - } - - - - /** - * Make all neighbors bidirectional. 
- * - * @return true, if successful - */ - public boolean makeAllNeighborsBidirectional() { - return makeAllNeighborsBidirectional; - } - - public void setMakeAllNeighborsBidirectional(boolean makeAllNeighborsBidirectional) { - this.makeAllNeighborsBidirectional = makeAllNeighborsBidirectional; - } - - public String getSelectedSearchedNodeClassName() { - return selectedSearchedNodeClassName; - } - - public void setSelectedSearchedNodeClassName(String selectedSearchedNodeClassName) { - this.selectedSearchedNodeClassName = selectedSearchedNodeClassName; - } - - public String getGeneralNodeClassName() { - return generalNodeClassName; - } - - public void setGeneralNodeClassName(String generalNodeClassName) { - this.generalNodeClassName = generalNodeClassName; - } - - public String getSearchNodeClassName() { - return searchNodeClassName; - } - - public void setSearchNodeClassName(String searchNodeClassName) { - this.searchNodeClassName = searchNodeClassName; - } - - public String getAaiEntityNodeDescriptors() { - return aaiEntityNodeDescriptors; - } - - public void setAaiEntityNodeDescriptors(String aaiEntityNodeDescriptors) { - this.aaiEntityNodeDescriptors = aaiEntityNodeDescriptors; - } - - public boolean isVisualizationDebugEnabled() { - return visualizationDebugEnabled; - } - - public void setVisualizationDebugEnabled(boolean visualizationDebugEnabled) { - this.visualizationDebugEnabled = visualizationDebugEnabled; - } - - public void setMaxSelfLinkTraversalDepth(int maxSelfLinkTraversalDepth) { - this.maxSelfLinkTraversalDepth = maxSelfLinkTraversalDepth; - } - - public int getMaxSelfLinkTraversalDepth() { - return maxSelfLinkTraversalDepth; - } - - public String getEntityTypesToSummarize() { - return entityTypesToSummarize; - } - - public void setEntityTypesToSummarize(String entityTypesToSummarize) { - this.entityTypesToSummarize = entityTypesToSummarize; - } - - public String getVnfEntityTypes() { - return vnfEntityTypes; - } - - public void setVnfEntityTypes(String vnfEntityTypes) { - this.vnfEntityTypes = vnfEntityTypes; - } - - /** - * @return the instance - */ - public static VisualizationConfig getInstance() { - return instance; - } - - /** - * @param instance the instance to set - */ - public static void setInstance(VisualizationConfig instance) { - VisualizationConfig.instance = instance; - } - - /** - * @return the makeAllNeighborsBidirectional - */ - public boolean isMakeAllNeighborsBidirectional() { - return makeAllNeighborsBidirectional; - } - - @Override - public String toString() { - return "VisualizationConfig [maxSelfLinkTraversalDepth=" + maxSelfLinkTraversalDepth - + ", visualizationDebugEnabled=" + visualizationDebugEnabled + ", " - + (aaiEntityNodeDescriptors != null - ? "aaiEntityNodeDescriptors=" + aaiEntityNodeDescriptors + ", " : "") - + (generalNodeClassName != null ? "generalNodeClassName=" + generalNodeClassName + ", " - : "") - + (searchNodeClassName != null ? "searchNodeClassName=" + searchNodeClassName + ", " : "") - + (selectedSearchedNodeClassName != null - ? "selectedSearchedNodeClassName=" + selectedSearchedNodeClassName + ", " : "") - + (entityTypesToSummarize != null - ? "entityTypesToSummarize=" + entityTypesToSummarize + ", " : "") - + (vnfEntityTypes != null ? 
"vnfEntityTypes=" + vnfEntityTypes + ", " : "") - + "makeAllNeighborsBidirectional=" + makeAllNeighborsBidirectional + "]"; - } - - /* - * (non-Javadoc) - * - * @see java.lang.Object#toString() - */ - - - -} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java b/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java new file mode 100644 index 0000000..9fc9030 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java @@ -0,0 +1,169 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.config; + +/** + * The Class VisualizationConfig. + */ +public class VisualizationConfigs { + + private int maxSelfLinkTraversalDepth; + + private boolean visualizationDebugEnabled; + + private String aaiEntityNodeDescriptors; + + private String generalNodeClassName; + + private String searchNodeClassName; + + private String selectedSearchedNodeClassName; + + private int numOfThreadsToFetchNodeIntegrity; + + private String entityTypesToSummarize; + private String vnfEntityTypes; + + private boolean makeAllNeighborsBidirectional; + + /** + * Instantiates a new visualization config. + */ + public VisualizationConfigs() {} + + + + /** + * Make all neighbors bidirectional. 
+ * + * @return true, if successful + */ + public boolean makeAllNeighborsBidirectional() { + return makeAllNeighborsBidirectional; + } + + public void setMakeAllNeighborsBidirectional(boolean makeAllNeighborsBidirectional) { + this.makeAllNeighborsBidirectional = makeAllNeighborsBidirectional; + } + + public String getSelectedSearchedNodeClassName() { + return selectedSearchedNodeClassName; + } + + public void setSelectedSearchedNodeClassName(String selectedSearchedNodeClassName) { + this.selectedSearchedNodeClassName = selectedSearchedNodeClassName; + } + + public String getGeneralNodeClassName() { + return generalNodeClassName; + } + + public void setGeneralNodeClassName(String generalNodeClassName) { + this.generalNodeClassName = generalNodeClassName; + } + + public String getSearchNodeClassName() { + return searchNodeClassName; + } + + public void setSearchNodeClassName(String searchNodeClassName) { + this.searchNodeClassName = searchNodeClassName; + } + + public String getAaiEntityNodeDescriptors() { + return aaiEntityNodeDescriptors; + } + + public void setAaiEntityNodeDescriptors(String aaiEntityNodeDescriptors) { + this.aaiEntityNodeDescriptors = aaiEntityNodeDescriptors; + } + + public boolean isVisualizationDebugEnabled() { + return visualizationDebugEnabled; + } + + public void setVisualizationDebugEnabled(boolean visualizationDebugEnabled) { + this.visualizationDebugEnabled = visualizationDebugEnabled; + } + + public void setMaxSelfLinkTraversalDepth(int maxSelfLinkTraversalDepth) { + this.maxSelfLinkTraversalDepth = maxSelfLinkTraversalDepth; + } + + public int getMaxSelfLinkTraversalDepth() { + return maxSelfLinkTraversalDepth; + } + + public int getNumOfThreadsToFetchNodeIntegrity() { + return numOfThreadsToFetchNodeIntegrity; + } + + public void setNumOfThreadsToFetchNodeIntegrity(int numOfThreadsToFetchNodeIntegrity) { + this.numOfThreadsToFetchNodeIntegrity = numOfThreadsToFetchNodeIntegrity; + } + + public String getEntityTypesToSummarize() { + return entityTypesToSummarize; + } + + public void setEntityTypesToSummarize(String entityTypesToSummarize) { + this.entityTypesToSummarize = entityTypesToSummarize; + } + + public String getVnfEntityTypes() { + return vnfEntityTypes; + } + + public void setVnfEntityTypes(String vnfEntityTypes) { + this.vnfEntityTypes = vnfEntityTypes; + } + + + + @Override + public String toString() { + return "VisualizationConfigs [maxSelfLinkTraversalDepth=" + maxSelfLinkTraversalDepth + + ", visualizationDebugEnabled=" + visualizationDebugEnabled + ", " + + (aaiEntityNodeDescriptors != null + ? "aaiEntityNodeDescriptors=" + aaiEntityNodeDescriptors + ", " : "") + + (generalNodeClassName != null ? "generalNodeClassName=" + generalNodeClassName + ", " + : "") + + (searchNodeClassName != null ? "searchNodeClassName=" + searchNodeClassName + ", " : "") + + (selectedSearchedNodeClassName != null + ? "selectedSearchedNodeClassName=" + selectedSearchedNodeClassName + ", " : "") + + "numOfThreadsToFetchNodeIntegrity=" + numOfThreadsToFetchNodeIntegrity + ", " + + (entityTypesToSummarize != null + ? "entityTypesToSummarize=" + entityTypesToSummarize + ", " : "") + + (vnfEntityTypes != null ? 
"vnfEntityTypes=" + vnfEntityTypes + ", " : "") + + "makeAllNeighborsBidirectional=" + makeAllNeighborsBidirectional + "]"; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + + + +} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java index d87aad8..8d74d68 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java @@ -36,14 +36,15 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.rest.OperationResult; import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.viewandinspect.config.VisualizationConfig; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingAction; import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -70,7 +71,6 @@ public class ActiveInventoryNode { private int nodeDepth; private OperationResult opResult; - private boolean processingErrorOccurred; private List errorCauses; private boolean selflinkRetrievalFailure; @@ -88,14 +88,16 @@ public class ActiveInventoryNode { private boolean selfLinkDeterminationPending; private AtomicBoolean selfLinkProcessed; + private AtomicBoolean nodeIntegrityProcessed; private OxmModelLoader oxmModelLoader; - private VisualizationConfig visualizationConfig; + private VisualizationConfigs visualizationConfigs; private String entityType; private String primaryKeyName; private String primaryKeyValue; + private boolean nodeValidated; private boolean nodeIssue; private boolean ignoredByFilter; @@ -106,19 +108,12 @@ public class ActiveInventoryNode { private ObjectMapper mapper; - /** - * Instantiates a new active inventory node. - */ - public ActiveInventoryNode() { - this(null); - } - /** * Instantiates a new active inventory node. 
* * @param key the key */ - public ActiveInventoryNode(String key) { + public ActiveInventoryNode(VisualizationConfigs visualizationConfigs) { this.nodeId = null; this.entityType = null; this.selfLink = null; @@ -127,13 +122,15 @@ public class ActiveInventoryNode { this.errorCauses = new ArrayList(); this.selflinkRetrievalFailure = false; this.nodeIssue = false; + this.nodeValidated = false; this.state = NodeProcessingState.INIT; this.selfLinkPendingResolve = false; this.selfLinkDeterminationPending = false; selfLinkProcessed = new AtomicBoolean(Boolean.FALSE); + nodeIntegrityProcessed = new AtomicBoolean(Boolean.FALSE); oxmModelLoader = null; - visualizationConfig = null; + this.visualizationConfigs = visualizationConfigs; isRootNode = false; inboundNeighbors = new ConcurrentLinkedDeque(); @@ -165,7 +162,7 @@ public class ActiveInventoryNode { public void addQueryParams(Collection params) { - if (params != null && !params.isEmpty()) { + if (params != null & params.size() > 0) { for (String param : params) { addQueryParam(param); @@ -215,8 +212,8 @@ public class ActiveInventoryNode { * * @return the visualization config */ - public VisualizationConfig getvisualizationConfig() { - return visualizationConfig; + public VisualizationConfigs getvisualizationConfigs() { + return visualizationConfigs; } public int getNodeDepth() { @@ -232,8 +229,8 @@ public class ActiveInventoryNode { * * @param visualizationConfig the new visualization config */ - public void setvisualizationConfig(VisualizationConfig visualizationConfig) { - this.visualizationConfig = visualizationConfig; + public void setvisualizationConfig(VisualizationConfigs visualizationConfigs) { + this.visualizationConfigs = visualizationConfigs; } public OxmModelLoader getOxmModelLoader() { @@ -252,6 +249,14 @@ public class ActiveInventoryNode { this.primaryKeyValue = primaryKeyValue; } + public boolean isNodeValidated() { + return nodeValidated; + } + + public void setNodeValidated(boolean nodeValidated) { + this.nodeValidated = nodeValidated; + } + public boolean isNodeIssue() { return nodeIssue; } @@ -338,7 +343,7 @@ public class ActiveInventoryNode { } public boolean isAtMaxDepth() { - return (nodeDepth >= VisualizationConfig.getConfig().getMaxSelfLinkTraversalDepth()); + return (nodeDepth >= this.visualizationConfigs.getMaxSelfLinkTraversalDepth()); } public ConcurrentLinkedDeque getInboundNeighbors() { @@ -443,8 +448,16 @@ public class ActiveInventoryNode { this.selfLinkProcessed.set(selfLinkProcessed); } + public boolean getNodeIntegrityProcessed() { + return nodeIntegrityProcessed.get(); + } + + public void setNodeIntegrityProcessed(boolean nodeIntegrityProcessed) { + this.nodeIntegrityProcessed.set(nodeIntegrityProcessed); + } + public boolean isDirectSelfLink() { - // https://:8443/aai/v8/resources/id/2458124400 + // https://aai-int1.test.att.com:8443/aai/v8/resources/id/2458124400 return isDirectSelfLink(this.selfLink); } @@ -455,7 +468,7 @@ public class ActiveInventoryNode { * @return true, if is direct self link */ public static boolean isDirectSelfLink(String link) { - // https://:8443/aai/v8/resources/id/2458124400 + // https://aai-int1.test.att.com:8443/aai/v8/resources/id/2458124400 if (link == null) { return false; @@ -625,7 +638,7 @@ public class ActiveInventoryNode { * probably more likely just for array node types, but we'll see. 
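* Field names that match an OXM entity descriptor are skipped here, since they represent nested entities rather than simple attributes.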
*/ - if (oxmModelLoader.getEntityDescriptor(fieldName) == null) { + if (OxmEntityLookup.getInstance().getEntityDescriptors().get(fieldName) == null) { /* * this is no an entity type as far as we can tell, so we can add it to our property * set. @@ -645,7 +658,8 @@ public class ActiveInventoryNode { * complex group or relationship. */ - if (oxmModelLoader.getEntityDescriptor(field.getKey()) == null) { + if (OxmEntityLookup.getInstance().getEntityDescriptors() + .get(field.getKey()) == null) { /* * this is no an entity type as far as we can tell, so we can add it to our property * set. diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java index e29f6df..69971c5 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java @@ -22,10 +22,6 @@ */ package org.onap.aai.sparky.viewandinspect.entity; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; - import java.util.ArrayList; import java.util.List; @@ -91,67 +87,5 @@ public class D3VisualizationOutput { this.inlineMessage = inlineMessage; } - /** - * @return the nodes - */ - public List getNodes() { - return nodes; - } - - /** - * @param nodes the nodes to set - */ - public void setNodes(List nodes) { - this.nodes = nodes; - } - - /** - * @return the links - */ - public List getLinks() { - return links; - } - - /** - * @param links the links to set - */ - public void setLinks(List links) { - this.links = links; - } - - /** - * The main method. - * - * @param args the arguments - * @throws JsonProcessingException the json processing exception - */ - public static final void main(String[] args) throws JsonProcessingException { - - ActiveInventoryNode pserverAin = new ActiveInventoryNode(); - pserverAin.setNodeId("pserver.76786asd87asgd"); - JsonNode pserver = new JsonNode(pserverAin); - - List nodes = new ArrayList(); - nodes.add(pserver); - - JsonNodeLink l1 = new JsonNodeLink(); - l1.setSource(pserverAin.getNodeId()); - l1.setTarget(pserverAin.getNodeId()); - l1.setId(l1.getSource() + "_" + l1.getTarget()); - - List links = new ArrayList(); - links.add(l1); - - D3VisualizationOutput output = new D3VisualizationOutput(); - output.addNodes(nodes); - output.addLinks(links); - - - ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); - String json = ow.writeValueAsString(output); - - System.out.println(json); - - } } diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java new file mode 100644 index 0000000..678a00f --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java @@ -0,0 +1,56 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +public class GraphRequest { + + private String hashId; + private boolean includeGraphMeta; + + public GraphRequest() { + + } + + public String getHashId() { + return hashId; + } + + public void setHashId(String hashId) { + this.hashId = hashId; + } + + public boolean isIncludeGraphMeta() { + return includeGraphMeta; + } + + public void setIncludeGraphMeta(boolean includeGraphMeta) { + this.includeGraphMeta = includeGraphMeta; + } + + @Override + public String toString() { + return "QueryRequest [" + (hashId != null ? "hashId=" + hashId + ", " : "") + + "includeGraphMeta=" + includeGraphMeta + "]"; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java index 3f9d0f2..4d1c458 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java @@ -22,13 +22,14 @@ */ package org.onap.aai.sparky.viewandinspect.entity; -import com.fasterxml.jackson.annotation.JsonIgnore; - import java.util.Collection; import java.util.HashMap; import java.util.Map; import org.apache.log4j.Logger; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +import com.fasterxml.jackson.annotation.JsonIgnore; /* * We can use annotations to differentiate between intermediate data we use to build the node, and @@ -77,12 +78,15 @@ public class JsonNode { @JsonIgnore private static final Logger LOG = Logger.getLogger(JsonNode.class); + private VisualizationConfigs visualizationConfigs; + + /** * Instantiates a new json node. 
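* @param visualizationConfigs the visualization configuration carried into the node metadata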
* * @param ain the ain */ - public JsonNode(ActiveInventoryNode ain) { + public JsonNode(ActiveInventoryNode ain, VisualizationConfigs visualizationConfigs) { this.resourceKey = ain.getNodeId(); this.itemProperties = ain.getProperties(); this.setItemType(ain.getEntityType()); @@ -90,6 +94,7 @@ public class JsonNode { this.setItemNameValue(ain.getPrimaryKeyValue()); this.setId(ain.getNodeId()); this.isRootNode = ain.isRootNode(); + this.visualizationConfigs = visualizationConfigs; if (LOG.isDebugEnabled()) { LOG.debug("---"); @@ -100,9 +105,10 @@ public class JsonNode { inboundNeighbors = ain.getInboundNeighbors(); outboundNeighbors = ain.getOutboundNeighbors(); - nodeMeta = new NodeMeta(); + nodeMeta = new NodeMeta(this.visualizationConfigs); nodeMeta.setNodeIssue(ain.isNodeIssue()); + nodeMeta.setNodeValidated(ain.isNodeValidated()); nodeMeta.setNodeDepth(ain.getNodeDepth()); nodeMeta.setNumInboundNeighbors(ain.getInboundNeighbors().size()); @@ -177,55 +183,11 @@ public class JsonNode { return isRootNode; } - /** - * @return the inboundNeighbors - */ - public Collection getInboundNeighbors() { - return inboundNeighbors; - } - - /** - * @param inboundNeighbors the inboundNeighbors to set - */ - public void setInboundNeighbors(Collection inboundNeighbors) { - this.inboundNeighbors = inboundNeighbors; - } - - /** - * @return the outboundNeighbors - */ - public Collection getOutboundNeighbors() { - return outboundNeighbors; - } - - /** - * @param outboundNeighbors the outboundNeighbors to set + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() */ - public void setOutboundNeighbors(Collection outboundNeighbors) { - this.outboundNeighbors = outboundNeighbors; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - - /** - * @param itemProperties the itemProperties to set - */ - public void setItemProperties(Map itemProperties) { - this.itemProperties = itemProperties; - } - - /** - * @param isRootNode the isRootNode to set - */ - public void setRootNode(boolean isRootNode) { - this.isRootNode = isRootNode; - } - @Override public String toString() { return "JsonNode [" + (id != null ? "id=" + id + ", " : "") @@ -233,8 +195,8 @@ public class JsonNode { + (itemNameKey != null ? "itemNameKey=" + itemNameKey + ", " : "") + (itemNameValue != null ? "itemNameValue=" + itemNameValue + ", " : "") + (itemProperties != null ? "itemProperties=" + itemProperties + ", " : "") - + (nodeMeta != null ? "nodeMeta=" + nodeMeta + ", " : "") + "isRootNode=" + isRootNode - + ", " + (resourceKey != null ? "resourceKey=" + resourceKey + ", " : "") + + (nodeMeta != null ? "nodeMeta=" + nodeMeta + ", " : "") + + (resourceKey != null ? "resourceKey=" + resourceKey + ", " : "") + (inboundNeighbors != null ? "inboundNeighbors=" + inboundNeighbors + ", " : "") + (outboundNeighbors != null ? 
"outboundNeighbors=" + outboundNeighbors : "") + "]"; } diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java index c55f838..26a027f 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java @@ -22,7 +22,7 @@ */ package org.onap.aai.sparky.viewandinspect.entity; -import org.onap.aai.sparky.viewandinspect.config.VisualizationConfig; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; /** @@ -32,7 +32,6 @@ public class NodeMeta { private String className; - private boolean isEnrichableNode; private boolean isSearchTarget; private NodeDebug nodeDebug; @@ -51,14 +50,19 @@ public class NodeMeta { private NodeProcessingState processingState; + private VisualizationConfigs visualizationConfigs; + + + /** * Instantiates a new node meta. */ - public NodeMeta() { + public NodeMeta(VisualizationConfigs visualizationConfigs) { this.isSearchTarget = false; - this.isEnrichableNode = false; + this.visualizationConfigs = visualizationConfigs; + - if (VisualizationConfig.getConfig().isVisualizationDebugEnabled()) { + if (this.visualizationConfigs.isVisualizationDebugEnabled()) { nodeDebug = new NodeDebug(); } this.numInboundNeighbors = 0; @@ -166,10 +170,6 @@ public class NodeMeta { return selfLinkResponseTimeInMs; } - public boolean isEnrichableNode() { - return isEnrichableNode; - } - public boolean isNodeIssue() { return nodeIssue; } @@ -186,10 +186,6 @@ public class NodeMeta { this.className = className; } - public void setEnrichableNode(boolean isEnrichableNode) { - this.isEnrichableNode = isEnrichableNode; - } - public void setNodeIssue(boolean nodeIssue) { this.nodeIssue = nodeIssue; } diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java index ca55f09..22bea15 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java @@ -22,7 +22,7 @@ */ package org.onap.aai.sparky.viewandinspect.entity; -import org.onap.aai.sparky.dal.rest.OperationResult; +import org.onap.aai.restclient.client.OperationResult; /** * The Class NodeProcessingTransaction. diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QuerySearchEntity.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QuerySearchEntity.java deleted file mode 100644 index 222a2f7..0000000 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QuerySearchEntity.java +++ /dev/null @@ -1,72 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.viewandinspect.entity; - -import com.fasterxml.jackson.annotation.JsonIgnore; - -/** - * The Class ViewAndInspectSearchRequest. - */ -public class QuerySearchEntity { - - private static final String DEFAULT_MAX_RESULTS = "10"; - public String maxResults; - - public String queryStr; - - /** - * Instantiates a new view and inspect search request. - */ - public QuerySearchEntity() { - maxResults = DEFAULT_MAX_RESULTS; - queryStr = null; - } - - public String getMaxResults() { - return maxResults; - } - - public void setMaxResults(String maxResults) { - this.maxResults = maxResults; - } - - public String getQueryStr() { - return queryStr; - } - - public void setQueryStr(String queryStr) { - this.queryStr = queryStr; - } - - @JsonIgnore - public String[] getSearchTerms() { - - if (queryStr == null) { - return null; - } - - return queryStr.split(" "); - - } - -} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java index 7e5519c..135ddcc 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java @@ -22,10 +22,10 @@ */ package org.onap.aai.sparky.viewandinspect.entity; -import com.fasterxml.jackson.annotation.JsonProperty; - import java.util.Arrays; +import com.fasterxml.jackson.annotation.JsonProperty; + /** * The Class Relationship. */ diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java index 8dd61d4..d758543 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java @@ -22,10 +22,10 @@ */ package org.onap.aai.sparky.viewandinspect.entity; -import com.fasterxml.jackson.annotation.JsonProperty; - import java.util.Arrays; +import com.fasterxml.jackson.annotation.JsonProperty; + /** * The Class RelationshipList. 
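* Holds the array of Relationship entries returned under an entity's relationship-list.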
*/ @@ -42,20 +42,6 @@ public class RelationshipList { this.relationship = relationship; } - /** - * @return the relationship - */ - public Relationship[] getRelationship() { - return relationship; - } - - /** - * @param relationship the relationship to set - */ - public void setRelationship(Relationship[] relationship) { - this.relationship = relationship; - } - /* * (non-Javadoc) * diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchResponse.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchResponse.java deleted file mode 100644 index 7daf471..0000000 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchResponse.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.viewandinspect.entity; - -import java.util.ArrayList; -import java.util.List; - -import org.onap.aai.sparky.suggestivesearch.SuggestionEntity; - -/** - * The Class SearchResponse. - */ -public class SearchResponse { - - private long processingTimeInMs; - private int totalFound; - - private List suggestions; - - /** - * Instantiates a new search response. - */ - public SearchResponse() { - this.suggestions = new ArrayList(); - this.processingTimeInMs = 0; - this.totalFound = 0; - } - - public long getProcessingTimeInMs() { - return processingTimeInMs; - } - - public void setProcessingTimeInMs(long processingTimeInMs) { - this.processingTimeInMs = processingTimeInMs; - } - - public int getTotalFound() { - return totalFound; - } - - public void setTotalFound(int totalFound) { - this.totalFound = totalFound; - } - - public List getSuggestions() { - return suggestions; - } - - public void setSuggestions(List suggestions) { - this.suggestions = suggestions; - } - - /** - * Adds the entity entry. - * - * @param suggestionEntry that will be converted to JSON - */ - public void addSuggestion(SuggestionEntity suggestionEntity) { - suggestions.add(suggestionEntity); - } - - /** - * Increments the total number of hits for this SearchResponse by the value passed in. 
- * - * @param additionalCount - Count to increment the total found - */ - public void addToTotalFound(int additionalCount) { - totalFound += additionalCount; - } -} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java new file mode 100644 index 0000000..bed2602 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java @@ -0,0 +1,115 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.onap.aai.sparky.sync.entity.SearchableEntity; + +import java.util.Set; + +public class SearchableEntityList { + + private List entities; + + public SearchableEntityList() { + entities = new ArrayList(); + } + + public List getEntities() { + return entities; + } + + public void setEntities(List entities) { + this.entities = entities; + } + + public void addEntity(SearchableEntity entity) { + + if (!entities.contains(entity)) { + entities.add(entity); + } + + } + + protected static SearchableEntity buildEntity(String entityType, String pkeyValue, String link, + Map searchTags) { + + SearchableEntity se = new SearchableEntity(); + + se.setEntityType(entityType); + se.setEntityPrimaryKeyValue(pkeyValue); + se.setLink(link); + + if (searchTags != null) { + + Set> entrySet = searchTags.entrySet(); + + for (Entry entry : entrySet) { + se.addSearchTagWithKey(entry.getKey(), entry.getValue()); + } + } + + se.deriveFields(); + + return se; + + } + + protected static Map getSearchTagMap(String... tags) { + + HashMap dataMap = new HashMap(); + + if (tags != null && tags.length >= 2) { + + int numTags = tags.length; + int index = 0; + + while (index < numTags) { + + if (index + 1 < numTags) { + // we have enough parameters for the current set + dataMap.put(tags[index], tags[index + 1]); + index += 2; + } else { + break; + } + } + + } + + return dataMap; + + + } + + @Override + public String toString() { + return "SearchableEntityList [" + (entities != null ? 
"entities=" + entities : "") + "]"; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java index 21af9cf..204b930 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java @@ -22,7 +22,7 @@ */ package org.onap.aai.sparky.viewandinspect.entity; -import org.onap.aai.sparky.dal.rest.OperationResult; +import org.onap.aai.restclient.client.OperationResult; public class SelfLinkDeterminationTransaction { @@ -33,7 +33,6 @@ public class SelfLinkDeterminationTransaction { private OperationResult opResult; - public String getParentNodeId() { return parentNodeId; } diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Violations.java b/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Violations.java deleted file mode 100644 index 4968de4..0000000 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Violations.java +++ /dev/null @@ -1,125 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.viewandinspect.entity; - -import com.att.aft.dme2.internal.jackson.annotate.JsonProperty; - -/** - * The Class Violations. - */ -public class Violations { - - private String severity; - - private String category; - - private String type; - - private String timestamp; - - private String details; - - @JsonProperty("error-message") - private String errorMessage; - - /** - * Instantiates a new violations. 
- * - * @param severity the severity - * @param category the category - * @param type the type - * @param timestamp the timestamp - * @param errorMessage the error message - */ - public Violations(String severity, String category, String type, String timestamp, - String errorMessage) { - this.severity = severity; - this.category = category; - this.type = type; - this.timestamp = timestamp; - this.errorMessage = errorMessage; - } - - public String getSeverity() { - return severity; - } - - public void setSeverity(String severity) { - this.severity = severity; - } - - public String getCategory() { - return category; - } - - public void setCategory(String category) { - this.category = category; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - - public String getTimestamp() { - return timestamp; - } - - public void setTimestamp(String timestamp) { - this.timestamp = timestamp; - } - - /* - * public Map getDetails() { return details; } - * - * public void setDetails(Map details) { this.details = details; } - */ - - public String getErrorMessage() { - return errorMessage; - } - - public void setErrorMessage(String errorMessage) { - this.errorMessage = errorMessage; - } - - /** - * @return the details - */ - public String getDetails() { - return details; - } - - /** - * @param details the details to set - */ - public void setDetails(String details) { - this.details = details; - } - - -} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java b/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java index 2550ed7..b7038bf 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java @@ -28,3 +28,4 @@ package org.onap.aai.sparky.viewandinspect.enumeration; public enum NodeProcessingAction { SELF_LINK_SET, NEW_NODE_PROCESSED, SELF_LINK_RESOLVE_ERROR, SELF_LINK_DETERMINATION_ERROR, SELF_LINK_RESOLVE_OK, SELF_LINK_RESPONSE_PARSE_ERROR, SELF_LINK_RESPONSE_PARSE_OK, NEIGHBORS_PROCESSED_ERROR, NEIGHBORS_PROCESSED_OK, COMPLEX_ATTRIBUTE_GROUP_PARSE_ERROR, COMPLEX_ATTRIBUTE_GROUP_PARSE_OK, NODE_IDENTITY_ERROR, UNEXPECTED_STATE_TRANSITION } + diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java b/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java new file mode 100644 index 0000000..5101c28 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java @@ -0,0 +1,440 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.search; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.common.search.CommonSearchSuggestion; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; +import org.onap.aai.sparky.dal.sas.config.SearchServiceConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.api.SearchProvider; +import org.onap.aai.sparky.search.config.SuggestionConfig; +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; + +public class ViewInspectSearchProvider implements SearchProvider { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ViewInspectSearchProvider.class); + + private SearchServiceConfig sasConfig = null; + private SearchAdapter search = null; + private OxmModelLoader oxmModelLoader; + private String additionalSearchSuggestionText; + + private static final String KEY_SEARCH_RESULT = "searchResult"; + private static final String KEY_HITS = "hits"; + private static final String KEY_DOCUMENT = "document"; + private static final String KEY_CONTENT = "content"; + + private static final String VI_SUGGESTION_ROUTE = "schema"; // TODO -> Read route from + // suggestive-search.properties + // instead of hard coding + + private static final String KEY_SEARCH_TAG_IDS = "searchTagIDs"; + private static final String KEY_SEARCH_TAGS = "searchTags"; + private static final String KEY_LINK = "link"; + private static final String KEY_ENTITY_TYPE = "entityType"; + private static final String VALUE_QUERY = "query"; + + public ViewInspectSearchProvider(OxmModelLoader oxmModelLoader) throws Exception { + + sasConfig = SearchServiceConfig.getConfig(); + search = new SearchAdapter(); + suggestionConfig = SuggestionConfig.getConfig(); + this.oxmModelLoader = oxmModelLoader; + additionalSearchSuggestionText = null; + + } + + @Override + public List search(QuerySearchEntity queryRequest) { + + List suggestionEntityList = new ArrayList(); + + + /* + * Based on the configured stop words, we need to strip any matched stop-words ( case + * insensitively ) from the query string, before hitting elastic to prevent the words from being + * used against the elastic view-and-inspect index. 
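(Illustrative note: the stop-word handling described in this rationale operates purely on the incoming query string before it is posted to the search service. A minimal, self-contained sketch of the effect, assuming a configured stop-word list such as "a", "and", "the"; the real list comes from SuggestionConfig.getStopWords():

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class StopWordStripSketch {
      public static void main(String[] args) {
        // Assumed stop words for illustration; the real values come from SuggestionConfig.getStopWords().
        List<String> stopWords = Arrays.asList("a", "and", "the");
        List<String> terms = new ArrayList<>(
            Arrays.asList("the pserver and vserver".toLowerCase().split(" ")));
        terms.removeAll(stopWords);
        System.out.println(String.join(" ", terms)); // prints "pserver vserver"
      }
    }

The cleaned string is what gets substituted into VIUI_SEARCH_TEMPLATE as the match value before the POST to the search service.)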
Another alternative to this approach would + * be to define stop words on the elastic search index configuration for the + * entity-search-index, but but that may be more complicated / more risky than just a simple bug + * fix, but it's something we should think about for the future. + */ + + try { + final String queryStringWithoutStopWords = + stripStopWordsFromQuery(queryRequest.getQueryStr()); + + final String fullUrlStr = getSasFullUrl(sasConfig.getIndexName(), VALUE_QUERY, + sasConfig.getIpAddress(), sasConfig.getHttpPort(), sasConfig.getVersion()); + + String postBody = String.format(VIUI_SEARCH_TEMPLATE, + Integer.parseInt(queryRequest.getMaxResults()), queryStringWithoutStopWords); + + OperationResult opResult = search.doPost(fullUrlStr, postBody, "application/json"); + if (opResult.getResultCode() == 200) { + suggestionEntityList = + generateSuggestionsForSearchResponse(opResult.getResult(), queryRequest.getQueryStr()); + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, + "View and inspect query failed with error = " + exc.getMessage()); + } + return suggestionEntityList; + + + } + + public String getAdditionalSearchSuggestionText() { + return additionalSearchSuggestionText; + } + + public void setAdditionalSearchSuggestionText(String additionalSearchSuggestionText) { + this.additionalSearchSuggestionText = additionalSearchSuggestionText; + } + + /** + * Get Full URL for search + * + * @param api the api + * @param indexName + * @return the full url + */ + private String getSasFullUrl(String indexName, String type, String ipAddress, String port, + String version) { + + return String.format("https://%s:%s/services/search-data-service/%s/search/indexes/%s/%s", + ipAddress, port, version, indexName, type); + } + + + + /** + * Builds the search response. + * + * @param operationResult The Elasticsearch query result + * @param queryStr The string the user typed into the search bar + * @return A list of search suggestions and corresponding UI filter values + */ + private List generateSuggestionsForSearchResponse(String operationResult, + String queryStr) { + + + if (operationResult == null || operationResult.length() == 0) { + return null; + } + + ObjectMapper mapper = new ObjectMapper(); + JsonNode rootNode = null; + List suggestionEntityList = new ArrayList(); + try { + rootNode = mapper.readTree(operationResult); + + JsonNode hitsNode = rootNode.get(KEY_SEARCH_RESULT); + + + + // Check if there are hits that are coming back + if (hitsNode.has(KEY_HITS)) { + ArrayNode hitsArray = (ArrayNode) hitsNode.get(KEY_HITS); + + /* + * next we iterate over the values in the hit array elements + */ + + Iterator nodeIterator = hitsArray.elements(); + JsonNode entityNode = null; + CommonSearchSuggestion suggestionEntity = null; + JsonNode sourceNode = null; + while (nodeIterator.hasNext()) { + entityNode = nodeIterator.next(); + sourceNode = entityNode.get(KEY_DOCUMENT).get(KEY_CONTENT); + + // do the point transformation as we build the response? 
+ suggestionEntity = new CommonSearchSuggestion(); + suggestionEntity.setRoute(VI_SUGGESTION_ROUTE); + + /* + * This is where we probably want to annotate the search tags because we also have access + * to the seachTagIds + */ + + String searchTagIds = getValueFromNode(sourceNode, KEY_SEARCH_TAG_IDS); + String searchTags = getValueFromNode(sourceNode, KEY_SEARCH_TAGS); + String entityType = getValueFromNode(sourceNode, KEY_ENTITY_TYPE); + String link = getValueFromNode(sourceNode, KEY_LINK); + + if (link != null) { + suggestionEntity.setHashId(NodeUtils.generateUniqueShaDigest(link)); + } + + try { + suggestionEntity + .setText(annotateSearchTags(searchTags, searchTagIds, entityType, queryStr)); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), + exc.getLocalizedMessage()); + // at least send back the un-annotated search tags + suggestionEntity.setText(searchTags); + } + + if (getAdditionalSearchSuggestionText() != null) { + String suggestionText = suggestionEntity.getText(); + suggestionText += TierSupportUiConstants.SUGGESTION_TEXT_SEPARATOR + + getAdditionalSearchSuggestionText(); + suggestionEntity.setText(suggestionText); + } + + if (searchTags != null) { + suggestionEntityList.add(suggestionEntity); + } + + } + } + } catch (IOException exc) { + LOG.warn(AaiUiMsgs.SEARCH_RESPONSE_BUILDING_EXCEPTION, exc.getLocalizedMessage()); + } + return suggestionEntityList; + } + + + + /** + * The current format of an UI-dropdown-item is like: "search-terms entityType att1=attr1_val". + * Example, for pserver: search-terms pserver hostname=djmAG-72060, + * pserver-name2=example-pserver-name2-val-17254, pserver-id=example-pserver-id-val-17254, + * ipv4-oam-address=example-ipv4-oam-address-val-17254 SearchController.js parses the above + * format. So if you are modifying the parsing below, please update SearchController.js as well. 
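(For a concrete sense of the dropdown-item format described here, a hypothetical call is sketched below; the attribute ids, tag values, and pairing conjunction are assumptions, since the real conjunctions come from SuggestionConfig.getPairingList() and getDefaultPairingValue():

    // Hypothetical inputs; searchTags and searchTagIDs are parallel, semicolon-delimited lists.
    String searchTags   = "djmAG-72060;example-pserver-id-val-17254";
    String searchTagIds = "hostname;pserver-id";
    String annotated = annotateSearchTags(searchTags, searchTagIds, "pserver", "djmag");
    // The lower-cased query term "djmag" matches the hostname tag, so the returned text takes
    // the form "pserver <hostname conjunction> djmAG-72060", which SearchController.js then parses.

)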
+ * + * @param searchTags the search tags + * @param searchTagIds the search tag ids + * @param entityType the entity type + * @param queryStr the query str + * @return the string + */ + + private String annotateSearchTags(String searchTags, String searchTagIds, String entityType, + String queryStr) { + + if (searchTags == null || searchTagIds == null) { + String valueOfSearchTags = String.valueOf(searchTags); + String valueOfSearchTagIds = String.valueOf(searchTagIds); + + LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error", + "Search tags = " + valueOfSearchTags + " and Seach tag IDs = " + valueOfSearchTagIds); + return searchTags; + } + + if (entityType == null) { + LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), "EntityType is null"); + return searchTags; + } + + if (queryStr == null) { + LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), + "Query string is null"); + return searchTags; + } + + /* + * The ElasticSearch analyzer has already applied the lowercase filter, so we don't have to + * covert them again + */ + String[] searchTagsArray = searchTags.split(";"); + String[] searchTagIdsArray = searchTagIds.split(";"); + + // specifically apply lower case to the the query terms to make matching + // simpler + String[] queryTerms = queryStr.toLowerCase().split(" "); + + OxmEntityDescriptor desc = OxmEntityLookup.getInstance().getEntityDescriptors().get(entityType); + + if (desc == null) { + LOG.error(AaiUiMsgs.ENTITY_NOT_FOUND_IN_OXM, entityType.toString()); + return searchTags; + } + + String primaryKeyName = NodeUtils.concatArray(desc.getPrimaryKeyAttributeNames(), "/"); + String primaryKeyValue = null; + + /* + * For each used attribute, get the fieldName for the attribute index and transform the search + * tag into t1,t2,t3 => h1=t1, h2=t2, h3=t3; + */ + StringBuilder searchTagsBuilder = new StringBuilder(128); + searchTagsBuilder.append(entityType); + + String primaryKeyConjunctionValue = null; + boolean queryTermsMatchedSearchTags = false; + + if (searchTagsArray.length == searchTagIdsArray.length) { + for (int i = 0; i < searchTagsArray.length; i++) { + String searchTagAttributeId = searchTagIdsArray[i]; + String searchTagAttributeValue = searchTagsArray[i]; + + // Find the concat conjunction + Map pairConjunctionList = suggestionConfig.getPairingList(); + + String suggConjunction = null; + if (pairConjunctionList.get(searchTagAttributeId) != null) { + suggConjunction = pairConjunctionList.get(searchTagAttributeId); + } else { + suggConjunction = suggestionConfig.getDefaultPairingValue(); + } + + if (primaryKeyName.equals(searchTagAttributeId)) { + primaryKeyValue = searchTagAttributeValue; + primaryKeyConjunctionValue = suggConjunction; + } + + if (queryTermsMatchSearchTag(queryTerms, searchTagAttributeValue)) { + searchTagsBuilder.append(" " + suggConjunction + " " + searchTagAttributeValue); + queryTermsMatchedSearchTags = true; + } + } + } else { + String errorMessage = + "Search tags length did not match search tag ID length for entity type " + entityType; + LOG.error(AaiUiMsgs.ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED, errorMessage); + } + + + + /* + * if none of the user query terms matched the index entity search tags then we should still tag + * the matched entity with a conjunction set to at least it's entity primary key value to + * discriminate between the entities of the same type in the search results displayed in the UI + * search bar results + */ + + if (!queryTermsMatchedSearchTags) { + + if (primaryKeyValue 
!= null && primaryKeyConjunctionValue != null) { + searchTagsBuilder.append(" " + primaryKeyConjunctionValue + " " + primaryKeyValue); + } else { + LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error", + "Could not annotate user query terms " + queryStr + + " from available entity search tags = " + searchTags); + return searchTags; + } + + } + + return searchTagsBuilder.toString(); + + } + + /** + * Query terms match search tag. + * + * @param queryTerms the query terms + * @param searchTag the search tag + * @return true, if successful @return. + */ + private boolean queryTermsMatchSearchTag(String[] queryTerms, String searchTag) { + + if (queryTerms == null || queryTerms.length == 0 || searchTag == null) { + return false; + } + + for (String queryTerm : queryTerms) { + if (searchTag.toLowerCase().contains(queryTerm.toLowerCase())) { + return true; + } + } + + return false; + + } + + /** + * Gets the value from node. + * + * @param node the node + * @param fieldName the field name + * @return the value from node + */ + private String getValueFromNode(JsonNode node, String fieldName) { + + if (node == null || fieldName == null) { + return null; + } + + JsonNode valueNode = node.get(fieldName); + + if (valueNode != null) { + return valueNode.asText(); + } + + return null; + + } + + private static final String VIUI_SEARCH_TEMPLATE = + "{ " + "\"results-start\": 0," + "\"results-size\": %d," + "\"queries\": [{" + "\"must\": {" + + "\"match\": {" + "\"field\": \"entityType searchTags crossEntityReferenceValues\"," + + "\"value\": \"%s\"," + "\"operator\": \"and\", " + + "\"analyzer\": \"whitespace_analyzer\"" + "}" + "}" + "}]" + "}"; + + private SuggestionConfig suggestionConfig = null; + + /** + * @param queryStr - space separate query search terms + * @return - query string with stop-words removed + */ + private String stripStopWordsFromQuery(String queryStr) { + + if (queryStr == null) { + return queryStr; + } + + Collection stopWords = suggestionConfig.getStopWords(); + ArrayList queryTerms = + new ArrayList(Arrays.asList(queryStr.toLowerCase().split(" "))); + + queryTerms.removeAll(stopWords); + + return String.join(" ", queryTerms); + } + +} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/services/SearchServiceWrapper.java b/src/main/java/org/onap/aai/sparky/viewandinspect/services/SearchServiceWrapper.java deleted file mode 100644 index ebce18e..0000000 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/services/SearchServiceWrapper.java +++ /dev/null @@ -1,980 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.viewandinspect.services; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.json.JSONException; -import org.json.JSONObject; -import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.elasticsearch.HashQueryResponse; -import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.sas.config.SearchServiceConfig; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.search.VnfSearchService; -import org.onap.aai.sparky.search.config.SuggestionConfig; -import org.onap.aai.sparky.suggestivesearch.SuggestionEntity; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.sparky.viewandinspect.entity.QuerySearchEntity; -import org.onap.aai.sparky.viewandinspect.entity.SearchResponse; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; - -/** - * The Class SearchServlet. - */ - -public class SearchServiceWrapper { - - private static final long serialVersionUID = 1L; - - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(SearchServiceWrapper.class); - - private SearchServiceConfig sasConfig = null; - private SuggestionConfig suggestionConfig = null; - private SearchAdapter search = null; - private ObjectMapper mapper; - private OxmModelLoader oxmModelLoader; - private VnfSearchService vnfSearch = null; - - private static final String SEARCH_STRING = "search"; - private static final String COUNT_STRING = "count"; - private static final String QUERY_SEARCH = SEARCH_STRING + "/querysearch"; - private static final String SUMMARY_BY_ENTITY_TYPE_API = SEARCH_STRING + "/summarybyentitytype"; - private static final String SUMMARY_BY_ENTITY_TYPE_COUNT_API = - SUMMARY_BY_ENTITY_TYPE_API + "/" + COUNT_STRING; - - private static final String VALUE_ANYKEY = "anyKey"; - private static final String VALUE_QUERY = "query"; - - private static final String KEY_HASH_ID = "hashId"; - private static final String KEY_GROUP_BY = "groupby"; - private static final String KEY_SEARCH_RESULT = "searchResult"; - private static final String KEY_HITS = "hits"; - private static final String KEY_PAYLOAD = "payload"; - private static final String KEY_DOCUMENT = "document"; - private static final String KEY_CONTENT = "content"; - private static final String KEY_SEARCH_TAG_IDS = "searchTagIDs"; - private static final String KEY_SEARCH_TAGS = "searchTags"; - private static final String KEY_LINK = "link"; - private static final String KEY_ENTITY_TYPE = "entityType"; - - private static final String VI_SUGGESTION_ROUTE = "viewInspect"; // TODO -> Read route from - // suggestive-search.properties - // instead of hard coding - - private static final String 
VIUI_SEARCH_TEMPLATE = - "{ " + "\"results-start\": 0," + "\"results-size\": %d," + "\"queries\": [{" + "\"must\": {" - + "\"match\": {" + "\"field\": \"entityType searchTags crossEntityReferenceValues\"," - + "\"value\": \"%s\"," + "\"operator\": \"and\", " - + "\"analyzer\": \"whitespace_analyzer\"" + "}" + "}" + "}]" + "}"; - - /** - * Instantiates a new search service wrapper - */ - public SearchServiceWrapper() { - this.mapper = new ObjectMapper(); - vnfSearch = new VnfSearchService(); - - try { - if (sasConfig == null) { - sasConfig = SearchServiceConfig.getConfig(); - } - - if (suggestionConfig == null) { - suggestionConfig = SuggestionConfig.getConfig(); - } - - if (search == null) { - search = new SearchAdapter(); - } - - if (oxmModelLoader == null) { - oxmModelLoader = OxmModelLoader.getInstance(); - - if (OxmModelLoader.getInstance().getSearchableEntityDescriptors().isEmpty()) { - LOG.error(AaiUiMsgs.ENTITY_NOT_FOUND_IN_OXM, "searchable entity"); - } - } - } catch (Exception exc) { - new ServletException( - "Caught an exception while getting an instance of servlet configuration from SearchServlet.", - exc); - } - } - - public void doGet(HttpServletRequest request, HttpServletResponse response) - throws ServletException, IOException { - doPost(request, response); - } - - public void setSasConfig(SearchServiceConfig sasConfig) { - this.sasConfig = sasConfig; - } - - public SearchServiceConfig getSasConfig() { - return sasConfig; - } - - public void setSuggestionConfig(SuggestionConfig suggestionConfig) { - this.suggestionConfig = suggestionConfig; - } - - public void setSearch(SearchAdapter search) { - this.search = search; - } - - public SuggestionConfig getSuggestionConfig() { - return suggestionConfig; - } - - public SearchAdapter getSearch() { - return search; - } - - public void setOxmModelLoader(OxmModelLoader oxmModelLoader) { - this.oxmModelLoader = oxmModelLoader; - } - - public OxmModelLoader getOxmModelLoader() { - return oxmModelLoader; - } - - public VnfSearchService getVnfSearch() { - return vnfSearch; - } - - public void setVnfSearch(VnfSearchService vnfSearch) { - this.vnfSearch = vnfSearch; - } - - /** - * Get Full URL for search - * - * @param api the api - * @param indexName - * @return the full url - */ - private String getSasFullUrl(String indexName, String type, String ipAddress, String port, - String version) { - - return String.format("https://%s:%s/services/search-data-service/%s/search/indexes/%s/%s", - ipAddress, port, version, indexName, type); - } - - /** - * Handle search service do query. 
- * - * @param app the app - * @param request the request - * @param response the response - * @throws Exception the exception - */ - - protected JSONObject getRequestParamsFromHeader(HttpServletRequest request) { - StringBuffer br = new StringBuffer(); - String line = null; - try { - BufferedReader reader = request.getReader(); - while ((line = reader.readLine()) != null) { - br.append(line); - } - } catch (Exception exc) { - LOG.error(AaiUiMsgs.ERROR_READING_HTTP_REQ_PARAMS); - } - - String output = br.toString(); - - return new JSONObject(output); - } - - protected void handleSummaryByEntityTypeCount(HttpServletRequest request, - HttpServletResponse response) throws Exception { - JSONObject parameters = getRequestParamsFromHeader(request); - String hashId = null; - if (parameters.has(KEY_HASH_ID)) { - hashId = parameters.get(KEY_HASH_ID).toString(); - } else { - vnfSearch.setZeroCountResponse(response); - LOG.error(AaiUiMsgs.ERROR_HASH_NOT_FOUND); - return; - } - HashQueryResponse hashQueryResponse = getResponseForQueryByHash(hashId, response); - Map hashQueryResponsePayloadParams = new HashMap(); - if (hashQueryResponse.getJsonPayload() != null) { - hashQueryResponsePayloadParams = getPayloadParams(hashQueryResponse.getJsonPayload()); - vnfSearch.getEntityCountResults(response, hashQueryResponsePayloadParams); - } else { - vnfSearch.setZeroCountResponse(response); - LOG.error(AaiUiMsgs.ERROR_INVALID_HASH, hashId); - } - } - - protected Map getPayloadParams(String parameters) { - Map payloadParams = new HashMap(); - try { - JSONObject json = new JSONObject(parameters); - JSONObject payload = json.getJSONObject(KEY_PAYLOAD); - if (payload.length() > 0) { - for (String key : JSONObject.getNames(payload)) { - payloadParams.put(key, payload.getString(key)); - } - } - } catch (JSONException exc) { - LOG.error(AaiUiMsgs.ERROR_PARSING_PARAMS, exc); - } - return payloadParams; - } - - protected HashQueryResponse getResponseForQueryByHash(String hashId, - HttpServletResponse response) { - return vnfSearch.getJSONPayloadFromHash(hashId); - } - - protected void handleSummaryByEntityType(HttpServletRequest request, HttpServletResponse response) - throws Exception { - JSONObject parameters = getRequestParamsFromHeader(request); - String hashId = null; - if (parameters.has(KEY_HASH_ID)) { - hashId = parameters.get(KEY_HASH_ID).toString(); - } else { - vnfSearch.setZeroCountResponse(response); - LOG.error(AaiUiMsgs.ERROR_HASH_NOT_FOUND); - return; - } - HashQueryResponse hashQueryResponse = getResponseForQueryByHash(hashId, response); - Map hashQueryResponsePayloadParams = new HashMap(); - if (hashQueryResponse.getJsonPayload() != null) { - hashQueryResponsePayloadParams = getPayloadParams(hashQueryResponse.getJsonPayload()); - if (parameters.has(KEY_GROUP_BY)) { - String groupByKey = parameters.getString(KEY_GROUP_BY); - vnfSearch.getSummaryByEntityType(response, hashQueryResponsePayloadParams, groupByKey); - } - } else { - LOG.error(AaiUiMsgs.ERROR_INVALID_HASH, hashId); - vnfSearch.setEmptyAggResponse(response); - } - } - - /** - * Gets the value from node. - * - * @param node the node - * @param fieldName the field name - * @return the value from node - */ - private String getValueFromNode(JsonNode node, String fieldName) { - - if (node == null || fieldName == null) { - return null; - } - - JsonNode valueNode = node.get(fieldName); - - if (valueNode != null) { - return valueNode.asText(); - } - - return null; - - } - - /** - * Builds the search response. 
- * - * @param operationResult the operation result - * @param queryStr the query str - * @return TODO - * @return the search response - */ - private List generateSuggestionsForSearchResponse(String operationResult, - String queryStr) { - - - if (operationResult == null || operationResult.length() == 0) { - return null; - } - - ObjectMapper mapper = new ObjectMapper(); - JsonNode rootNode = null; - List suggestionEntityList = new ArrayList(); - try { - rootNode = mapper.readTree(operationResult); - - JsonNode hitsNode = rootNode.get(KEY_SEARCH_RESULT); - - - // Check if there are hits that are coming back - if (hitsNode.has(KEY_HITS)) { - ArrayNode hitsArray = (ArrayNode) hitsNode.get(KEY_HITS); - - /* - * next we iterate over the values in the hit array elements - */ - - Iterator nodeIterator = hitsArray.elements(); - JsonNode entityNode = null; - SuggestionEntity suggestionEntity = null; - JsonNode sourceNode = null; - while (nodeIterator.hasNext()) { - entityNode = nodeIterator.next(); - sourceNode = entityNode.get(KEY_DOCUMENT).get(KEY_CONTENT); - - // do the point transformation as we build the response? - suggestionEntity = new SuggestionEntity(); - suggestionEntity.setRoute(VI_SUGGESTION_ROUTE); - - /* - * This is where we probably want to annotate the search tags because we also have access - * to the seachTagIds - */ - - String searchTagIds = getValueFromNode(sourceNode, KEY_SEARCH_TAG_IDS); - String searchTags = getValueFromNode(sourceNode, KEY_SEARCH_TAGS); - String link = getValueFromNode(sourceNode, KEY_LINK); - String entityType = getValueFromNode(sourceNode, KEY_ENTITY_TYPE); - if (link != null) { - suggestionEntity.setHashId(NodeUtils.generateUniqueShaDigest(link)); - } - - try { - suggestionEntity - .setText(annotateSearchTags(searchTags, searchTagIds, entityType, queryStr)); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), - exc.getLocalizedMessage()); - // at least send back the un-annotated search tags - suggestionEntity.setText(searchTags); - } - - if (searchTags != null) { - suggestionEntityList.add(suggestionEntity); - } - - } - } - } catch (IOException exc) { - LOG.warn(AaiUiMsgs.SEARCH_RESPONSE_BUILDING_EXCEPTION, exc.getLocalizedMessage()); - } - return suggestionEntityList; - } - - /* - */ - - /** - * Query terms match search tag. - * - * @param queryTerms the query terms - * @param searchTag the search tag - * @return true, if successful @return. - */ - private boolean queryTermsMatchSearchTag(String[] queryTerms, String searchTag) { - - if (queryTerms == null || queryTerms.length == 0 || searchTag == null) { - return false; - } - - for (String queryTerm : queryTerms) { - if (searchTag.toLowerCase().contains(queryTerm.toLowerCase())) { - return true; - } - } - - return false; - - } - - /** - * The current format of an UI-dropdown-item is like: "search-terms entityType att1=attr1_val". - * Example, for pserver: search-terms pserver hostname=djmAG-72060, - * pserver-name2=example-pserver-name2-val-17254, pserver-id=example-pserver-id-val-17254, - * ipv4-oam-address=example-ipv4-oam-address-val-17254 SearchController.js parses the above - * format. So if you are modifying the parsing below, please update SearchController.js as well. 
- * - * @param searchTags the search tags - * @param searchTagIds the search tag ids - * @param entityType the entity type - * @param queryStr the query str - * @return the string - */ - - private String annotateSearchTags(String searchTags, String searchTagIds, String entityType, - String queryStr) { - - if (searchTags == null || searchTagIds == null) { - String valueOfSearchTags = String.valueOf(searchTags); - String valueOfSearchTagIds = String.valueOf(searchTagIds); - - LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error", - "Search tags = " + valueOfSearchTags + " and Seach tag IDs = " + valueOfSearchTagIds); - return searchTags; - } - - if (entityType == null) { - LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), "EntityType is null"); - return searchTags; - } - - if (queryStr == null) { - LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), - "Query string is null"); - return searchTags; - } - - /* - * The ElasticSearch analyzer has already applied the lowercase filter, so we don't have to - * covert them again - */ - String[] searchTagsArray = searchTags.split(";"); - String[] searchTagIdsArray = searchTagIds.split(";"); - - // specifically apply lower case to the the query terms to make matching - // simpler - String[] queryTerms = queryStr.toLowerCase().split(" "); - - OxmEntityDescriptor desc = oxmModelLoader.getSearchableEntityDescriptors().get(entityType); - - if (desc == null) { - LOG.error(AaiUiMsgs.ENTITY_NOT_FOUND_IN_OXM, entityType.toString()); - return searchTags; - } - - String primaryKeyName = NodeUtils.concatArray(desc.getPrimaryKeyAttributeName(), "/"); - String primaryKeyValue = null; - - /* - * For each used attribute, get the fieldName for the attribute index and transform the search - * tag into t1,t2,t3 => h1=t1, h2=t2, h3=t3; - */ - StringBuilder searchTagsBuilder = new StringBuilder(128); - searchTagsBuilder.append(entityType); - - String primaryKeyConjunctionValue = null; - boolean queryTermsMatchedSearchTags = false; - - if (searchTagsArray.length == searchTagIdsArray.length) { - for (int i = 0; i < searchTagsArray.length; i++) { - String searchTagAttributeId = searchTagIdsArray[i]; - String searchTagAttributeValue = searchTagsArray[i]; - - // Find the concat conjunction - Map pairConjunctionList = suggestionConfig.getPairingList(); - - String suggConjunction = null; - if (pairConjunctionList.get(searchTagAttributeId) != null) { - suggConjunction = pairConjunctionList.get(searchTagAttributeId); - } else { - suggConjunction = suggestionConfig.getDefaultPairingValue(); - } - - if (primaryKeyName.equals(searchTagAttributeId)) { - primaryKeyValue = searchTagAttributeValue; - primaryKeyConjunctionValue = suggConjunction; - } - - if (queryTermsMatchSearchTag(queryTerms, searchTagAttributeValue)) { - searchTagsBuilder.append(" " + suggConjunction + " " + searchTagAttributeValue); - queryTermsMatchedSearchTags = true; - } - } - } else { - String errorMessage = - "Search tags length did not match search tag ID length for entity type " + entityType; - LOG.error(AaiUiMsgs.ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED, errorMessage); - } - - /* - * if none of the user query terms matched the index entity search tags then we should still tag - * the matched entity with a conjunction set to at least it's entity primary key value to - * discriminate between the entities of the same type in the search results displayed in the UI - * search bar results - */ - - if (!queryTermsMatchedSearchTags) { - - if (primaryKeyValue != null && 
primaryKeyConjunctionValue != null) { - searchTagsBuilder.append(" " + primaryKeyConjunctionValue + " " + primaryKeyValue); - } else { - LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error", - "Could not annotate user query terms " + queryStr - + " from available entity search tags = " + searchTags); - return searchTags; - } - - } - - return searchTagsBuilder.toString(); - - } - - - /** - * @param queryStr - space separate query search terms - * @return - query string with stop-words removed - */ - private String stripStopWordsFromQuery(String queryStr) { - - if (queryStr == null) { - return queryStr; - } - - Collection stopWords = suggestionConfig.getStopWords(); - ArrayList queryTerms = - new ArrayList(Arrays.asList(queryStr.toLowerCase().split(" "))); - - queryTerms.removeAll(stopWords); - - return String.join(" ", queryTerms); - } - - /* - * Expected query: - * - * POST /search/viuiSearch/ - * - * { "maxResults" : "10", "searchStr" : "" } - */ - - /** - * Handle view and inspect search. - * - * @param request the request - * @param maxResults Max number of results to return - * @param response the response - * @return - * @throws IOException Signals that an I/O exception has occurred. - */ - protected List performViewAndInspectQuerySearch( - QuerySearchEntity querySearchEntity, int maxResults) throws IOException { - List suggestionEntityList = new ArrayList(); - - /* - * Based on the configured stop words, we need to strip any matched stop-words ( case - * insensitively ) from the query string, before hitting elastic to prevent the words from being - * used against the elastic view-and-inspect index. Another alternative to this approach would - * be to define stop words on the elastic search index configuration for the - * entity-search-index, but but that may be more complicated / more risky than just a simple bug - * fix, but it's something we should think about for the future. 
- */ - - try { - final String queryStringWithoutStopWords = - stripStopWordsFromQuery(querySearchEntity.getQueryStr()); - - final String fullUrlStr = getSasFullUrl(sasConfig.getIndexName(), VALUE_QUERY, - sasConfig.getIpAddress(), sasConfig.getHttpPort(), sasConfig.getVersion()); - - String postBody = - String.format(VIUI_SEARCH_TEMPLATE, maxResults, queryStringWithoutStopWords); - - OperationResult opResult = search.doPost(fullUrlStr, postBody, "application/json"); - if (opResult.getResultCode() == 200) { - suggestionEntityList = generateSuggestionsForSearchResponse(opResult.getResult(), - querySearchEntity.getQueryStr()); - } - } catch (Exception exc) { - LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, - "View and inspect query failed with error = " + exc.getMessage()); - } - return suggestionEntityList; - } - - protected List performVnfQuerySearch(QuerySearchEntity querySearchEntity, - int resultCountLimit) throws Exception { - return vnfSearch.getSuggestionsResults(querySearchEntity, resultCountLimit); - } - - protected void handleQuerySearch(HttpServletRequest request, HttpServletResponse response) - throws IOException { - String payload = NodeUtils.getBody(request); - if (payload == null || payload.isEmpty()) { - handleSearchServletErrors("Unable to parse payload", null, response); - } else { - QuerySearchEntity querySearchEntity = mapper.readValue(payload, QuerySearchEntity.class); - int maxResultsPerSearch = Integer.valueOf(querySearchEntity.getMaxResults()); - try { - SearchResponse searchResponse = new SearchResponse(); - List viewAndInspectsuggestionEntityList = - new ArrayList(); - List vnfSuggestionEntityList = new ArrayList(); - long processTime = System.currentTimeMillis(); - for (String searchService : suggestionConfig.getSearchIndexToSearchService().values()) { - if (searchService.equals(SearchServiceWrapper.class.getSimpleName())) { - viewAndInspectsuggestionEntityList = - performViewAndInspectQuerySearch(querySearchEntity, maxResultsPerSearch); - } else if (searchService.equals(VnfSearchService.class.getSimpleName())) { - vnfSuggestionEntityList = performVnfQuerySearch(querySearchEntity, maxResultsPerSearch); - } - } - - int totalAdded = 0; - for (int i = 0; i < maxResultsPerSearch; i++) { - if (i < viewAndInspectsuggestionEntityList.size() && totalAdded < maxResultsPerSearch) { - searchResponse.addSuggestion(viewAndInspectsuggestionEntityList.get(i)); - totalAdded++; - } - if (i < vnfSuggestionEntityList.size() && totalAdded < maxResultsPerSearch) { - searchResponse.addSuggestion(vnfSuggestionEntityList.get(i)); - totalAdded++; - } - if (totalAdded >= maxResultsPerSearch) { - break; - } - } - searchResponse.addToTotalFound(totalAdded); - String searchResponseJson = NodeUtils.convertObjectToJson(searchResponse, true); - - processTime = System.currentTimeMillis() - processTime; - searchResponse.setProcessingTimeInMs(processTime); - setServletResponse(response, searchResponseJson); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, - "Query search failed with error = " + exc.getMessage()); - } - } - } - - public void doPost(HttpServletRequest request, HttpServletResponse response) - throws ServletException, IOException { - - String api = null; - try { - - // set default response - response.setStatus(200); - - if (request.getRequestURI().contains(QUERY_SEARCH)) { - api = QUERY_SEARCH; - handleQuerySearch(request, response); - return; - } else if (request.getRequestURI().contains(SUMMARY_BY_ENTITY_TYPE_COUNT_API)) { - api = SUMMARY_BY_ENTITY_TYPE_COUNT_API; - 
handleSummaryByEntityTypeCount(request, response); - return; - } else if (request.getRequestURI().contains(SUMMARY_BY_ENTITY_TYPE_API)) { - api = SUMMARY_BY_ENTITY_TYPE_API; - handleSummaryByEntityType(request, response); - return; - } else { - - final String errorMessage = "Ignored request-uri = " + request.getRequestURI(); - LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, errorMessage); - response.setStatus(404); - response.setContentType("application/json"); - PrintWriter out = response.getWriter(); - out.println(generateJsonErrorResponse(errorMessage)); - out.close(); - - - } - } catch (JSONException je) { - handleSearchServletErrors("Caught an exception while parsing json in processing for " + api, - je, response); - } catch (Exception e1) { - handleSearchServletErrors("Caught an exception while communicating with elasticsearch", e1, - response); - } - } - - /** - * Generate json error response. - * - * @param message the message - * @return the string - */ - /* - * This is the manual approach, however we could also create an object container for the error - * then use the Jackson ObjectWrite to dump the object to json instead. If it gets any more - * complicated we could do that approach so we don't have to manually trip over the JSON - * formatting. - */ - protected String generateJsonErrorResponse(String message) { - return String.format("{ \"errorMessage\" : %s }", message); - } - - /** - * Handle search servlet errors. - * - * @param errorMsg the error msg - * @param exc the exc - * @param response the response - * @throws IOException Signals that an I/O exception has occurred. - */ - public void handleSearchServletErrors(String errorMsg, Exception exc, - HttpServletResponse response) throws IOException { - - String errorLogMsg = - (exc == null ? errorMsg : errorMsg + ". Error:" + exc.getLocalizedMessage()); - - LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, errorLogMsg); - - response.setContentType("application/json"); - PrintWriter out = response.getWriter(); - out.println(generateJsonErrorResponse(errorMsg)); - out.close(); - } - - - /** - * Execute query. - * - * @param response the response - * @param requestUrl the request url - * @param requestJsonPayload the request json payload - * @throws Exception the exception - */ - public void executeQuery(HttpServletResponse response, String requestUrl, - String requestJsonPayload) throws Exception { - - OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json"); - - if (opResult != null) { - - response.setStatus(opResult.getResultCode()); - String finalOutput = opResult.getResult(); - - // example: failed to populate drop-down items from formatOutputJson() - if (finalOutput != null) { - response.setContentType("application/json"); - PrintWriter out = response.getWriter(); - out.println(finalOutput); - out.close(); - } - - } else { - response.setStatus(500); - } - - } - - /** - * Sets the servlet response. - * - * @param response the response - * @param postPayload the post payload - * - * @throws IOException Signals that an I/O exception has occurred. 
- */ - private void setServletResponse(HttpServletResponse response, String postPayload) - throws IOException { - - if (postPayload != null) { - response.setContentType("application/json"); - PrintWriter out = response.getWriter(); - out.println(postPayload); - out.close(); - } - } - - /** - * @return the mapper - */ - public ObjectMapper getMapper() { - return mapper; - } - - /** - * @param mapper the mapper to set - */ - public void setMapper(ObjectMapper mapper) { - this.mapper = mapper; - } - - /** - * @return the serialversionuid - */ - public static long getSerialversionuid() { - return serialVersionUID; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - - /** - * @return the searchString - */ - public static String getSearchString() { - return SEARCH_STRING; - } - - /** - * @return the countString - */ - public static String getCountString() { - return COUNT_STRING; - } - - /** - * @return the querySearch - */ - public static String getQuerySearch() { - return QUERY_SEARCH; - } - - /** - * @return the summaryByEntityTypeApi - */ - public static String getSummaryByEntityTypeApi() { - return SUMMARY_BY_ENTITY_TYPE_API; - } - - /** - * @return the summaryByEntityTypeCountApi - */ - public static String getSummaryByEntityTypeCountApi() { - return SUMMARY_BY_ENTITY_TYPE_COUNT_API; - } - - /** - * @return the valueAnykey - */ - public static String getValueAnykey() { - return VALUE_ANYKEY; - } - - /** - * @return the valueQuery - */ - public static String getValueQuery() { - return VALUE_QUERY; - } - - /** - * @return the keyHashId - */ - public static String getKeyHashId() { - return KEY_HASH_ID; - } - - /** - * @return the keyGroupBy - */ - public static String getKeyGroupBy() { - return KEY_GROUP_BY; - } - - /** - * @return the keySearchResult - */ - public static String getKeySearchResult() { - return KEY_SEARCH_RESULT; - } - - /** - * @return the keyHits - */ - public static String getKeyHits() { - return KEY_HITS; - } - - /** - * @return the keyPayload - */ - public static String getKeyPayload() { - return KEY_PAYLOAD; - } - - /** - * @return the keyDocument - */ - public static String getKeyDocument() { - return KEY_DOCUMENT; - } - - /** - * @return the keyContent - */ - public static String getKeyContent() { - return KEY_CONTENT; - } - - /** - * @return the keySearchTagIds - */ - public static String getKeySearchTagIds() { - return KEY_SEARCH_TAG_IDS; - } - - /** - * @return the keySearchTags - */ - public static String getKeySearchTags() { - return KEY_SEARCH_TAGS; - } - - /** - * @return the keyLink - */ - public static String getKeyLink() { - return KEY_LINK; - } - - /** - * @return the keyEntityType - */ - public static String getKeyEntityType() { - return KEY_ENTITY_TYPE; - } - - /** - * @return the viSuggestionRoute - */ - public static String getViSuggestionRoute() { - return VI_SUGGESTION_ROUTE; - } - - /** - * @return the viuiSearchTemplate - */ - public static String getViuiSearchTemplate() { - return VIUI_SEARCH_TEMPLATE; - } - - - -} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java b/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java index e3f469f..b2ed4a4 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java @@ -36,16 +36,19 @@ import java.util.concurrent.ExecutorService; import 
java.util.concurrent.atomic.AtomicInteger; import org.apache.http.client.utils.URIBuilder; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.rest.OperationResult; import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.entity.SearchableEntity; +import org.onap.aai.sparky.sync.entity.SearchableEntity; import org.onap.aai.sparky.util.NodeUtils; import org.onap.aai.sparky.viewandinspect.config.TierSupportUiConstants; -import org.onap.aai.sparky.viewandinspect.config.VisualizationConfig; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; import org.onap.aai.sparky.viewandinspect.entity.InlineMessage; import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction; @@ -58,8 +61,6 @@ import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingAction; import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; import org.onap.aai.sparky.viewandinspect.task.PerformNodeSelfLinkProcessingTask; import org.onap.aai.sparky.viewandinspect.task.PerformSelfLinkDeterminationTask; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.JsonNode; @@ -78,17 +79,18 @@ public class VisualizationContext { private static final Logger LOG = LoggerFactory.getInstance().getLogger(VisualizationContext.class); - private final ActiveInventoryDataProvider aaiProvider; + private final ActiveInventoryAdapter aaiAdapter; private int maxSelfLinkTraversalDepth; private AtomicInteger numLinksDiscovered; private AtomicInteger numSuccessfulLinkResolveFromCache; private AtomicInteger numSuccessfulLinkResolveFromFromServer; private AtomicInteger numFailedLinkResolve; + private AtomicInteger nodeIntegrityWorkOnHand; private AtomicInteger aaiWorkOnHand; private ActiveInventoryConfig aaiConfig; - private VisualizationConfig visualizationConfig; + private VisualizationConfigs visualizationConfigs; private List shallowEntities; private AtomicInteger totalLinksRetrieved; @@ -100,6 +102,7 @@ public class VisualizationContext { private ObjectMapper mapper; private InlineMessage inlineMessage = null; + private ExecutorService tabularExecutorService; private ExecutorService aaiExecutorService; /* @@ -115,14 +118,16 @@ public class VisualizationContext { * @param loader the loader * @throws Exception the exception */ - public VisualizationContext(long contextId, ActiveInventoryDataProvider aaiDataProvider, - ExecutorService aaiExecutorService, OxmModelLoader loader) throws Exception { + public VisualizationContext(long contextId, ActiveInventoryAdapter aaiAdapter, + ExecutorService tabularExecutorService, ExecutorService aaiExecutorService, + VisualizationConfigs visualizationConfigs) throws Exception { this.contextId = contextId; this.contextIdStr = "[Context-Id=" + contextId + "]"; - this.aaiProvider = aaiDataProvider; + this.aaiAdapter = aaiAdapter; + this.tabularExecutorService = 
tabularExecutorService; this.aaiExecutorService = aaiExecutorService; - this.loader = loader; + this.visualizationConfigs = visualizationConfigs; this.nodeCache = new ConcurrentHashMap(); this.numLinksDiscovered = new AtomicInteger(0); @@ -130,13 +135,13 @@ public class VisualizationContext { this.numSuccessfulLinkResolveFromCache = new AtomicInteger(0); this.numSuccessfulLinkResolveFromFromServer = new AtomicInteger(0); this.numFailedLinkResolve = new AtomicInteger(0); + this.nodeIntegrityWorkOnHand = new AtomicInteger(0); this.aaiWorkOnHand = new AtomicInteger(0); this.aaiConfig = ActiveInventoryConfig.getConfig(); - this.visualizationConfig = VisualizationConfig.getConfig(); this.shallowEntities = aaiConfig.getAaiRestConfig().getShallowEntities(); - this.maxSelfLinkTraversalDepth = visualizationConfig.getMaxSelfLinkTraversalDepth(); + this.maxSelfLinkTraversalDepth = this.visualizationConfigs.getMaxSelfLinkTraversalDepth(); this.mapper = new ObjectMapper(); mapper.setSerializationInclusion(Include.NON_EMPTY); @@ -164,7 +169,8 @@ public class VisualizationContext { return queryParams; } - Map entityDescriptors = loader.getEntityDescriptors(); + Map entityDescriptors = + OxmEntityLookup.getInstance().getEntityDescriptors(); try { @@ -183,7 +189,7 @@ public class VisualizationContext { if (descriptor != null) { entityType = urlPathElements[index]; - primaryKeyNames = descriptor.getPrimaryKeyAttributeName(); + primaryKeyNames = descriptor.getPrimaryKeyAttributeNames(); /* * Make sure from what ever index we matched the parent entity-type on that we can extract @@ -270,7 +276,7 @@ public class VisualizationContext { * */ - ActiveInventoryNode newNode = new ActiveInventoryNode(); + ActiveInventoryNode newNode = new ActiveInventoryNode(this.visualizationConfigs); newNode.setEntityType(entityType); /* @@ -337,7 +343,7 @@ public class VisualizationContext { */ String selfLinkQuery = - aaiProvider.getGenericQueryForSelfLink(entityType, newNode.getQueryParams()); + aaiAdapter.getGenericQueryForSelfLink(entityType, newNode.getQueryParams()); /** *
  • get the self-link @@ -355,7 +361,7 @@ public class VisualizationContext { txn.setNewNode(newNode); txn.setParentNodeId(ain.getNodeId()); aaiWorkOnHand.incrementAndGet(); - supplyAsync(new PerformSelfLinkDeterminationTask(txn, null, aaiProvider), + supplyAsync(new PerformSelfLinkDeterminationTask(txn, null, aaiAdapter), aaiExecutorService).whenComplete((nodeTxn, error) -> { aaiWorkOnHand.decrementAndGet(); if (error != null) { @@ -368,15 +374,13 @@ public class VisualizationContext { if (opResult != null && opResult.wasSuccessful()) { - if (opResult.isResolvedLinkFailure()) { + if (!opResult.wasSuccessful()) { numFailedLinkResolve.incrementAndGet(); } - if (opResult.isResolvedLinkFromCache()) { + if (opResult.isFromCache()) { numSuccessfulLinkResolveFromCache.incrementAndGet(); - } - - if (opResult.isResolvedLinkFromServer()) { + } else { numSuccessfulLinkResolveFromFromServer.incrementAndGet(); } @@ -425,7 +429,6 @@ public class VisualizationContext { newChildNode.setSelfLinkPendingResolve(false); newChildNode.setSelfLinkProcessed(true); - newChildNode.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED, NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); @@ -542,7 +545,7 @@ public class VisualizationContext { if (nodeValue != null && nodeValue.isValueNode()) { - if (loader.getEntityDescriptor(fieldName) == null) { + if (OxmEntityLookup.getInstance().getEntityDescriptors().get(fieldName) == null) { /* * entity property name is not an entity, thus we can add this property name and value @@ -557,7 +560,7 @@ public class VisualizationContext { if (nodeValue.isArray()) { - if (loader.getEntityDescriptor(fieldName) == null) { + if (OxmEntityLookup.getInstance().getEntityDescriptors().get(fieldName) == null) { /* * entity property name is not an entity, thus we can add this property name and value @@ -623,10 +626,10 @@ public class VisualizationContext { */ ain.clearQueryParams(); ain.addQueryParams(extractQueryParamsFromSelfLink(ain.getSelfLink())); - ain.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED, NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); + } /** @@ -678,7 +681,7 @@ public class VisualizationContext { txn.setProcessingNode(ain); txn.setRequestParameters(depthModifier); aaiWorkOnHand.incrementAndGet(); - supplyAsync(new PerformNodeSelfLinkProcessingTask(txn, depthModifier, aaiProvider, aaiConfig), + supplyAsync(new PerformNodeSelfLinkProcessingTask(txn, depthModifier, aaiAdapter, aaiConfig), aaiExecutorService).whenComplete((nodeTxn, error) -> { aaiWorkOnHand.decrementAndGet(); if (error != null) { @@ -703,15 +706,13 @@ public class VisualizationContext { if (opResult != null && opResult.wasSuccessful()) { - if (opResult.isResolvedLinkFailure()) { + if (!opResult.wasSuccessful()) { numFailedLinkResolve.incrementAndGet(); } - if (opResult.isResolvedLinkFromCache()) { + if (opResult.isFromCache()) { numSuccessfulLinkResolveFromCache.incrementAndGet(); - } - - if (opResult.isResolvedLinkFromServer()) { + } else { numSuccessfulLinkResolveFromFromServer.incrementAndGet(); } @@ -871,7 +872,7 @@ public class VisualizationContext { * around the root node. 
*/ - if (!rootNodeDiscovered || cacheNode.getNodeDepth() < VisualizationConfig.getConfig() + if (!rootNodeDiscovered || cacheNode.getNodeDepth() < this.visualizationConfigs .getMaxSelfLinkTraversalDepth()) { if (LOG.isDebugEnabled()) { @@ -959,7 +960,7 @@ public class VisualizationContext { LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "Unexpected array type with a key = " + fieldName); } } else if (fieldValue.isValueNode()) { - if (loader.getEntityDescriptor(field.getKey()) == null) { + if (OxmEntityLookup.getInstance().getEntityDescriptors().get(field.getKey()) == null) { /* * property key is not an entity type, add it to our property set. */ @@ -1101,8 +1102,8 @@ public class VisualizationContext { return false; } - List pkeyNames = - loader.getEntityDescriptor(ain.getEntityType()).getPrimaryKeyAttributeName(); + List pkeyNames = OxmEntityLookup.getInstance().getEntityDescriptors() + .get(ain.getEntityType()).getPrimaryKeyAttributeNames(); if (pkeyNames == null || pkeyNames.size() == 0) { LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "Primary key names is empty"); @@ -1179,10 +1180,9 @@ public class VisualizationContext { return false; } - OxmModelLoader modelLoader = OxmModelLoader.getInstance(); - Relationship[] relationshipArray = relationshipList.getRelationshipList(); OxmEntityDescriptor descriptor = null; + String repairedSelfLink = null; if (relationshipArray != null) { @@ -1203,7 +1203,7 @@ public class VisualizationContext { return false; } - newNode = new ActiveInventoryNode(); + newNode = new ActiveInventoryNode(this.visualizationConfigs); String entityType = r.getRelatedTo(); @@ -1213,7 +1213,7 @@ public class VisualizationContext { } } - descriptor = modelLoader.getEntityDescriptor(r.getRelatedTo()); + descriptor = OxmEntityLookup.getInstance().getEntityDescriptors().get(r.getRelatedTo()); newNode.setNodeId(nodeId); newNode.setEntityType(entityType); @@ -1223,7 +1223,7 @@ public class VisualizationContext { if (descriptor != null) { - List pkeyNames = descriptor.getPrimaryKeyAttributeName(); + List pkeyNames = descriptor.getPrimaryKeyAttributeNames(); newNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED, NodeProcessingAction.SELF_LINK_SET); @@ -1337,7 +1337,7 @@ public class VisualizationContext { return; } - ActiveInventoryNode newNode = new ActiveInventoryNode(); + ActiveInventoryNode newNode = new ActiveInventoryNode(this.visualizationConfigs); newNode.setNodeId(searchTargetEntity.getId()); newNode.setEntityType(searchTargetEntity.getEntityType()); @@ -1399,7 +1399,7 @@ public class VisualizationContext { case NEIGHBORS_UNPROCESSED: { - if (n.getNodeDepth() < VisualizationConfig.getConfig().getMaxSelfLinkTraversalDepth()) { + if (n.getNodeDepth() < this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) { /* * Only process our neighbors relationships if our current depth is less than the max * depth @@ -1528,7 +1528,7 @@ public class VisualizationContext { targetNode.addInboundNeighbor(srcNode.getNodeId()); - if (VisualizationConfig.getConfig().makeAllNeighborsBidirectional()) { + if (this.visualizationConfigs.makeAllNeighborsBidirectional()) { targetNode.addOutboundNeighbor(srcNode.getNodeId()); } @@ -1626,7 +1626,8 @@ public class VisualizationContext { return null; } - OxmEntityDescriptor descriptor = loader.getEntityDescriptor(entityType); + OxmEntityDescriptor descriptor = + OxmEntityLookup.getInstance().getEntityDescriptors().get(entityType); if (descriptor == null) { LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, @@ -1634,7 +1635,7 @@ public class VisualizationContext 
{ return null; } - List pkeyNames = descriptor.getPrimaryKeyAttributeName(); + List pkeyNames = descriptor.getPrimaryKeyAttributeNames(); if (pkeyNames == null || pkeyNames.size() == 0) { LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java b/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java index 0a9797f..69ef774 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java @@ -30,30 +30,24 @@ import java.util.concurrent.ExecutorService; import javax.servlet.ServletException; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.aai.ActiveInventoryAdapter; -import org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryRestConfig; -import org.onap.aai.sparky.dal.cache.EntityCache; -import org.onap.aai.sparky.dal.cache.PersistentEntityCache; -import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchAdapter; -import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchDataProvider; -import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.rest.RestClientBuilder; -import org.onap.aai.sparky.dal.rest.RestfulDataAccessor; import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.synchronizer.entity.SearchableEntity; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.entity.SearchableEntity; import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.sparky.viewandinspect.config.VisualizationConfig; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; import org.onap.aai.sparky.viewandinspect.entity.D3VisualizationOutput; import org.onap.aai.sparky.viewandinspect.entity.GraphMeta; import org.onap.aai.sparky.viewandinspect.entity.QueryParams; import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonProcessingException; @@ -66,63 +60,50 @@ public class VisualizationService { private static final Logger LOG = LoggerFactory.getInstance().getLogger(VisualizationService.class); - private OxmModelLoader loader; private ObjectMapper mapper = new ObjectMapper(); - private final ActiveInventoryDataProvider aaiProvider; - private final ActiveInventoryRestConfig aaiRestConfig; - private final ElasticSearchDataProvider esProvider; - private final ElasticSearchConfig esConfig; + private final ActiveInventoryAdapter aaiAdapter; + private final ElasticSearchAdapter esAdapter; + private final ExecutorService tabularExecutorService; private final ExecutorService aaiExecutorService; private ConcurrentHashMap contextMap; private final SecureRandom secureRandom; private 
ActiveInventoryConfig aaiConfig; - private VisualizationConfig visualizationConfig; - - public VisualizationService(OxmModelLoader loader) throws Exception { - this.loader = loader; + private VisualizationConfigs visualizationConfigs; + private ElasticSearchEndpointConfig endpointEConfig; + private ElasticSearchSchemaConfig schemaEConfig; - aaiRestConfig = ActiveInventoryConfig.getConfig().getAaiRestConfig(); + public VisualizationService(OxmModelLoader loader, VisualizationConfigs visualizationConfigs, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchEndpointConfig endpointConfig, ElasticSearchSchemaConfig schemaConfig) + throws Exception { - EntityCache cache = null; - secureRandom = new SecureRandom(); - - ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new RestClientBuilder()); - if (aaiRestConfig.isCacheEnabled()) { - cache = new PersistentEntityCache(aaiRestConfig.getStorageFolderOverride(), - aaiRestConfig.getNumCacheWorkers()); + this.visualizationConfigs = visualizationConfigs; + this.endpointEConfig = endpointConfig; + this.schemaEConfig = schemaConfig; - aaiAdapter.setCacheEnabled(true); - aaiAdapter.setEntityCache(cache); - } + secureRandom = new SecureRandom(); - this.aaiProvider = aaiAdapter; + /* + * Fix constructor with properly wired in properties + */ - RestClientBuilder esClientBuilder = new RestClientBuilder(); - esClientBuilder.setUseHttps(false); - RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(esClientBuilder); - this.esConfig = ElasticSearchConfig.getConfig(); - this.esProvider = new ElasticSearchAdapter(nonCachingRestProvider, this.esConfig); + this.aaiAdapter = aaiAdapter; + this.esAdapter = esAdapter; this.mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); this.contextMap = new ConcurrentHashMap(); - this.visualizationConfig = VisualizationConfig.getConfig(); + this.tabularExecutorService = NodeUtils.createNamedExecutor("TABULAR-WORKER", + this.visualizationConfigs.getNumOfThreadsToFetchNodeIntegrity(), LOG); this.aaiConfig = ActiveInventoryConfig.getConfig(); this.aaiExecutorService = NodeUtils.createNamedExecutor("SLNC-WORKER", aaiConfig.getAaiRestConfig().getNumResolverWorkers(), LOG); - } - public OxmModelLoader getLoader() { - return loader; - } - - public void setLoader(OxmModelLoader loader) { - this.loader = loader; } /** @@ -222,7 +203,9 @@ public class VisualizationService { * Here is where we need to make a dip to elastic-search for the self-link by entity-id (link * hash). */ - dataCollectionResult = esProvider.retrieveEntityById(queryRequest.getHashId()); + dataCollectionResult = esAdapter.retrieveEntityById(endpointEConfig.getEsIpAddress(), + endpointEConfig.getEsServerPort(), schemaEConfig.getIndexName(), + schemaEConfig.getIndexDocType(), queryRequest.getHashId()); sourceEntity = extractSearchableEntityFromElasticEntity(dataCollectionResult); if (sourceEntity != null) { @@ -243,7 +226,8 @@ public class VisualizationService { try { - d3OutputJsonOutput = getVisualizationOutputBasedonGenericQuery(sourceEntity, queryParams); + d3OutputJsonOutput = + getVisualizationOutputBasedonGenericQuery(sourceEntity, queryParams, queryRequest); if (LOG.isDebugEnabled()) { LOG.debug(AaiUiMsgs.DEBUG_GENERIC, @@ -270,22 +254,22 @@ public class VisualizationService { } + /** * Gets the visualization output basedon generic query. 
* - * @param searchtargetEntity entity that will be used to start visualization flow - * @param queryParams the query params - * @return the visualization output basedon generic query - * @throws ServletException the servlet exception + * @param searchtargetEntity entity that will be used to start visualization flow @param + * queryParams the query params @return the visualization output basedon generic + * query @throws ServletException the servlet exception @throws */ private String getVisualizationOutputBasedonGenericQuery(SearchableEntity searchtargetEntity, - QueryParams queryParams) throws ServletException { + QueryParams queryParams, QueryRequest request) throws ServletException { long opStartTimeInMs = System.currentTimeMillis(); VisualizationTransformer transformer = null; try { - transformer = new VisualizationTransformer(); + transformer = new VisualizationTransformer(visualizationConfigs); } catch (Exception exc) { throw new ServletException( "Failed to create VisualizationTransformer instance because of execption", exc); @@ -294,7 +278,8 @@ public class VisualizationService { VisualizationContext visContext = null; long contextId = secureRandom.nextLong(); try { - visContext = new VisualizationContext(contextId, aaiProvider, aaiExecutorService, loader); + visContext = new VisualizationContext(contextId, this.aaiAdapter, tabularExecutorService, + aaiExecutorService, this.visualizationConfigs); contextMap.putIfAbsent(contextId, visContext); } catch (Exception e1) { LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT, @@ -349,9 +334,10 @@ public class VisualizationService { try { output = transformer .generateVisualizationOutput((System.currentTimeMillis() - opStartTimeInMs), graphMeta); - } catch (Exception exc) { - LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, exc.getLocalizedMessage()); + } catch (JsonProcessingException exc) { throw new ServletException("Caught an exception while generation visualization output", exc); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, exc.getLocalizedMessage()); } output.setInlineMessage(visContext.getInlineMessage()); @@ -378,8 +364,8 @@ public class VisualizationService { } public void shutdown() { - aaiProvider.shutdown(); + tabularExecutorService.shutdown(); aaiExecutorService.shutdown(); - esProvider.shutdown(); } + } diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java b/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java index fdc078e..7c1d16d 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java @@ -27,21 +27,19 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.UUID; -import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; import org.onap.aai.sparky.logging.AaiUiMsgs; import org.onap.aai.sparky.util.ConfigHelper; -import org.onap.aai.sparky.viewandinspect.config.VisualizationConfig; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; import org.onap.aai.sparky.viewandinspect.entity.D3VisualizationOutput; import org.onap.aai.sparky.viewandinspect.entity.GraphMeta; import org.onap.aai.sparky.viewandinspect.entity.JsonNode; import 
org.onap.aai.sparky.viewandinspect.entity.JsonNodeLink; import org.onap.aai.sparky.viewandinspect.entity.NodeDebug; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -63,7 +61,6 @@ public class VisualizationTransformer { LoggerFactory.getInstance().getLogger(VisualizationTransformer.class); List flatNodeArray = new ArrayList(); - Set enrichableUriPrefixes = null; /* * Maybe this isn't a string but Json-Model objects that we will convert to final string @@ -75,7 +72,7 @@ public class VisualizationTransformer { - private VisualizationConfig visualizationConfig; + private VisualizationConfigs visualizationConfigs; /** @@ -83,9 +80,8 @@ public class VisualizationTransformer { * * @throws Exception the exception */ - public VisualizationTransformer() throws Exception { - visualizationConfig = VisualizationConfig.getConfig(); - + public VisualizationTransformer(VisualizationConfigs visualizationConfigs) throws Exception { + this.visualizationConfigs = visualizationConfigs; } @@ -108,7 +104,7 @@ public class VisualizationTransformer { for (JsonNode n : flatNodeArray) { if (n.isRootNode()) { n.getNodeMeta().setSearchTarget(true); - n.getNodeMeta().setClassName(visualizationConfig.getSelectedSearchedNodeClassName()); + n.getNodeMeta().setClassName(this.visualizationConfigs.getSelectedSearchedNodeClassName()); } } @@ -160,7 +156,7 @@ public class VisualizationTransformer { ObjectMapper mapper = new ObjectMapper(); final String fileContent = ConfigHelper.getFileContents( - System.getProperty("AJSC_HOME") + visualizationConfig.getAaiEntityNodeDescriptors()); + System.getProperty("AJSC_HOME") + this.visualizationConfigs.getAaiEntityNodeDescriptors()); com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDefinitions = mapper.readTree(fileContent); graphMeta.setAaiEntityNodeDescriptors(aaiEntityNodeDefinitions); @@ -211,7 +207,7 @@ public class VisualizationTransformer { * current node. */ - if (ain.getNodeDepth() < VisualizationConfig.getConfig().getMaxSelfLinkTraversalDepth()) { + if (ain.getNodeDepth() < this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) { Collection outboundNeighbors = ain.getOutboundNeighbors(); @@ -266,17 +262,13 @@ public class VisualizationTransformer { for (ActiveInventoryNode n : nodeMap.values()) { - if (n.getNodeDepth() <= VisualizationConfig.getConfig().getMaxSelfLinkTraversalDepth()) { - - JsonNode jsonNode = new JsonNode(n); + if (n.getNodeDepth() <= this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) { - if (this.isUriEnrichable(n.getSelfLink())) { - jsonNode.getNodeMeta().setEnrichableNode(true); - } + JsonNode jsonNode = new JsonNode(n, this.visualizationConfigs); - jsonNode.getNodeMeta().setClassName(visualizationConfig.getGeneralNodeClassName()); + jsonNode.getNodeMeta().setClassName(this.visualizationConfigs.getGeneralNodeClassName()); - if (VisualizationConfig.getConfig().isVisualizationDebugEnabled()) { + if (this.visualizationConfigs.isVisualizationDebugEnabled()) { NodeDebug nodeDebug = jsonNode.getNodeMeta().getNodeDebug(); @@ -295,92 +287,4 @@ public class VisualizationTransformer { } } - /** - * Checks if is uri enrichable. 
- * - * @param uri the uri - * @return true, if is uri enrichable - */ - private boolean isUriEnrichable(String uri) { - if (enrichableUriPrefixes != null) { - for (String prefix : enrichableUriPrefixes) { - if (uri.contains(prefix)) { // AAI-4089 - return true; - } - } - } - return false; - } - - - /** - * @return the flatNodeArray - */ - public List getFlatNodeArray() { - return flatNodeArray; - } - - - /** - * @param flatNodeArray the flatNodeArray to set - */ - public void setFlatNodeArray(List flatNodeArray) { - this.flatNodeArray = flatNodeArray; - } - - - /** - * @return the enrichableUriPrefixes - */ - public Set getEnrichableUriPrefixes() { - return enrichableUriPrefixes; - } - - - /** - * @param enrichableUriPrefixes the enrichableUriPrefixes to set - */ - public void setEnrichableUriPrefixes(Set enrichableUriPrefixes) { - this.enrichableUriPrefixes = enrichableUriPrefixes; - } - - - /** - * @return the linkArrayOutput - */ - public List getLinkArrayOutput() { - return linkArrayOutput; - } - - - /** - * @param linkArrayOutput the linkArrayOutput to set - */ - public void setLinkArrayOutput(List linkArrayOutput) { - this.linkArrayOutput = linkArrayOutput; - } - - - /** - * @return the visualizationConfig - */ - public VisualizationConfig getVisualizationConfig() { - return visualizationConfig; - } - - - /** - * @param visualizationConfig the visualizationConfig to set - */ - public void setVisualizationConfig(VisualizationConfig visualizationConfig) { - this.visualizationConfig = visualizationConfig; - } - - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } } diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/servlet/SearchServlet.java b/src/main/java/org/onap/aai/sparky/viewandinspect/servlet/SearchServlet.java deleted file mode 100644 index 5a84346..0000000 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/servlet/SearchServlet.java +++ /dev/null @@ -1,224 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- */ -package org.onap.aai.sparky.viewandinspect.servlet; - -import java.io.IOException; -import java.io.PrintWriter; -import java.util.HashMap; -import java.util.Map; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.json.JSONException; -import org.json.JSONObject; -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.elasticsearch.SearchAdapter; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.sas.config.SearchServiceConfig; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.search.VnfSearchService; -import org.onap.aai.sparky.search.config.SuggestionConfig; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.sparky.viewandinspect.services.SearchServiceWrapper; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.onap.aai.cl.mdc.MdcContext; - -/** - * The Class SearchServlet. - */ - -public class SearchServlet extends HttpServlet { - - private static final long serialVersionUID = 1L; - - /** - * @return the searchWrapper - */ - public SearchServiceWrapper getSearchWrapper() { - return searchWrapper; - } - - /** - * @param searchWrapper the searchWrapper to set - */ - public void setSearchWrapper(SearchServiceWrapper searchWrapper) { - this.searchWrapper = searchWrapper; - } - - /** - * @return the serialversionuid - */ - public static long getSerialversionuid() { - return serialVersionUID; - } - - /** - * @return the log - */ - public static Logger getLog() { - return LOG; - } - - /** - * @return the keyPayload - */ - public static String getKeyPayload() { - return KEY_PAYLOAD; - } - - - private static final Logger LOG = LoggerFactory.getInstance().getLogger(SearchServlet.class); - - private SearchServiceWrapper searchWrapper = null; - - private static final String KEY_PAYLOAD = "payload"; - - /** - * Instantiates a new search servlet. 
- */ - public SearchServlet() {} - - /* - * (non-Javadoc) - * - * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, - * javax.servlet.http.HttpServletResponse) - */ - @Override - public void doGet(HttpServletRequest request, HttpServletResponse response) - throws ServletException, IOException { - doPost(request, response); - } - - public void destroy() { - // TODO Auto-generated method stub - super.destroy(); - } - - public void init() throws ServletException { - super.init(); - searchWrapper = new SearchServiceWrapper(); - } - - protected Map getPayloadParams(JSONObject parameters) { - Map payloadParams = new HashMap(); - try { - JSONObject payload = parameters.getJSONObject(KEY_PAYLOAD); - if (payload.length() > 0) { - for (String key : JSONObject.getNames(payload)) { - payloadParams.put(key, payload.getString(key)); - } - } - } catch (JSONException exc) { - LOG.error(AaiUiMsgs.ERROR_PARSING_PARAMS, exc); - } - return payloadParams; - } - - /* - * (non-Javadoc) - * - * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, - * javax.servlet.http.HttpServletResponse) - */ - @Override - public void doPost(HttpServletRequest request, HttpServletResponse response) - throws ServletException, IOException { - String txnID = request.getHeader("X-TransactionId"); - if (txnID == null) { - txnID = NodeUtils.getRandomTxnId(); - } - - String partnerName = request.getHeader("X-FromAppId"); - if (partnerName == null) { - partnerName = "Browser"; - } - MdcContext.initialize(txnID, "AAI_UI", "", partnerName, request.getRemoteAddr()); - searchWrapper.doPost(request, response); - } - - /** - * Generate json error response. - * - * @param message the message - * @return the string - */ - /* - * This is the manual approach, however we could also create an object container for the error - * then use the Jackson ObjectWrite to dump the object to json instead. If it gets any more - * complicated we could do that approach so we don't have to manually trip over the JSON - * formatting. - */ - protected String generateJsonErrorResponse(String message) { - return String.format("{ \"errorMessage\" : %s }", message); - } - - /** - * Handle search servlet errors. - * - * @param errorMsg the error msg - * @param exc the exc - * @param response the response - * @throws IOException Signals that an I/O exception has occurred. - */ - public void handleSearchServletErrors(String errorMsg, Exception exc, - HttpServletResponse response) throws IOException { - - String errorLogMsg = - (exc == null ? errorMsg : errorMsg + ". Error:" + exc.getLocalizedMessage()); - - LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, errorLogMsg); - - response.setContentType("application/json"); - PrintWriter out = response.getWriter(); - out.println(generateJsonErrorResponse(errorMsg)); - out.close(); - } - - - /** - * Sets the servlet response. - * - * @param response the response - * @param postPayload the post payload - * - * @throws IOException Signals that an I/O exception has occurred. 
- */ - private void setServletResponse(HttpServletResponse response, String postPayload) - throws IOException { - - if (postPayload != null) { - response.setContentType("application/json"); - PrintWriter out = response.getWriter(); - out.println(postPayload); - out.close(); - } - } - - - -} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/servlet/VisualizationServlet.java b/src/main/java/org/onap/aai/sparky/viewandinspect/servlet/VisualizationServlet.java deleted file mode 100644 index 85ebe50..0000000 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/servlet/VisualizationServlet.java +++ /dev/null @@ -1,200 +0,0 @@ -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * Copyright © 2017 Amdocs - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ -package org.onap.aai.sparky.viewandinspect.servlet; - -import java.io.IOException; -import java.io.PrintWriter; - -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.commons.io.IOUtils; -import org.onap.aai.sparky.config.oxm.OxmModelLoader; -import org.onap.aai.sparky.dal.rest.OperationResult; -import org.onap.aai.sparky.dal.servlet.ResettableStreamHttpServletRequest; -import org.onap.aai.sparky.logging.AaiUiMsgs; -import org.onap.aai.sparky.util.NodeUtils; -import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; -import org.onap.aai.sparky.viewandinspect.services.VisualizationService; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; -import org.onap.aai.cl.mdc.MdcContext; - -/** - * A dedicated servlet for handling Front-End Visualization Requests and performing feats of magic - * to execute the right model/type/config driven queries to build the D3 visualization output JSON - * back to the FE. - * - * @author DAVEA - * - */ -public class VisualizationServlet extends HttpServlet { - - /** - * - */ - private static final long serialVersionUID = 4678831934652478571L; - private static final Logger LOG = - LoggerFactory.getInstance().getLogger(VisualizationServlet.class); - private static final String VISUALIZATION_API_ENDPOINT = "prepareVisualization"; - private final VisualizationService visualizationService; - - /** - * Instantiates a new visualization servlet. 
- * - * @throws Exception the exception - */ - public VisualizationServlet() throws Exception { - this.visualizationService = new VisualizationService(OxmModelLoader.getInstance()); - } - - /** - * Inits the. - * - * @param filterConfig the filter config - * @throws ServletException the servlet exception - */ - public void init(FilterConfig filterConfig) throws ServletException { - LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "init()"); - } - - /** - * Gets the request body. - * - * @param request the request - * @return the request body - */ - private String getRequestBody(HttpServletRequest request) { - - ResettableStreamHttpServletRequest requestWrapper = - new ResettableStreamHttpServletRequest(request); - - String body = null; - try { - body = IOUtils.toString(requestWrapper.getRequestBody()); - } catch (IOException exc) { - LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT, "Trying to get body from request", - exc.getLocalizedMessage()); - } - - return body; - } - - /* - * (non-Javadoc) - * - * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, - * javax.servlet.http.HttpServletResponse) - */ - @Override - protected void doGet(HttpServletRequest request, HttpServletResponse response) - throws ServletException, IOException { - doPost(request, response); - } - - /* - * (non-Javadoc) - * - * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, - * javax.servlet.http.HttpServletResponse) - */ - @Override - protected void doPost(HttpServletRequest request, HttpServletResponse response) - throws ServletException, IOException { - String txnID = request.getHeader("X-TransactionId"); - if (txnID == null) { - txnID = NodeUtils.getRandomTxnId(); - } - - String partnerName = request.getHeader("X-FromAppId"); - if (partnerName == null) { - partnerName = "Browser"; - } - - MdcContext.initialize(txnID, "AAI-UI", "", partnerName, request.getRemoteAddr()); - - String postRequestBody = getRequestBody(request); - - String requestUri = request.getRequestURI(); - OperationResult operationResult = null; - - /* - * For now we only have a single API call but there could be more in the future - */ - if (requestUri.endsWith(VISUALIZATION_API_ENDPOINT)) { - - /* - * Work our magic and determine the best way to interrogate AAI to get the stuff we are - * interested in. Perhaps it should be an edge-tag-query or perhaps it is a straight up - * derived self-link query. 
- */ - - /* - * Map request body to an interpreted API PoJo object - */ - QueryRequest queryRequest = visualizationService.analyzeQueryRequestBody(postRequestBody); - - if (queryRequest != null) { - operationResult = visualizationService.buildVisualizationUsingGenericQuery(queryRequest); - } else { - LOG.error(AaiUiMsgs.FAILED_TO_ANALYZE, - String.format("Failed to analyze post request query body = '%s'", postRequestBody)); - - operationResult = new OperationResult(); - operationResult.setResult(500, - String.format("Failed to analyze post request query body = '%s'", postRequestBody)); - - } - - } else { - // unhandled type - LOG.error(AaiUiMsgs.UNKNOWN_SERVER_ERROR, "Unhandled requestUri - " + requestUri); - operationResult = new OperationResult(); - operationResult.setResult(500, "Unknown Server Error: Unhandled requestUri = " + requestUri); - } - - PrintWriter out = response.getWriter(); - response.addHeader("Content-Type", "application/xml"); - - response.setStatus(operationResult.getResultCode()); - - if (operationResult.getResultCode() == 200) { - response.setContentLength(operationResult.getResult().length()); - out.print(operationResult.getResult()); - out.print("\n"); - } else { - response.setContentLength(operationResult.getResult().length()); - out.print(operationResult.getResult()); - out.print("\n"); - } - } - - @Override - public void destroy() { - super.destroy(); - visualizationService.shutdown(); - } -} diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/task/CollectNodeSelfLinkTask.java b/src/main/java/org/onap/aai/sparky/viewandinspect/task/CollectNodeSelfLinkTask.java index 3b750b3..8683299 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/task/CollectNodeSelfLinkTask.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/task/CollectNodeSelfLinkTask.java @@ -33,35 +33,6 @@ import org.onap.aai.sparky.dal.rest.OperationResult; public class CollectNodeSelfLinkTask implements Supplier { private String selfLink; - - /** - * @return the selfLink - */ - public String getSelfLink() { - return selfLink; - } - - /** - * @param selfLink the selfLink to set - */ - public void setSelfLink(String selfLink) { - this.selfLink = selfLink; - } - - /** - * @return the aaiProvider - */ - public ActiveInventoryDataProvider getAaiProvider() { - return aaiProvider; - } - - /** - * @param aaiProvider the aaiProvider to set - */ - public void setAaiProvider(ActiveInventoryDataProvider aaiProvider) { - this.aaiProvider = aaiProvider; - } - private ActiveInventoryDataProvider aaiProvider; /** diff --git a/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java b/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java index 518d569..7c59ffa 100644 --- a/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java +++ b/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java @@ -25,13 +25,13 @@ package org.onap.aai.sparky.viewandinspect.task; import java.util.Map; import java.util.function.Supplier; -import org.onap.aai.sparky.dal.aai.ActiveInventoryDataProvider; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; -import org.onap.aai.sparky.dal.rest.OperationResult; import org.onap.aai.sparky.logging.AaiUiMsgs; import 
org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction; -import org.onap.aai.cl.api.Logger; -import org.onap.aai.cl.eelf.LoggerFactory; import org.slf4j.MDC; /** @@ -43,7 +43,7 @@ public class PerformNodeSelfLinkProcessingTask implements Supplier contextMap; private ActiveInventoryConfig aaiConfig; @@ -51,12 +51,19 @@ public class PerformNodeSelfLinkProcessingTask implements Supplier contextMap; @@ -53,9 +53,9 @@ public class PerformSelfLinkDeterminationTask * @param aaiProvider the aai provider */ public PerformSelfLinkDeterminationTask(SelfLinkDeterminationTransaction txn, - String requestParameters, ActiveInventoryDataProvider aaiProvider) { + String requestParameters, ActiveInventoryAdapter aaiAdapter) { - this.aaiProvider = aaiProvider; + this.aaiAdapter = aaiAdapter; this.txn = txn; this.contextMap = MDC.getCopyOfContextMap(); } @@ -78,7 +78,7 @@ public class PerformSelfLinkDeterminationTask OperationResult opResult = null; try { opResult = - aaiProvider.queryActiveInventoryWithRetries(txn.getQueryString(), "application/json", + aaiAdapter.queryActiveInventoryWithRetries(txn.getQueryString(), "application/json", ActiveInventoryConfig.getConfig().getAaiRestConfig().getNumRequestRetries()); } catch (Exception exc) { opResult = new OperationResult(); diff --git a/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java b/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java new file mode 100644 index 0000000..ccce3b0 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java @@ -0,0 +1,783 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.viewinspect.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; +import org.onap.aai.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.SynchronizerConstants; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.MergableEntity; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class SearchableEntitySynchronizer. + */ +public class ViewInspectEntitySynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + /** + * The Class RetrySearchableEntitySyncContainer. + */ + private class RetrySearchableEntitySyncContainer { + NetworkTransaction txn; + SearchableEntity se; + + /** + * Instantiates a new retry searchable entity sync container. + * + * @param txn the txn + * @param se the se + */ + public RetrySearchableEntitySyncContainer(NetworkTransaction txn, SearchableEntity se) { + this.txn = txn; + this.se = se; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public SearchableEntity getSearchableEntity() { + return se; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ViewInspectEntitySynchronizer.class); + + private boolean allWorkEnumerated; + private Deque selflinks; + private Deque retryQueue; + private Map retryLimitTracker; + protected ExecutorService esPutExecutor; + + /** + * Instantiates a new searchable entity synchronizer. 
+ * + * @param indexName the index name + * @throws Exception the exception + */ + public ViewInspectEntitySynchronizer(ElasticSearchSchemaConfig schemaConfig, + int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig) throws Exception { + super(LOG, "SES", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(), + aaiStatConfig, esStatConfig); + this.allWorkEnumerated = false; + this.selflinks = new ConcurrentLinkedDeque(); + this.retryQueue = new ConcurrentLinkedDeque(); + this.retryLimitTracker = new ConcurrentHashMap(); + this.synchronizerName = "Searchable Entity Synchronizer"; + this.esPutExecutor = NodeUtils.createNamedExecutor("SES-ES-PUT", 5, LOG); + this.aaiEntityStats.intializeEntityCounters( + SearchableEntityLookup.getInstance().getSearchableEntityDescriptors().keySet()); + this.esEntityStats.intializeEntityCounters( + SearchableEntityLookup.getInstance().getSearchableEntityDescriptors().keySet()); + this.syncDurationInMs = -1; + } + + /** + * Collect all the work. + * + * @return the operation state + */ + private OperationState collectAllTheWork() { + final Map contextMap = MDC.getCopyOfContextMap(); + Map descriptorMap = + SearchableEntityLookup.getInstance().getSearchableEntityDescriptors(); + + if (descriptorMap.isEmpty()) { + LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES); + LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES); + return OperationState.ERROR; + } + + Collection syncTypes = descriptorMap.keySet(); + + /* + * Collection syncTypes = new ArrayList(); syncTypes.add("service-instance"); + */ + + try { + + /* + * launch a parallel async thread to process the documents for each entity-type (to max the of + * the configured executor anyway) + */ + + aaiWorkOnHand.set(syncTypes.size()); + + for (String key : syncTypes) { + + supplyAsync(new Supplier() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + // TODO -> LOG, what should be logged here? + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred getting data from AAI. Error = " + error.getMessage()); + } + }); + + } + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + // TODO -> LOG, waht should be logged here? 
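+        // Editor's note: a hedged sketch only, not part of the original change. One option,
+        // using the AaiUiMsgs.ERROR_GENERIC key already referenced in this class (the message
+        // text below is illustrative), would be:
+        //   LOG.error(AaiUiMsgs.ERROR_GENERIC,
+        //       "Self-link enumeration failed during sync. Error = " + exc.getMessage());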
+ } + + return OperationState.OK; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "SearchableEntitySynchronizer", "", "Sync", ""); + + resetCounters(); + this.allWorkEnumerated = false; + syncStartedTimeStampInMs = System.currentTimeMillis(); + collectAllTheWork(); + + return OperationState.OK; + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + String message = "Could not deserialize JSON (representing operation result) as node tree. " + + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + SearchableOxmEntityDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + + descriptor = SearchableEntityLookup.getInstance().getSearchableEntityDescriptors() + .get(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + + if (descriptor.hasSearchableAttributes()) { + selflinks.add(new SelfLinkDescriptor(resourceLink, + SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType)); + } + + } + } + } + } + + } + + /** + * Sync entity types. 
+ */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = OxmEntityLookup.getInstance().getEntityDescriptors() + .get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + linkDescriptor.getSelfLink()); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + + } + + } + + /** + * Perform document upsert. + * + * @param esGetTxn the es get txn + * @param se the se + */ + protected void performDocumentUpsert(NetworkTransaction esGetTxn, SearchableEntity se) { + /** + *

+     * As part of the response processing we need to do the following:
+     * 1. Extract the version (if present); it will be the ETAG when we use the
+     *    Search-Abstraction-Service.
+     * 2. Spawn the next task, which is to do the PUT operation into elastic with or without the
+     *    version tag:
+     *    a) if version is null or RC=404, then standard PUT, no _update with version tag
+     *    b) if version != null, do PUT with _update?version=versionNumber in the URI to elastic
    + */ + String link = null; + try { + link = getElasticFullUrl("/" + se.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + String versionNumber = null; + boolean wasEntryDiscovered = false; + if (esGetTxn.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, se.getEntityPrimaryKeyValue()); + } else if (esGetTxn.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + try { + versionNumber = NodeUtils.extractFieldValueFromObject( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_version"); + } catch (IOException exc) { + String message = + "Error extracting version number from response, aborting searchable entity sync of " + + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + /* + * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we + * return. + */ + LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetTxn.getOperationResult().getResultCode())); + return; + } + + try { + String jsonPayload = null; + if (wasEntryDiscovered) { + try { + ArrayList sourceObject = new ArrayList(); + NodeUtils.extractObjectsByKey( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_source", sourceObject); + + if (!sourceObject.isEmpty()) { + String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); + MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); + ObjectReader updater = mapper.readerForUpdating(me); + MergableEntity merged = updater.readValue(NodeUtils.convertObjectToJson(se, false)); + jsonPayload = mapper.writeValueAsString(merged); + } + } catch (IOException exc) { + String message = + "Error extracting source value from response, aborting searchable entity sync of " + + se.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + jsonPayload = se.getAsJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = elasticSearchAdapter.buildBulkImportOperationRequest( + getIndexName(), ElasticSearchConfig.getConfig().getType(), se.getId(), versionNumber, + jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetTxn.getEntityType()); + transactionTracker.setDescriptor(esGetTxn.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), + requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Searchable entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, se); + } + }); + } + + } else { + + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync( + new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = + "Searchable entity sync UPDATE PUT error - " + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, se); + } + }); + } + } + } catch (Exception exc) { + String message = "Exception caught during searchable entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } + + /** + * Populate searchable entity document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + protected void populateSearchableEntityDocument(SearchableEntity doc, String result, + OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { + + doc.setEntityType(resultDescriptor.getEntityName()); + + JsonNode entityNode = mapper.readTree(result); + + List primaryKeyValues = new ArrayList(); + String pkeyValue = null; + + SearchableOxmEntityDescriptor searchableDescriptor = SearchableEntityLookup.getInstance() + .getSearchableEntityDescriptors().get(resultDescriptor.getEntityName()); + + for (String keyName : searchableDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + String message = "populateSearchableEntityDocument(), pKeyValue is null for entityType = " + + resultDescriptor.getEntityName(); + LOG.warn(AaiUiMsgs.WARN_GENERIC, message); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + + final List searchTagFields = searchableDescriptor.getSearchableAttributes(); + + /* + * Based on configuration, use the configured field names for this entity-Type to build a + * multi-value collection of search tags for elastic search entity search criteria. + */ + for (String searchTagField : searchTagFields) { + String searchTagValue = NodeUtils.getNodeFieldAsText(entityNode, searchTagField); + if (searchTagValue != null && !searchTagValue.isEmpty()) { + doc.addSearchTagWithKey(searchTagValue, searchTagField); + } + } + } + + /** + * Fetch document for upsert. + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + if (!txn.getOperationResult().wasSuccessful()) { + String message = "Self link failure. Result - " + txn.getOperationResult().getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return; + } + + SearchableOxmEntityDescriptor searchableDescriptor = SearchableEntityLookup.getInstance() + .getSearchableEntityDescriptors().get(txn.getDescriptor().getEntityName()); + + try { + if (searchableDescriptor.hasSearchableAttributes()) { + + final String jsonResult = txn.getOperationResult().getResult(); + if (jsonResult != null && jsonResult.length() > 0) { + + SearchableEntity se = new SearchableEntity(); + se.setLink(ActiveInventoryConfig.extractResourcePath(txn.getLink())); + populateSearchableEntityDocument(se, jsonResult, txn.getDescriptor()); + se.deriveFields(); + + String link = null; + try { + link = getElasticFullUrl("/" + se.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, se); + } + }); + } + } + + } + } catch (JsonProcessingException exc) { + // TODO -> LOG, waht should be logged here? + } catch (IOException exc) { + // TODO -> LOG, waht should be logged here? 
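+      // Editor's note: a hedged sketch only, not part of the original change. A call in the
+      // style already used by processEntityTypeSelfLinks() in this class (the message text is
+      // illustrative) could be:
+      //   LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR,
+      //       "Failed to populate searchable entity document. Error = " + exc.getLocalizedMessage());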
+ } + } + + /** + * Process store document result. + * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param se the se + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, SearchableEntity se) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(se.getId())) { + esWorkOnHand.incrementAndGet(); + + RetrySearchableEntitySyncContainer rsc = + new RetrySearchableEntitySyncContainer(esGetResult, se); + retryQueue.push(rsc); + + String message = "Store document failed during searchable entity synchronization" + + " due to version conflict. Entity will be re-synced."; + LOG.warn(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } else { + String message = + "Store document failed during searchable entity synchronization with result code " + + or.getResultCode() + " and result message " + or.getResult(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } + } + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetrySearchableEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + SearchableEntity se = rsc.getSearchableEntity(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = getElasticFullUrl("/" + se.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, se); + } + }); + } + + } + } + } + + /** + * Should allow retry. 
+ * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Searchable entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return this.getStatReport(syncDurationInMs, showFinalReport); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + return true; + } + +} diff --git a/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java b/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java new file mode 100644 index 0000000..c2ecbb1 --- /dev/null +++ b/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java @@ -0,0 +1,129 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.viewinspect.sync; + +import org.onap.aai.sparky.crossentityreference.sync.CrossEntityReferenceSynchronizer; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class ViewInspectSyncController extends SyncControllerImpl + implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + private ActiveInventoryAdapter aaiAdapter; + private ElasticSearchAdapter esAdapter; + private ElasticSearchSchemaConfig schemaConfig; + private ElasticSearchEndpointConfig endpointConfig; + + public ViewInspectSyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig) + throws Exception { + super(syncControllerConfig); + + + // final String controllerName = "View and Inspect Entity Synchronizer"; + + this.aaiAdapter = aaiAdapter; + this.esAdapter = esAdapter; + this.schemaConfig = schemaConfig; + this.endpointConfig = endpointConfig; + IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig, + endpointConfig, ElasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(indexValidator); + + + ViewInspectEntitySynchronizer ses = new ViewInspectEntitySynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig); + ses.setAaiAdapter(aaiAdapter); + ses.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(ses); + + CrossEntityReferenceSynchronizer cers = new CrossEntityReferenceSynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig); + + cers.setAaiAdapter(aaiAdapter); + cers.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(cers); + + IndexCleaner indexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(indexCleaner); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + public ActiveInventoryAdapter getAaiAdapter() { + return this.aaiAdapter; + } + + public ElasticSearchAdapter getElasticSearchAdapter() { + return this.esAdapter; + } + + public ElasticSearchEndpointConfig getendpointConfig() { + return this.endpointConfig; + } + + public 
+  public ElasticSearchSchemaConfig getschemaConfig() {
+    return this.schemaConfig;
+  }
+
+  @Override
+  public void registerController() {
+    if (syncControllerRegistry != null) {
+      if (syncControllerConfig.isEnabled()) {
+        syncControllerRegistry.registerSyncController(this);
+      }
+    }
+
+  }
+}
diff --git a/src/main/resources/extApps/aai.war b/src/main/resources/extApps/aai.war
new file mode 100644
index 0000000..f6d265a
Binary files /dev/null and b/src/main/resources/extApps/aai.war differ
diff --git a/src/main/resources/extApps/aai.xml b/src/main/resources/extApps/aai.xml
index 476d0a8..f7f8073 100644
--- a/src/main/resources/extApps/aai.xml
+++ b/src/main/resources/extApps/aai.xml
@@ -4,6 +4,5 @@
   /extApps/aai.war
-  /services/aai/webapp
   /staticContent/aai
diff --git a/src/main/resources/logging/AAIUIMsgs.properties b/src/main/resources/logging/AAIUIMsgs.properties
index a2c12ca..1843604 100644
--- a/src/main/resources/logging/AAIUIMsgs.properties
+++ b/src/main/resources/logging/AAIUIMsgs.properties
@@ -34,6 +34,14 @@ VISUALIZATION_GRAPH_OUTPUT=\
   AAIUI0003I|\
   Generated graph output has {0} node(s) and {1} link(s)
 
+NODE_INTEGRITY_ALREADY_PROCESSED=\
+  AAIUI0004I|\
+  Node integrity for nodeId, {0}, has already been processed
+
+SKIPPING_PROCESS_NODE_INTEGRITY=\
+  AAIUI0005I|\
+  Skipping processNodeIntegrity() for node, {0}, because node integrity overlay is disabled
+
 MAX_EVALUATION_ATTEMPTS_EXCEEDED=\
   AAIUI0006I|\
   Evaluate node depths exceeded max evaluation attempts
@@ -88,7 +96,7 @@ OXM_READ_ERROR_NONVERBOSE=\
 
 OXM_LOAD_SUCCESS=\
   AAIUI00019I|\
-  OXM File Loaded Successfully
+  OXM file version v{0} loaded successfully
 
 OXM_PARSE_ERROR_NONVERBOSE=\
   AAIUI00020I|\
@@ -114,13 +122,25 @@ DATA_CACHE_SUCCESS=\
   AAIUI00025D|\
   InMemoryEntityCache cached data with key = {0}
 
+ATTRIBUTES_UPDATE_METHOD_CALLED=\
+  AAIUI00026I|\
+  updateObjectAttribute called for : {0} ATTUID : {1} Attributes : {2}
+
+ATTRIBUTES_HANDLING_EDIT=\
+  AAIUI00027I|\
+  Handling Edit Attributes: requestUri = {0} Body : {1}
+
 RESTFULL_OP_COMPLETE=\
   AAIUI00028I|\
   doRestfulOperation() operation for {0} execution time = {1} ms for link = {2}, ResultCode = {3}
+
+DI_MS_TIME_FOR_DATA_FETCH=\
+  AAIUI00029I|\
+  TabularService data fetch time: {0} ms. Status: {1}.
 
 COOKIE_FOUND=\
   AAIUI00030I|\
-  ESHr cookie found in the request <{0}>
+  attESHr cookie found in the request <{0}>
 
 INDEX_ALREADY_EXISTS=\
   AAIUI00031I|\
@@ -281,6 +301,19 @@ LOGIN_FILTER_INFO=\
 LOGIN_FILTER_DEBUG=\
   AAIUI00072D|\
   {0}
+
+DR_PROCESSING_FAILURE=\
+  AAIUI00073I|\
+  Failure to resolve proxied request. Response code: {0} for proxy payload: {1}
+
+DR_PROCESSING_TIME=\
+  AAIUI00074I|\
+  Time taken to resolve proxied request: {0} ms
+
+DR_PROXY_FROM_TO=\
+  AAIUI00075I|\
+  Proxying request from url: {0} to: {1}
+
 
 #-------------------- 300 Series Errors --------------------#
 
@@ -295,6 +328,14 @@ QUERY_AAI_WAIT_INTERRUPTION=\
 EXECUTOR_SERV_EXCEPTION=\
   AAIUI3003E|\
   Thread: {0}. The following exception has occurred: {1}
+
+ATTRIBUTES_NOT_UPDATED_EXCEPTION=\
+  AAIUI3004E|\
+  Attributes not updated. {0}
+
+ATTRIBUTES_NOT_UPDATED_MESSAGE=\
+  AAIUI3005E|\
+  {0}
 
 SYNC_NOT_VALID_STATE_DURING_REQUEST=\
   AAIUI3006E|\
@@ -452,6 +493,10 @@ PEGGING_ERROR=\
   AAIUI30044E|\
   Pegging UNKNOWN_EXCEPTION due to unexpected exception = {0}
 
+ATTRIBUTES_ERROR_LOADING_MODEL_VERSION=\
+  AAIUI30045E|\
+  Model Version Error. {0} Not Found or not loaded successfully.
+
 INVALID_REQUEST=\
   AAIUI30046E|\
   {0}
@@ -470,7 +515,7 @@ DI_DATA_NOT_FOUND_VERBOSE=\
 
 OXM_FILE_NOT_FOUND=\
   AAIUI30050E|\
-  Unable to find latest OXM file in directory: {0}
+  Unable to find any OXM file.
 
 OXM_READ_ERROR_VERBOSE=\
   AAIUI30051E|\
@@ -511,10 +556,6 @@ ERROR_SORTING_VIOLATION_DATA=\
 
 CONFIGURATION_ERROR=\
   AAIUI30060E|\
   Failed to load {0} configurations
-
-ERROR_SERVLET_PROCESSSING=\
-  AAIUI30061E|\
-  Failure during servlet request processing. Error: {0}
 
 QUERY_AAI_RETRY_FAILURE_WITH_SEQ=\
   AAIUI30062E|\
@@ -548,6 +589,10 @@ RESTFULL_OP_ERROR_VERBOSE=\
   AAIUI30069E|\
   Error retrieving link: {0} from restful endpoint due to error: {1}
 
+ATTRIBUTES_ERROR_GETTING_AAI_CONFIG_OR_ADAPTER=\
+  AAIUI30070E|\
+  Error in getting AAI configuration or Adaptor: {0}
+
 USER_AUTHORIZATION_FILE_UNAVAILABLE=\
   AAIUI30071E|\
   User authorization file unavailable. User {0} cannot be authorized.
@@ -564,6 +609,10 @@ FILE_NOT_FOUND=\
   AAIUI30074E|\
   Failed to find file: {0}
 
+ATTRIBUTES_USER_NOT_AUTHORIZED_TO_UPDATE=\
+  AAIUI30075E|\
+  User {0} is not authorized for Attributes update
+
 SELF_LINK_NULL_EMPTY_RESPONSE=\
   AAIUI30076E|\
   AIN - Failed to process null or empty pathed self link response
@@ -700,6 +749,10 @@ VISUALIZATION_OUTPUT_ERROR=\
   AAIUI300109E|\
   An error occurred while preparing D3 visualization output: {0}
 
+FAILED_TO_PROCESS_NODE_INTEGRITY=\
+  AAIUI300110E|\
+  Failed to process node integrity: {0}
+
 FAILURE_TO_PROCESS_REQUEST=\
   AAIUI300111E\
   Failure to process request. {0}
@@ -732,6 +785,10 @@ ADD_SEARCH_TARGET_ATTRIBUTES_FAILED=\
   AAIUI300118E|\
   Add SearchTargetAttributes failure: {0}
 
+NODE_INTEGRITY_OVERLAY_ERROR=\
+  AAIUI300119E|\
+  Error processing node integrity overlay: {0}
+
 ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES=\
   AAIUI300120E|\
   Failed to load suggestible entities in OXM file. Synchronizer stopped.
@@ -778,9 +835,53 @@ ERROR_D3_GRAPH_VISUALIZATION=\
 
 ERROR_AAI_QUERY_WITH_RETRY=\
   AAIUI300130E|\
-  Querying AAI with retry failed due to exception: {0}
+  Querying AAI with retry failed due to exception: {0}
+
+ERROR_READING_JSON_SCHEMA=\
+  AAIUI300131E|\
+  Error reading JSON schema from: {0}
+
+VIEW_NAME_NOT_SUPPORTED=\
+  AAIUI300132E|\
+  View name not supported: {0}
+
+ERROR_FETCHING_FILTER_VALUES=\
+  AAIUI300133E|\
+  Result {0} while fetching filter values for filter {1}.
+
+ERROR_PROCESSING_WIDGET_REQUEST=\
+  AAIUI300134E|\
+  Error processing for query: {0} in view: {1}
+
+
+ERROR_FILTERS_NOT_FOUND=\
+  AAIUI300135E|\
+  No filters were provided as part of request.
+
+DR_REQUEST_URI_FOR_PROXY_UNKNOWN=\
+  AAIUI300136E|\
+  Failure to generate routing service URL from: {0}
-
+OXM_LOADING_ERROR=\
+  AAIUI300137E|\
+  OXM loading error. Reason: {0}
+
+URI_DECODING_EXCEPTION=\
+  AAIUI300138E|\
+  Error decoding exception. {0}
+
+ENCRYPTION_ERROR=\
+  AAIUI300139E|\
+  Encryption error for value: {0}. Cause: {1}
+
+DECRYPTION_ERROR=\
+  AAIUI300140E|\
+  Decryption error for encrypted value: {0}. Cause: {1}
+
+RESOURCE_NOT_FOUND=\
+  AAIUI300141E|\
+  Unsupported request. Resource not found: {0}
+
 
 #-------------------- 900 Series Errors --------------------#
 
 UNKNOWN_SERVER_ERROR=\
@@ -793,8 +894,8 @@ SEARCH_ADAPTER_ERROR=\
 QUERY_PARAM_EXTRACTION_ERROR=\
   AAIUI9003E|\
-  Query Parameter Self-Link Extraction Error: {0}
-
+  Query Parameter Self-Link Extraction Error: {0}
+
 ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK=\
   AAIUI9004E|\
-  Error extracting resource path from self-link. Error: {0}
\ No newline at end of file
+  Error extracting resource path from self-link. Error: {0}
diff --git a/src/main/scripts/encNameValue.sh b/src/main/scripts/encNameValue.sh
new file mode 100644
index 0000000..daefd00
--- /dev/null
+++ b/src/main/scripts/encNameValue.sh
@@ -0,0 +1,20 @@
+# The script invokes the com.att.aai.util.EncryptedPropValue class to generate an encrypted value
+# e.g.
+# ./encNameValue.sh odl.auth.password admin
+# will return:
+# odl.auth.password.x=f1e2c25183ef4b4ff655e7cd94d0c472
+#
+if [ "$#" -ne 2 ]; then
+  echo "Illegal number of parameters (expected 2)"
+  echo "Usage: `basename $0` <name> <value>" 1>&2
+  exit 1
+fi
+
+# On Windows we must use a different CLASSPATH separator character
+if [ "$(expr substr $(uname -s) 1 5)" == "MINGW" ]; then
+  CPSEP=\;
+else
+  CPSEP=:
+fi
+
+java -cp ".${CPSEP}../extJars/*" com.att.aai.util.EncryptedPropValue -n $1 -v $2
diff --git a/src/main/scripts/start.sh b/src/main/scripts/start.sh
index 3d1af06..fc063ad 100644
--- a/src/main/scripts/start.sh
+++ b/src/main/scripts/start.sh
@@ -2,6 +2,7 @@
 BASEDIR="/opt/app/sparky"
 AJSC_HOME="$BASEDIR"
+AJSC_CONF_HOME="$BASEDIR/bundleconfig/"
 
 if [ -z "$CONFIG_HOME" ]; then
 	echo "CONFIG_HOME must be set in order to start up process"
@@ -12,14 +13,64 @@
 if [ -z "$KEY_STORE_PASSWORD" ]; then
 	echo "KEY_STORE_PASSWORD must be set in order to start up process"
 	exit 1
 else
-	echo -e "KEY_STORE_PASSWORD=$KEY_STORE_PASSWORD\n" >> $AJSC_CONF_HOME/etc/sysprops/sys-props.properties
+	echo "KEY_STORE_PASSWORD=$KEY_STORE_PASSWORD\n" >> $AJSC_CONF_HOME/etc/sysprops/sys-props.properties
 fi
 
 if [ -z "$KEY_MANAGER_PASSWORD" ]; then
 	echo "KEY_MANAGER_PASSWORD must be set in order to start up process"
 	exit 1
 else
-	echo -e "KEY_MANAGER_PASSWORD=$KEY_MANAGER_PASSWORD\n" >> $AJSC_CONF_HOME/etc/sysprops/sys-props.properties
+	echo "KEY_MANAGER_PASSWORD=$KEY_MANAGER_PASSWORD\n" >> $AJSC_CONF_HOME/etc/sysprops/sys-props.properties
+fi
+
+if [ -z "$UI_HTTPS_PORT" ] && [ -z "$UI_HTTP_PORT" ]; then
+	echo "Either UI_HTTPS_PORT or UI_HTTP_PORT must be set in order to start up process"
+	exit 1
+fi
+
+# Add any routes configured at deploy time to the sparky deployment
+if [ -n "$DYNAMIC_ROUTES" ]; then
+   if [ -e /opt/app/sparky/services/inventory-ui-service_v1.zip ]; then
+      echo "Adding the following dynamic routes to the deployment: "
+      mkdir -p /tmp/sparky/v1/routes
+      for f in `ls $DYNAMIC_ROUTES`
+      do
+         cp $DYNAMIC_ROUTES/$f /tmp/sparky/v1/routes
+         echo "Adding dynamic route $DYNAMIC_ROUTES/$f"
+      done
+      jar uf /opt/app/sparky/services/inventory-ui-service_v1.zip* -C /tmp/ sparky
+      rm -rf /tmp/sparky
+   fi
+fi
+
+# Add any spring bean configuration files to the sparky deployment
+if [ -n "$SERVICE_BEANS" ]; then
+   if [ -e /opt/app/sparky/services/inventory-ui-service_v1.zip ]; then
+      echo "Adding the following dynamic service beans to the deployment: "
+      mkdir -p /tmp/sparky/v1/conf
+      for f in `ls $SERVICE_BEANS`
+      do
+         cp $SERVICE_BEANS/$f /tmp/sparky/v1/conf
+         echo "Adding dynamic service bean $SERVICE_BEANS/$f"
+      done
+      jar uf /opt/app/sparky/services/inventory-ui-service_v1.zip* -C /tmp/ sparky
+      rm -rf /tmp/sparky
+   fi
+fi
+
+# Add any dynamic component configuration files to the sparky deployment
+if [ -n "$COMPLIB" ]; then
+   if [ -e /opt/app/sparky/services/inventory-ui-service_v1.zip ]; then
+      echo "Adding the following dynamic libraries to the deployment: "
+      mkdir -p /tmp/sparky/v1/lib
+      for f in `ls $COMPLIB`
+      do
+         cp $COMPLIB/$f /tmp/sparky/v1/lib
+         echo "Adding dynamic library $COMPLIB/$f"
+      done
+      jar uf /opt/app/sparky/services/inventory-ui-service_v1.zip* -C /tmp/ sparky
+      rm -rf /tmp/sparky
+   fi
 fi
 
 CLASSPATH="$AJSC_HOME/lib/ajsc-runner-2.0.0.jar"
@@ -39,4 +90,8 @@ PROPS="$PROPS -DCONFIG_HOME=$CONFIG_HOME"
 
 echo $CLASSPATH
 
-/usr/lib/jvm/java-8-openjdk-amd64/bin/java -Xms1024m -Xmx4096m $PROPS -classpath $CLASSPATH com.att.ajsc.runner.Runner context=/ port=9517
+if [ "$UI_HTTPS_PORT" ]; then
+   /usr/lib/jvm/java-8-openjdk-amd64/bin/java -Xms1024m -Xmx4096m $PROPS -classpath $CLASSPATH com.att.ajsc.runner.Runner context=/ sslport=$UI_HTTPS_PORT
+elif [ "$UI_HTTP_PORT" ]; then
+   /usr/lib/jvm/java-8-openjdk-amd64/bin/java -Xms1024m -Xmx4096m $PROPS -classpath $CLASSPATH com.att.ajsc.runner.Runner context=/ port=$UI_HTTP_PORT
+fi
\ No newline at end of file
-- 
cgit 1.2.3-korg
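A note on the retry bookkeeping in the synchronizer above: shouldAllowRetry() caps how many times a failed entity is re-queued for indexing by counting attempts per entity id in retryLimitTracker. The sketch below isolates that pattern so it can be read outside the synchronizer; the RetryLimiter class name and the limit of 6 attempts are illustrative assumptions only, since the patch keeps this logic inline and takes its limit from RETRY_COUNT_PER_ENTITY_LIMIT.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative helper; the synchronizers in this patch implement the same logic inline.
public class RetryLimiter {

  // Assumed value for the sketch; the patch reads RETRY_COUNT_PER_ENTITY_LIMIT instead.
  private static final int RETRY_COUNT_PER_ENTITY_LIMIT = 6;

  // Per-entity retry counts, keyed by entity id.
  private final Map<String, Integer> retryLimitTracker = new ConcurrentHashMap<>();

  // Returns true until the entity has exhausted its retry budget.
  public boolean shouldAllowRetry(String id) {
    Integer currentCount = retryLimitTracker.get(id);
    if (currentCount == null) {
      retryLimitTracker.put(id, 1);
      return true;
    }
    if (currentCount >= RETRY_COUNT_PER_ENTITY_LIMIT) {
      return false; // caller logs ES_SEARCHABLE_ENTITY_SYNC_ERROR and stops re-syncing this id
    }
    retryLimitTracker.put(id, currentCount + 1);
    return true;
  }

  // Clearing the counter after a successful sync lets later failures start from zero again.
  public void reset(String id) {
    retryLimitTracker.remove(id);
  }
}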
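ViewInspectSyncController, added earlier in this patch, also serves as the template a Sparky developer would follow when a custom front-end view needs its own search index. The following is a rough sketch under stated assumptions: MyViewSyncController and MyViewEntitySynchronizer are hypothetical names for the developer's own classes, while the superclass, the registration hooks (registerIndexValidator, registerEntitySynchronizer, registerIndexCleaner, registerController) and the constructor shape all mirror what the patch introduces.

package org.onap.aai.sparky.myview.sync; // hypothetical package for a custom view

import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
import org.onap.aai.sparky.dal.ElasticSearchAdapter;
import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner;
import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
import org.onap.aai.sparky.sync.IndexIntegrityValidator;
import org.onap.aai.sparky.sync.SyncControllerImpl;
import org.onap.aai.sparky.sync.SyncControllerRegistrar;
import org.onap.aai.sparky.sync.SyncControllerRegistry;
import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
import org.onap.aai.sparky.sync.config.SyncControllerConfig;

public class MyViewSyncController extends SyncControllerImpl implements SyncControllerRegistrar {

  private SyncControllerRegistry syncControllerRegistry;

  public MyViewSyncController(SyncControllerConfig syncControllerConfig,
      ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
      ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig,
      NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig)
      throws Exception {
    super(syncControllerConfig);

    // Make sure the view's index exists and matches the expected schema before syncing.
    registerIndexValidator(new IndexIntegrityValidator(esAdapter, schemaConfig, endpointConfig,
        ElasticSearchSchemaFactory.getIndexSchema(schemaConfig)));

    // MyViewEntitySynchronizer is the developer-supplied synchronizer for the custom view,
    // modeled on ViewInspectEntitySynchronizer / AutosuggestionSynchronizer from this patch.
    MyViewEntitySynchronizer synchronizer = new MyViewEntitySynchronizer(schemaConfig,
        syncControllerConfig.getNumInternalSyncWorkers(),
        syncControllerConfig.getNumSyncActiveInventoryWorkers(),
        syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig);
    synchronizer.setAaiAdapter(aaiAdapter);
    synchronizer.setElasticSearchAdapter(esAdapter);
    registerEntitySynchronizer(synchronizer);

    // Remove stale documents from the index after each full sync pass.
    registerIndexCleaner(new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig));
  }

  public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) {
    this.syncControllerRegistry = syncControllerRegistry;
  }

  @Override
  public void registerController() {
    // Only enabled controllers are handed to the shared registry, as in ViewInspectSyncController;
    // syncControllerConfig is assumed to be a protected field of SyncControllerImpl, matching its
    // usage in the patch.
    if (syncControllerRegistry != null && syncControllerConfig.isEnabled()) {
      syncControllerRegistry.registerSyncController(this);
    }
  }
}

At deploy time, a Spring bean definition that constructs such a controller can be dropped into the directory referenced by the new SERVICE_BEANS variable, with any supporting route or library files placed under DYNAMIC_ROUTES and COMPLIB; the updated start.sh merges all three into the inventory-ui-service deployment before the AJSC runner is launched.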