author    da490c <dave.adams@amdocs.com>  2018-03-22 00:32:52 -0400
committer da490c <dave.adams@amdocs.com>  2018-03-22 09:34:25 -0400
commit    ba31685194c77ef140411531299696ae701385d4 (patch)
tree      912f7d5b3378901ccabb8df52b26866d74572f10 /sparkybe-onap-service/src/main/java/org/onap
parent    ef7f255958e541ffaec0fd2a977440dd7b6fd6b8 (diff)

Convert Sparky to Spring-Boot

Issue-ID: AAI-599
Change-Id: If474dd02794f442fdddcd90f62fb75e0d6b907e7
Signed-off-by: da490c <dave.adams@amdocs.com>
Diffstat (limited to 'sparkybe-onap-service/src/main/java/org/onap')
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java210
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java129
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java176
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java241
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java782
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java384
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java94
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java178
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AveragingRingBuffer.java121
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/ComponentStatistics.java80
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistogramSampler.java286
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java177
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java105
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java776
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java197
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java99
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java90
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/SparkyResourceLoader.java125
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java78
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java67
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java136
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java61
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java137
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java71
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java68
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java132
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java195
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java33
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java119
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java75
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java54
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java181
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java937
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java404
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java157
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/GizmoAdapter.java336
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java159
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java285
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java139
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java265
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/exception/ElasticSearchOperationException.java53
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java207
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/HttpMethod.java33
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientConstructionException.java38
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientFactory.java97
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestOperationalStatistics.java255
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfig.java179
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java182
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java362
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java79
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java67
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java69
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java62
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java143
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java180
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java289
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java219
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java472
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/LoggingUtils.java43
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java204
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java407
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummary.java53
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummaryBucket.java46
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchEntityProperties.java49
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchResponse.java102
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchServiceAdapter.java139
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java188
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java36
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java76
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java73
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java39
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java119
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java144
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterQueryBuilder.java218
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilteredSearchHelper.java158
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/UiFiltersEntityConverter.java180
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java158
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfig.java58
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfig.java57
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterConfig.java188
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfig.java99
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfig.java70
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfig.java68
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfig.java68
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/AggregationEntity.java80
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/BoolQueryBuilder.java123
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/FilteredAggregationQueryBuilder.java65
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/MatchFilterCriteriaEntity.java77
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/SearchFilter.java88
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterEntity.java180
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterValueEntity.java80
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFiltersEntity.java53
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java76
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/BaseCookieDecryptor.java51
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/CookieDecryptor.java31
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/EcompSso.java155
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactory.java78
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactoryImpl.java205
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java267
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java236
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java213
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/UserManager.java170
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java124
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/RolesConfig.java90
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/SubscriptionServiceProcessor.java74
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/config/SubscriptionConfig.java139
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Message.java60
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayload.java128
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Params.java60
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Payload.java60
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/services/SubscriptionService.java65
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java524
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java604
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java97
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java57
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java178
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java67
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java58
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncController.java96
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java682
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java29
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java50
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java222
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java65
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java135
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java75
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java72
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java77
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java239
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java305
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java99
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java111
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java41
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java97
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java100
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java59
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java78
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java142
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java90
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java327
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java56
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java32
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java32
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java97
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java90
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java72
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java82
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformGizmoRetrieval.java95
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java90
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java55
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java101
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java487
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java193
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/EncryptConvertor.java149
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/Encryptor.java155
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java61
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/JsonXmlConverter.java79
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/NodeUtils.java896
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RawByteHelper.java176
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RestletUtils.java119
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java100
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/TreeWalker.java136
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java61
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java99
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/SparkyConstants.java102
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java102
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java174
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java831
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java93
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntry.java81
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoEntity.java98
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipEntity.java103
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipHint.java77
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphMeta.java147
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java58
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessage.java70
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java207
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLink.java77
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebug.java59
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java207
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java109
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryParams.java57
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequest.java47
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToProperty.java64
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java96
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipData.java63
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDirectionality.java42
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java57
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java116
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java80
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLink.java75
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNode.java248
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java36
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingState.java31
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java426
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseGizmoVisualizationContext.java990
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContext.java1631
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationService.java382
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java55
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java52
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java305
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformGizmoNodeSelfLinkProcessingTask.java128
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java129
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java95
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java779
-rw-r--r--sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java122
206 files changed, 34383 insertions, 0 deletions
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java
new file mode 100644
index 0000000..be29889
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java
@@ -0,0 +1,210 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.aggregatevnf.search;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.json.JsonObject;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.camel.Exchange;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.logging.util.ServletUtils;
+import org.onap.aai.sparky.search.filters.FilterQueryBuilder;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.search.filters.entity.SearchFilter;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+public class AggregateSummaryProcessor {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(AggregateSummaryProcessor.class);
+
+ private static final String KEY_FILTERS = "filters";
+
+ private ElasticSearchAdapter elasticSearchAdapter = null;
+
+ private String vnfAggregationIndexName;
+ private FiltersConfig filtersConfig;
+
+ public AggregateSummaryProcessor(ElasticSearchAdapter elasticSearchAdapter, FiltersConfig filtersConfig) {
+ this.elasticSearchAdapter = elasticSearchAdapter;
+ this.filtersConfig = filtersConfig;
+ }
+
+ public void setVnfAggregationIndexName(String vnfAggregationIndexName) {
+ this.vnfAggregationIndexName = vnfAggregationIndexName;
+ }
+
+ public void getFilteredAggregation(Exchange exchange) {
+
+ HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class);
+ ServletUtils.setUpMdcContext(exchange, request);
+
+
+ try {
+ String payload = exchange.getIn().getBody(String.class);
+
+ if (payload == null || payload.isEmpty()) {
+
+ LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, "Request Payload is empty");
+
+ /*
+ * Don't throw back an error, just return an empty set
+ */
+
+ } else {
+
+ JSONObject parameters = new JSONObject(payload);
+
+ JSONArray requestFilters = null;
+ if (parameters.has(KEY_FILTERS)) {
+ requestFilters = parameters.getJSONArray(KEY_FILTERS);
+ } else {
+
+ JSONObject zeroResponsePayload = new JSONObject();
+ zeroResponsePayload.put("count", 0);
+ //response.setStatus(Status.SUCCESS_OK);
+ //response.setEntity(zeroResponsePayload.toString(), MediaType.APPLICATION_JSON);
+ exchange.getOut().setBody(zeroResponsePayload.toString());
+
+ LOG.error(AaiUiMsgs.ERROR_FILTERS_NOT_FOUND);
+ return;
+ }
+
+ if (requestFilters != null && requestFilters.length() > 0) {
+ List<JSONObject> filtersToQuery = new ArrayList<JSONObject>();
+ for(int i = 0; i < requestFilters.length(); i++) {
+ JSONObject filterEntry = requestFilters.getJSONObject(i);
+ filtersToQuery.add(filterEntry);
+ }
+
+ String jsonResponsePayload = getVnfFilterAggregations(filtersToQuery);
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 200);
+ exchange.getOut().setHeader(Exchange.CONTENT_TYPE, "application/json");
+ exchange.getOut().setBody(jsonResponsePayload);
+
+ } else {
+ String emptyResponse = getEmptyAggResponse();
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 200);
+ exchange.getOut().setHeader(Exchange.CONTENT_TYPE, "application/json");
+ exchange.getOut().setBody(emptyResponse);
+ LOG.error(AaiUiMsgs.ERROR_FILTERS_NOT_FOUND);
+ }
+ }
+ } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ERROR_GENERIC, "AggregateSummaryProcessor failed to get filtered aggregation due to error = " + exc.getMessage());
+ }
+ }
+
+ private String getEmptyAggResponse() {
+ JSONObject aggPayload = new JSONObject();
+ aggPayload.put("totalChartHits", 0);
+ aggPayload.put("buckets", new JSONArray());
+ JSONObject payload = new JSONObject();
+ payload.append("groupby_aggregation", aggPayload);
+
+ return payload.toString();
+ }
+
+ private static final String FILTER_ID_KEY = "filterId";
+ private static final String FILTER_VALUE_KEY = "filterValue";
+ private static final int DEFAULT_SHOULD_MATCH_SCORE = 1;
+ private static final String VNF_FILTER_AGGREGATION = "vnfFilterAggregation";
+
+
+ private String getVnfFilterAggregations(List<JSONObject> filtersToQuery) throws IOException {
+
+ List<SearchFilter> searchFilters = new ArrayList<SearchFilter>();
+ for(JSONObject filterEntry : filtersToQuery) {
+
+ String filterId = filterEntry.getString(FILTER_ID_KEY);
+ if(filterId != null) {
+ SearchFilter filter = new SearchFilter();
+ filter.setFilterId(filterId);
+
+ if(filterEntry.has(FILTER_VALUE_KEY)) {
+ String filterValue = filterEntry.getString(FILTER_VALUE_KEY);
+ filter.addValue(filterValue);
+ }
+
+ searchFilters.add(filter);
+ }
+ }
+
+ // Create query for summary by entity type
+ JsonObject vnfSearch = FilterQueryBuilder.createCombinedBoolAndAggQuery(filtersConfig, searchFilters, DEFAULT_SHOULD_MATCH_SCORE);
+
+ // Parse response for summary by entity type query
+ OperationResult opResult = elasticSearchAdapter.doPost(
+ elasticSearchAdapter.buildElasticSearchUrlForApi(vnfAggregationIndexName,
+ SparkyConstants.ES_SEARCH_API),
+ vnfSearch.toString(), javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE);
+
+ if ( opResult.wasSuccessful()) {
+ return buildAggregateVnfResponseJson(opResult.getResult());
+ } else {
+ return buildEmptyAggregateVnfResponseJson();
+ }
+ }
+
+ private String buildEmptyAggregateVnfResponseJson() {
+ JSONObject finalOutputToFe = new JSONObject();
+ finalOutputToFe.put("total", 0);
+ return finalOutputToFe.toString();
+ }
+
+ private String buildAggregateVnfResponseJson(String responseJsonStr) {
+
+ JSONObject finalOutputToFe = new JSONObject();
+ JSONObject responseJson = new JSONObject(responseJsonStr);
+
+
+ JSONObject hits = responseJson.getJSONObject("hits");
+ int totalHits = hits.getInt("total");
+ finalOutputToFe.put("total", totalHits);
+
+ JSONObject aggregations = responseJson.getJSONObject("aggregations");
+ String[] aggKeys = JSONObject.getNames(aggregations);
+ JSONObject aggregationsList = new JSONObject();
+
+ for(String aggName : aggKeys) {
+ JSONObject aggregation = aggregations.getJSONObject(aggName);
+ JSONArray buckets = aggregation.getJSONArray("buckets");
+ aggregationsList.put(aggName, buckets);
+ }
+
+ finalOutputToFe.put("aggregations", aggregationsList);
+
+ return finalOutputToFe.toString();
+ }
+}
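
For reference, getFilteredAggregation() parses a request body holding a "filters" array whose entries carry a "filterId" and, optionally, a "filterValue" (the key constants defined in the class above). A minimal sketch of building such a payload with org.json, which this class already uses; the filter id and value below are placeholders, not values defined by this change:

import org.json.JSONArray;
import org.json.JSONObject;

public class AggregateSummaryRequestSketch {
  public static void main(String[] args) {
    // One entry per UI filter: "filterId" is required, "filterValue" is optional.
    JSONObject filterEntry = new JSONObject();
    filterEntry.put("filterId", "1");          // placeholder filter id
    filterEntry.put("filterValue", "example"); // placeholder filter value

    JSONObject requestBody = new JSONObject();
    requestBody.put("filters", new JSONArray().put(filterEntry));

    // Prints (key order may vary): {"filters":[{"filterId":"1","filterValue":"example"}]}
    System.out.println(requestBody);
  }
}
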
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java
new file mode 100644
index 0000000..6e7b456
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java
@@ -0,0 +1,129 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.aggregatevnf.search;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.json.JsonObject;
+import javax.ws.rs.core.MediaType;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.common.search.CommonSearchSuggestion;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.search.api.SearchProvider;
+import org.onap.aai.sparky.search.entity.QuerySearchEntity;
+import org.onap.aai.sparky.search.entity.SearchSuggestion;
+import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class AggregateVnfSearchProvider implements SearchProvider {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(AggregateVnfSearchProvider.class);
+
+ private ObjectMapper mapper;
+ private ElasticSearchAdapter elasticSearchAdapter = null;
+ private String autoSuggestIndexName;
+ private String vnfSearchSuggestionRoute;
+
+ public AggregateVnfSearchProvider(ElasticSearchAdapter elasticSearchAdapter,
+ String autoSuggestIndexName, String vnfSearchSuggestionRoute) {
+ mapper = new ObjectMapper();
+ this.elasticSearchAdapter = elasticSearchAdapter;
+ this.autoSuggestIndexName = autoSuggestIndexName;
+ this.vnfSearchSuggestionRoute = vnfSearchSuggestionRoute;
+ }
+
+ public void setAutoSuggestIndexName(String autoSuggestIndexName) {
+ this.autoSuggestIndexName = autoSuggestIndexName;
+ }
+
+ @Override
+ public List<SearchSuggestion> search(QuerySearchEntity queryRequest) {
+
+ List<SearchSuggestion> returnList = new ArrayList<SearchSuggestion>();
+
+ try {
+
+ /* Create suggestions query */
+ JsonObject vnfSearch = VnfSearchQueryBuilder.createSuggestionsQuery(String.valueOf(queryRequest.getMaxResults()), queryRequest.getQueryStr());
+
+ /* Parse suggestions response */
+ OperationResult opResult = elasticSearchAdapter.doPost(
+ elasticSearchAdapter.buildElasticSearchUrlForApi(autoSuggestIndexName,
+ SparkyConstants.ES_SUGGEST_API),
+ vnfSearch.toString(), MediaType.APPLICATION_JSON_TYPE);
+
+ String result = opResult.getResult();
+
+ if (!opResult.wasSuccessful()) {
+ LOG.error(AaiUiMsgs.ERROR_PARSING_JSON_PAYLOAD_VERBOSE, result);
+ return returnList;
+ }
+
+ JSONObject responseJson = new JSONObject(result);
+ String suggestionsKey = "vnfs";
+ JSONArray suggestionsArray = new JSONArray();
+ JSONArray suggestions = responseJson.getJSONArray(suggestionsKey);
+ if (suggestions.length() > 0) {
+ suggestionsArray = suggestions.getJSONObject(0).getJSONArray("options");
+ for (int i = 0; i < suggestionsArray.length(); i++) {
+ JSONObject querySuggestion = suggestionsArray.getJSONObject(i);
+ if (querySuggestion != null) {
+ CommonSearchSuggestion responseSuggestion = new CommonSearchSuggestion();
+ responseSuggestion.setText(querySuggestion.getString("text"));
+ responseSuggestion.setRoute(vnfSearchSuggestionRoute);
+ responseSuggestion.setHashId(NodeUtils.generateUniqueShaDigest(querySuggestion.getString("text")));
+
+ // Extract filter list from JSON and add to response suggestion
+ JSONObject payload = querySuggestion.getJSONObject("payload");
+ if (payload.length() > 0) {
+ JSONArray filterList = payload.getJSONArray("filterList");
+ for (int filter = 0; filter < filterList.length(); filter++) {
+ String filterValueString = filterList.getJSONObject(filter).toString();
+ UiFilterValueEntity filterValue = mapper.readValue(filterValueString, UiFilterValueEntity.class);
+ responseSuggestion.getFilterValues().add(filterValue);
+ }
+ }
+ returnList.add(responseSuggestion);
+ }
+ }
+ }
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, "Search failed due to error = " + exc.getMessage());
+ }
+
+ return returnList;
+ }
+
+}
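
The parsing loop in search() above expects the Elasticsearch completion-suggest response to expose a "vnfs" array whose first element holds an "options" array, each option carrying a "text" value and a "payload" with a "filterList". A minimal sketch of that shape, built with org.json purely for illustration; all values are placeholders:

import org.json.JSONArray;
import org.json.JSONObject;

public class SuggestResponseShapeSketch {
  public static void main(String[] args) {
    // A single suggestion option, as read by AggregateVnfSearchProvider.search().
    JSONObject option = new JSONObject();
    option.put("text", "example-vnf");  // suggestion text (placeholder)
    option.put("payload", new JSONObject().put("filterList", new JSONArray()));

    JSONObject vnfsEntry = new JSONObject();
    vnfsEntry.put("options", new JSONArray().put(option));

    JSONObject response = new JSONObject();
    response.put("vnfs", new JSONArray().put(vnfsEntry));

    // Prints (key order may vary):
    // {"vnfs":[{"options":[{"text":"example-vnf","payload":{"filterList":[]}}]}]}
    System.out.println(response);
  }
}
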
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java
new file mode 100644
index 0000000..2645433
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java
@@ -0,0 +1,176 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.aggregatevnf.search;
+
+import java.util.Map;
+
+import javax.json.Json;
+import javax.json.JsonArray;
+import javax.json.JsonArrayBuilder;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+
+
+/**
+ * Build a JSON payload to send to elastic search to get vnf search data.
+ */
+
+public class VnfSearchQueryBuilder {
+
+
+ /**
+ * Creates the suggestions query.
+ *
+ * @param maxResults maximum number of suggestions to fetch
+ * @param queryStr query string
+ * @return the json object
+ */
+
+ /*
+ * { "vnfs" : { "text" : "VNFs", "completion" : { "field" : "entity_suggest", "size": 1 } } }
+ */
+ public static JsonObject createSuggestionsQuery(String maxResults, String queryStr) {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+
+ JsonObjectBuilder completionBlob = Json.createObjectBuilder();
+ completionBlob.add("field", "entity_suggest");
+ completionBlob.add("size", maxResults);
+
+ JsonObjectBuilder jsonAllBuilder = Json.createObjectBuilder();
+ jsonAllBuilder.add("text", queryStr);
+ jsonAllBuilder.add("completion", completionBlob);
+
+ jsonBuilder.add("vnfs", jsonAllBuilder.build());
+ return jsonBuilder.build();
+ }
+
+ public static JsonObject getTermBlob(String key, String value) {
+ JsonObjectBuilder termBlobBuilder = Json.createObjectBuilder();
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder().add(key, value);
+ return termBlobBuilder.add("term", jsonBuilder.build()).build();
+ }
+
+ public static void getSummaryAggsBlob(JsonObjectBuilder aggsBlobBuilder, String aggsKey,
+ int resultSize) {
+ JsonObjectBuilder fieldBuilder =
+ Json.createObjectBuilder().add("field", aggsKey).add("size", resultSize);
+ JsonObject aggsFieldBlob = fieldBuilder.build();
+ JsonObjectBuilder defaultBlobBuilder = Json.createObjectBuilder().add("terms", aggsFieldBlob);
+ JsonObject defaultBlob = defaultBlobBuilder.build();
+ aggsBlobBuilder.add("default", defaultBlob);
+ }
+
+ public static void buildSingleTermCountQuery(JsonObjectBuilder jsonBuilder, String key,
+ String value) {
+ jsonBuilder.add("query", getTermBlob(key, value));
+ }
+
+ public static void buildSingleTermSummaryQuery(JsonObjectBuilder jsonBuilder, String key,
+ String value, String groupByKey) {
+ JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder();
+ JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder();
+
+ queryBlobBuilder.add("constant_score",
+ Json.createObjectBuilder().add("filter", getTermBlob(key, value)));
+
+ getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0);
+
+ jsonBuilder.add("query", queryBlobBuilder.build());
+ jsonBuilder.add("aggs", aggsBlobBuilder.build());
+ }
+
+ public static void buildMultiTermSummaryQuery(JsonObjectBuilder jsonBuilder,
+ Map<String, String> attributes, String groupByKey) {
+ JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder();
+ JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder();
+ JsonArrayBuilder mustBlobBuilder = Json.createArrayBuilder();
+ for (String key : attributes.keySet()) {
+ mustBlobBuilder.add(getTermBlob(key, attributes.get(key)));
+ }
+ JsonArray mustBlob = mustBlobBuilder.build();
+
+ queryBlobBuilder.add("constant_score", Json.createObjectBuilder().add("filter",
+ Json.createObjectBuilder().add("bool", Json.createObjectBuilder().add("must", mustBlob))));
+
+ getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0);
+
+ jsonBuilder.add("query", queryBlobBuilder.build());
+ jsonBuilder.add("aggs", aggsBlobBuilder.build());
+ }
+
+ public static void buildZeroTermSummaryQuery(JsonObjectBuilder jsonBuilder, String groupByKey) {
+ JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder();
+
+ getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0);
+
+ jsonBuilder.add("aggs", aggsBlobBuilder.build());
+ }
+
+ public static void buildMultiTermCountQuery(JsonObjectBuilder jsonBuilder,
+ Map<String, String> attributes) {
+ JsonArrayBuilder mustBlobBuilder = Json.createArrayBuilder();
+ for (String key : attributes.keySet()) {
+ mustBlobBuilder.add(getTermBlob(key, attributes.get(key)));
+ }
+ jsonBuilder.add("query", Json.createObjectBuilder().add("bool",
+ Json.createObjectBuilder().add("must", mustBlobBuilder)));
+ }
+
+
+
+ public static JsonObject createSummaryByEntityTypeQuery(Map<String, String> attributes,
+ String groupByKey) {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+ jsonBuilder.add("size", "0"); // avoid source data
+ if (attributes.size() == 0) {
+ buildZeroTermSummaryQuery(jsonBuilder, groupByKey);
+ } else if (attributes.size() == 1) {
+ Map.Entry<String, String> entry = attributes.entrySet().iterator().next();
+ buildSingleTermSummaryQuery(jsonBuilder, entry.getKey(), entry.getValue(), groupByKey);
+ } else {
+ buildMultiTermSummaryQuery(jsonBuilder, attributes, groupByKey);
+ }
+ return jsonBuilder.build();
+ }
+
+ public static JsonObject createEntityCountsQuery(Map<String, String> attributes) {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+ if (attributes.size() == 1) {
+ Map.Entry<String, String> entry = attributes.entrySet().iterator().next();
+ buildSingleTermCountQuery(jsonBuilder, entry.getKey(), entry.getValue());
+ } else {
+ buildMultiTermCountQuery(jsonBuilder, attributes);
+ }
+ return jsonBuilder.build();
+ }
+
+ public static JsonArray getSortCriteria(String sortFieldName, String sortOrder) {
+ JsonArrayBuilder jsonBuilder = Json.createArrayBuilder();
+ jsonBuilder.add(Json.createObjectBuilder().add(sortFieldName,
+ Json.createObjectBuilder().add("order", sortOrder)));
+
+ return jsonBuilder.build();
+ }
+}
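
Since every builder method above is static, the class can be exercised directly. A minimal sketch of calling the two top-level entry points; the attribute name, value, and group-by field are placeholders chosen for illustration, not values mandated by this class:

import java.util.HashMap;
import java.util.Map;

import javax.json.JsonObject;

import org.onap.aai.sparky.aggregatevnf.search.VnfSearchQueryBuilder;

public class VnfSearchQueryBuilderSketch {
  public static void main(String[] args) {
    // Completion-suggest query, e.g.
    // {"vnfs":{"text":"vnf","completion":{"field":"entity_suggest","size":"5"}}}
    JsonObject suggestQuery = VnfSearchQueryBuilder.createSuggestionsQuery("5", "vnf");
    System.out.println(suggestQuery);

    // Aggregation query filtered on a placeholder attribute and grouped by a placeholder field.
    Map<String, String> attributes = new HashMap<>();
    attributes.put("prov-status", "PREPROV");
    JsonObject summaryQuery =
        VnfSearchQueryBuilder.createSummaryByEntityTypeQuery(attributes, "orchestration-status");
    System.out.println(summaryQuery);
  }
}
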
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java
new file mode 100644
index 0000000..8681853
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java
@@ -0,0 +1,241 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.aggregation.sync;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.SuggestionEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner;
+import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
+import org.onap.aai.sparky.sync.IndexCleaner;
+import org.onap.aai.sparky.sync.IndexIntegrityValidator;
+import org.onap.aai.sparky.sync.SyncController;
+import org.onap.aai.sparky.sync.SyncControllerImpl;
+import org.onap.aai.sparky.sync.SyncControllerRegistrar;
+import org.onap.aai.sparky.sync.SyncControllerRegistry;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.config.SyncControllerConfig;
+
+public class AggregationSyncControllerFactory implements SyncControllerRegistrar {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(AggregationSyncControllerFactory.class);
+
+ private ActiveInventoryAdapter aaiAdapter;
+ private ElasticSearchAdapter esAdapter;
+ private SuggestionEntityLookup suggestionEntityLookup;
+
+ private Map<String, String> aggregationEntityToIndexMap;
+ private Map<String, ElasticSearchSchemaConfig> indexNameToSchemaConfigMap;
+
+ private ElasticSearchEndpointConfig elasticSearchEndpointConfig;
+ private SyncControllerConfig syncControllerConfig;
+ private SyncControllerRegistry syncControllerRegistry;
+ private NetworkStatisticsConfig aaiStatConfig;
+ private NetworkStatisticsConfig esStatConfig;
+ private OxmEntityLookup oxmEntityLookup;
+ private ElasticSearchSchemaFactory elasticSearchSchemaFactory;
+
+ private List<SyncController> syncControllers;
+
+ public AggregationSyncControllerFactory(ElasticSearchEndpointConfig esEndpointConfig,
+ SyncControllerConfig syncControllerConfig, SyncControllerRegistry syncControllerRegistry,
+ SuggestionEntityLookup suggestionEntityLookup,
+ OxmEntityLookup oxmEntityLookup,
+ ElasticSearchSchemaFactory elasticSearchSchemaFactory) {
+ this.elasticSearchSchemaFactory = elasticSearchSchemaFactory;
+ this.syncControllers = new ArrayList<SyncController>();
+ this.elasticSearchEndpointConfig = esEndpointConfig;
+ this.syncControllerConfig = syncControllerConfig;
+ this.syncControllerRegistry = syncControllerRegistry;
+ this.suggestionEntityLookup = suggestionEntityLookup;
+ this.oxmEntityLookup = oxmEntityLookup;
+ }
+
+ public NetworkStatisticsConfig getAaiStatConfig() {
+ return aaiStatConfig;
+ }
+
+ public void setAaiStatConfig(NetworkStatisticsConfig aaiStatConfig) {
+ this.aaiStatConfig = aaiStatConfig;
+ }
+
+ public NetworkStatisticsConfig getEsStatConfig() {
+ return esStatConfig;
+ }
+
+ public void setEsStatConfig(NetworkStatisticsConfig esStatConfig) {
+ this.esStatConfig = esStatConfig;
+ }
+
+ public Map<String, ElasticSearchSchemaConfig> getIndexNameToSchemaConfigMap() {
+ return indexNameToSchemaConfigMap;
+ }
+
+ public void setIndexNameToSchemaConfigMap(
+ Map<String, ElasticSearchSchemaConfig> indexNameToSchemaConfigMap) {
+ this.indexNameToSchemaConfigMap = indexNameToSchemaConfigMap;
+ }
+
+ public ElasticSearchEndpointConfig getElasticSearchEndpointConfig() {
+ return elasticSearchEndpointConfig;
+ }
+
+ public void setElasticSearchEndpointConfig(
+ ElasticSearchEndpointConfig elasticSearchEndpointConfig) {
+ this.elasticSearchEndpointConfig = elasticSearchEndpointConfig;
+ }
+
+ public SyncControllerConfig getSyncControllerConfig() {
+ return syncControllerConfig;
+ }
+
+ public void setSyncControllerConfig(SyncControllerConfig syncControllerConfig) {
+ this.syncControllerConfig = syncControllerConfig;
+ }
+
+ public ActiveInventoryAdapter getAaiAdapter() {
+ return aaiAdapter;
+ }
+
+ public void setAaiAdapter(ActiveInventoryAdapter aaiAdapter) {
+ this.aaiAdapter = aaiAdapter;
+ }
+
+ public ElasticSearchAdapter getEsAdapter() {
+ return esAdapter;
+ }
+
+ public void setEsAdapter(ElasticSearchAdapter esAdapter) {
+ this.esAdapter = esAdapter;
+ }
+
+ public SuggestionEntityLookup getSuggestionEntityLookup() {
+ return suggestionEntityLookup;
+ }
+
+ public void setSuggestionEntityLookup(SuggestionEntityLookup suggestionEntityLookup) {
+ this.suggestionEntityLookup = suggestionEntityLookup;
+ }
+
+ public Map<String, String> getAggregationEntityToIndexMap() {
+ return aggregationEntityToIndexMap;
+ }
+
+ public void setAggregationEntityToIndexMap(Map<String, String> aggregationEntityToIndexMap) {
+ this.aggregationEntityToIndexMap = aggregationEntityToIndexMap;
+ }
+
+ public void buildControllers() {
+
+ if (syncControllerConfig.isEnabled()) {
+
+ Map<String, SuggestionEntityDescriptor> suggestionEntitites =
+ suggestionEntityLookup.getSuggestionSearchEntityDescriptors();
+ SyncControllerImpl aggregationSyncController = null;
+
+ for (String entityType : suggestionEntitites.keySet()) {
+
+ String indexName = aggregationEntityToIndexMap.get(entityType);
+
+ if (indexName == null) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "Could not determine aggregation index name" + " for entity type: " + entityType);
+ continue;
+ }
+
+ try {
+
+ aggregationSyncController = new SyncControllerImpl(syncControllerConfig, entityType);
+
+ ElasticSearchSchemaConfig schemaConfig = indexNameToSchemaConfigMap.get(indexName);
+
+ if (schemaConfig == null) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "Could not determine elastic search schema config for index name: " + indexName);
+ continue;
+ }
+
+ IndexIntegrityValidator aggregationIndexValidator = new IndexIntegrityValidator(esAdapter,
+ schemaConfig, elasticSearchEndpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig));
+
+ aggregationSyncController.registerIndexValidator(aggregationIndexValidator);
+
+ AggregationSynchronizer aggSynchronizer = new AggregationSynchronizer(entityType,
+ schemaConfig, syncControllerConfig.getNumInternalSyncWorkers(),
+ syncControllerConfig.getNumSyncActiveInventoryWorkers(),
+ syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig,
+ oxmEntityLookup);
+
+ aggSynchronizer.setAaiAdapter(aaiAdapter);
+ aggSynchronizer.setElasticSearchAdapter(esAdapter);
+
+ aggregationSyncController.registerEntitySynchronizer(aggSynchronizer);
+
+ IndexCleaner entityDataIndexCleaner =
+ new ElasticSearchIndexCleaner(esAdapter, elasticSearchEndpointConfig, schemaConfig);
+
+ aggregationSyncController.registerIndexCleaner(entityDataIndexCleaner);
+
+ syncControllers.add(aggregationSyncController);
+ } catch (Exception exc) {
+
+ exc.printStackTrace();
+
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "Failed to build aggregation sync controller. Error : " + exc.getMessage());
+ }
+
+ }
+ } else {
+ LOG.info(AaiUiMsgs.INFO_GENERIC, "Sync controller with name = "
+ + syncControllerConfig.getControllerName() + " is disabled");
+ }
+ }
+
+ @Override
+ public void registerController() {
+
+ buildControllers();
+
+ if ( syncControllerRegistry != null ) {
+ for ( SyncController controller : syncControllers ) {
+ syncControllerRegistry.registerSyncController(controller);
+ }
+ }
+
+ }
+}
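
The factory receives some collaborators through its constructor and the rest through setters, so wiring it (for example from a Spring-Boot bean definition) follows the pattern below. This is a hypothetical sketch using only the constructor and setters shown above; it is not the configuration class shipped with this change, and every parameter is assumed to be supplied elsewhere in the application context:

import java.util.Map;

import org.onap.aai.sparky.aggregation.sync.AggregationSyncControllerFactory;
import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup;
import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
import org.onap.aai.sparky.dal.ElasticSearchAdapter;
import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
import org.onap.aai.sparky.sync.SyncControllerRegistry;
import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
import org.onap.aai.sparky.sync.config.SyncControllerConfig;

public class AggregationSyncWiringSketch {

  public AggregationSyncControllerFactory aggregationSyncControllerFactory(
      ElasticSearchEndpointConfig esEndpointConfig, SyncControllerConfig syncControllerConfig,
      SyncControllerRegistry syncControllerRegistry, SuggestionEntityLookup suggestionEntityLookup,
      OxmEntityLookup oxmEntityLookup, ElasticSearchSchemaFactory schemaFactory,
      ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
      NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig,
      Map<String, String> aggregationEntityToIndexMap,
      Map<String, ElasticSearchSchemaConfig> indexNameToSchemaConfigMap) {

    AggregationSyncControllerFactory factory = new AggregationSyncControllerFactory(
        esEndpointConfig, syncControllerConfig, syncControllerRegistry, suggestionEntityLookup,
        oxmEntityLookup, schemaFactory);

    // Adapters and statistics configs are injected via setters rather than the constructor.
    factory.setAaiAdapter(aaiAdapter);
    factory.setEsAdapter(esAdapter);
    factory.setAaiStatConfig(aaiStatConfig);
    factory.setEsStatConfig(esStatConfig);

    // The entity-type -> index-name and index-name -> schema-config maps drive buildControllers().
    factory.setAggregationEntityToIndexMap(aggregationEntityToIndexMap);
    factory.setIndexNameToSchemaConfigMap(indexNameToSchemaConfigMap);

    // registerController() builds one SyncController per suggestion entity type and
    // registers each with the SyncControllerRegistry.
    factory.registerController();
    return factory;
  }
}
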
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java
new file mode 100644
index 0000000..a438215
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java
@@ -0,0 +1,782 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.aggregation.sync;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.EnumSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
+import org.onap.aai.sparky.sync.IndexSynchronizer;
+import org.onap.aai.sparky.sync.SynchronizerConstants;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.entity.AggregationEntity;
+import org.onap.aai.sparky.sync.entity.MergableEntity;
+import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchPut;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+/**
+ * The Class AggregationSynchronizer.
+ */
+public class AggregationSynchronizer extends AbstractEntitySynchronizer
+ implements IndexSynchronizer {
+
+ /**
+ * The Class RetryAggregationEntitySyncContainer.
+ */
+ private class RetryAggregationEntitySyncContainer {
+ NetworkTransaction txn;
+ AggregationEntity ae;
+
+ /**
+ * Instantiates a new retry aggregation entity sync container.
+ *
+     * @param txn the network transaction
+     * @param ae the aggregation entity
+ */
+ public RetryAggregationEntitySyncContainer(NetworkTransaction txn, AggregationEntity ae) {
+ this.txn = txn;
+ this.ae = ae;
+ }
+
+ public NetworkTransaction getNetworkTransaction() {
+ return txn;
+ }
+
+ public AggregationEntity getAggregationEntity() {
+ return ae;
+ }
+ }
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(AggregationSynchronizer.class);
+ private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ";
+
+ private boolean allWorkEnumerated;
+ private Deque<SelfLinkDescriptor> selflinks;
+ private Deque<RetryAggregationEntitySyncContainer> retryQueue;
+ private Map<String, Integer> retryLimitTracker;
+ protected ExecutorService esPutExecutor;
+ private ConcurrentHashMap<String, AtomicInteger> entityCounters;
+ private boolean syncInProgress;
+ private Map<String, String> contextMap;
+ private String entityType;
+ private ElasticSearchSchemaConfig schemaConfig;
+ private OxmEntityLookup oxmEntityLookup;
+
+  /**
+   * Instantiates a new entity aggregation synchronizer.
+   *
+   * @param entityType the entity type to synchronize
+   * @param schemaConfig the Elasticsearch index schema configuration
+   * @param numSyncWorkers the number of internal synchronization workers
+   * @param numActiveInventoryWorkers the number of AAI retrieval workers
+   * @param numElasticWorkers the number of Elasticsearch workers
+   * @param aaiStatConfig the AAI network statistics configuration
+   * @param esStatConfig the Elasticsearch network statistics configuration
+   * @param oxmEntityLookup the OXM entity lookup
+   * @throws Exception if the synchronizer cannot be constructed
+   */
+ public AggregationSynchronizer(String entityType, ElasticSearchSchemaConfig schemaConfig,
+ int numSyncWorkers, int numActiveInventoryWorkers, int numElasticWorkers,
+ NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig,
+ OxmEntityLookup oxmEntityLookup) throws Exception {
+
+    super(LOG, "AGGES-" + schemaConfig.getIndexName().toUpperCase(), numSyncWorkers,
+        numActiveInventoryWorkers, numElasticWorkers, schemaConfig.getIndexName(), aaiStatConfig,
+        esStatConfig);
+
+ this.oxmEntityLookup = oxmEntityLookup;
+
+ this.schemaConfig = schemaConfig;
+ this.entityType = entityType;
+ this.allWorkEnumerated = false;
+ this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>();
+ this.synchronizerName = "Entity Aggregation Synchronizer";
+ this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS);
+ this.syncInProgress = false;
+ this.allWorkEnumerated = false;
+ this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+ this.retryQueue = new ConcurrentLinkedDeque<RetryAggregationEntitySyncContainer>();
+ this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
+
+ this.esPutExecutor = NodeUtils.createNamedExecutor("AGGES-ES-PUT", 1, LOG);
+
+ this.aaiEntityStats.intializeEntityCounters(entityType);
+ this.esEntityStats.intializeEntityCounters(entityType);
+
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
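+
+  /*
+   * Illustrative wiring sketch (not taken verbatim from this patch; the "generic-vnf" entity type
+   * and the worker counts are assumptions). The adapters are assumed to be injected through the
+   * setters inherited from AbstractEntitySynchronizer, as the sync controllers in this change do:
+   *
+   *   AggregationSynchronizer sync = new AggregationSynchronizer("generic-vnf", schemaConfig,
+   *       2, 5, 5, aaiStatConfig, esStatConfig, oxmEntityLookup);
+   *   sync.setAaiAdapter(aaiAdapter);
+   *   sync.setElasticSearchAdapter(esAdapter);
+   *   OperationState result = sync.doSync();
+   */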
+
+ /**
+ * Collect all the work.
+ *
+ * @return the operation state
+ */
+ private OperationState collectAllTheWork() {
+ final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+ final String entity = this.getEntityType();
+ try {
+
+ aaiWorkOnHand.set(1);
+
+ supplyAsync(new Supplier<Void>() {
+
+ @Override
+ public Void get() {
+ MDC.setContextMap(contextMap);
+ OperationResult typeLinksResult = null;
+ try {
+ typeLinksResult = aaiAdapter.getSelfLinksByEntityType(entity);
+ aaiWorkOnHand.decrementAndGet();
+ processEntityTypeSelfLinks(typeLinksResult);
+ } catch (Exception exc) {
+          LOG.error(AaiUiMsgs.ERROR_GENERIC,
+              "Processing exception while building working set. Error: " + exc.getMessage());
+ }
+
+ return null;
+ }
+
+ }, aaiExecutor).whenComplete((result, error) -> {
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "An error occurred getting data from AAI. Error = " + error.getMessage());
+ }
+ });
+
+ while (aaiWorkOnHand.get() != 0) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+ }
+
+ Thread.sleep(1000);
+ }
+
+ aaiWorkOnHand.set(selflinks.size());
+ allWorkEnumerated = true;
+ syncEntityTypes();
+
+ while (!isSyncDone()) {
+ performRetrySync();
+ Thread.sleep(1000);
+ }
+
+ /*
+ * Make sure we don't hang on to retries that failed which could cause issues during future
+ * syncs
+ */
+ retryLimitTracker.clear();
+
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ERROR_GENERIC,
+          "An error occurred while collecting the aggregation synchronizer work items. Error = "
+              + exc.getMessage());
+    }
+
+ return OperationState.OK;
+ }
+
+
+ /**
+ * Perform retry sync.
+ */
+ private void performRetrySync() {
+ while (retryQueue.peek() != null) {
+
+ RetryAggregationEntitySyncContainer rsc = retryQueue.poll();
+ if (rsc != null) {
+
+ AggregationEntity ae = rsc.getAggregationEntity();
+ NetworkTransaction txn = rsc.getNetworkTransaction();
+
+ String link = null;
+ try {
+        /*
+         * In this retry flow the AggregationEntity (ae) has already derived its fields.
+         */
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), ae.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage());
+ }
+
+ if (link != null) {
+ NetworkTransaction retryTransaction = new NetworkTransaction();
+ retryTransaction.setLink(link);
+ retryTransaction.setEntityType(txn.getEntityType());
+ retryTransaction.setDescriptor(txn.getDescriptor());
+ retryTransaction.setOperationType(HttpMethod.GET);
+
+ /*
+ * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already
+ * called incrementAndGet when queuing the failed PUT!
+ */
+
+ supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter),
+ esExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ performDocumentUpsert(result, ae);
+ }
+ });
+ }
+
+ }
+ }
+ }
+
+ /**
+ * Perform document upsert.
+ *
+ * @param esGetTxn the es get txn
+ * @param ae the ae
+ */
+ protected void performDocumentUpsert(NetworkTransaction esGetTxn, AggregationEntity ae) {
+  /*
+   * As part of the response processing we need to do the following:
+   *   1. Extract the version (if present); it will be the ETag when we use the
+   *      Search-Abstraction-Service.
+   *   2. Spawn the next task, which performs the PUT into Elasticsearch with or without the
+   *      version tag:
+   *      a) if the version is null or the GET returned 404, do a standard PUT with no _update or
+   *         version tag.
+   *      b) if the version is not null, do a PUT with _update?version=<versionNumber> in the URI
+   *         to Elasticsearch.
+   */
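+  /*
+   * Illustrative outcome of the decision above (the URL shapes are assumptions, not the adapter's
+   * documented contract):
+   *   - entry not found (404): plain document PUT against buildElasticSearchGetDocUrl(index, id)
+   *   - entry found (200):     versioned update built via buildBulkImportOperationRequest(index,
+   *                            docType, id, versionNumber, payload) and PUT to the bulk URL
+   */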
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), ae.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+ return;
+ }
+
+ String versionNumber = null;
+ boolean wasEntryDiscovered = false;
+ if (esGetTxn.getOperationResult().getResultCode() == 404) {
+ LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, ae.getEntityPrimaryKeyValue());
+ } else if (esGetTxn.getOperationResult().getResultCode() == 200) {
+ wasEntryDiscovered = true;
+ try {
+ versionNumber = NodeUtils.extractFieldValueFromObject(
+ NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()),
+ "_version");
+ } catch (IOException exc) {
+ String message =
+ "Error extracting version number from response, aborting aggregation entity sync of "
+ + ae.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message);
+ return;
+ }
+ } else {
+    /*
+     * A non-200 code is not automatically a failure (for example, 201 is returned on create), but
+     * this GET-before-PUT flow only expects 200 or 404, so log the unexpected code and stop
+     * processing this entity.
+     */
+ LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE,
+ String.valueOf(esGetTxn.getOperationResult().getResultCode()));
+ return;
+ }
+
+ try {
+ String jsonPayload = null;
+ if (wasEntryDiscovered) {
+ try {
+ ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>();
+ NodeUtils.extractObjectsByKey(
+ NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()),
+ "_source", sourceObject);
+
+ if (!sourceObject.isEmpty()) {
+ String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false);
+ MergableEntity me = mapper.readValue(responseSource, MergableEntity.class);
+ ObjectReader updater = mapper.readerForUpdating(me);
+ MergableEntity merged = updater.readValue(ae.getAsJson());
+ jsonPayload = mapper.writeValueAsString(merged);
+ }
+ } catch (IOException exc) {
+ String message =
+ "Error extracting source value from response, aborting aggregation entity sync of "
+ + ae.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message);
+ return;
+ }
+ } else {
+ jsonPayload = ae.getAsJson();
+ }
+
+ if (wasEntryDiscovered) {
+ if (versionNumber != null && jsonPayload != null) {
+
+ String requestPayload =
+ elasticSearchAdapter.buildBulkImportOperationRequest(schemaConfig.getIndexName(),
+ schemaConfig.getIndexDocType(), ae.getId(), versionNumber, jsonPayload);
+
+ NetworkTransaction transactionTracker = new NetworkTransaction();
+ transactionTracker.setEntityType(esGetTxn.getEntityType());
+ transactionTracker.setDescriptor(esGetTxn.getDescriptor());
+ transactionTracker.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformElasticSearchUpdate(elasticSearchAdapter.getBulkUrl(),
+ requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor)
+ .whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ String message = "Aggregation entity sync UPDATE PUT error - "
+ + error.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ } else {
+ updateElasticSearchCounters(result);
+ processStoreDocumentResult(result, esGetTxn, ae);
+ }
+ });
+ }
+
+ } else {
+ if (link != null && jsonPayload != null) {
+
+ NetworkTransaction updateElasticTxn = new NetworkTransaction();
+ updateElasticTxn.setLink(link);
+ updateElasticTxn.setEntityType(esGetTxn.getEntityType());
+ updateElasticTxn.setDescriptor(esGetTxn.getDescriptor());
+ updateElasticTxn.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter),
+ esPutExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ String message =
+ "Aggregation entity sync UPDATE PUT error - " + error.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ } else {
+ updateElasticSearchCounters(result);
+ processStoreDocumentResult(result, esGetTxn, ae);
+ }
+ });
+ }
+ }
+ } catch (Exception exc) {
+ String message = "Exception caught during aggregation entity sync PUT operation. Message - "
+ + exc.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ }
+ }
+
+ /**
+ * Should allow retry.
+ *
+ * @param id the id
+ * @return true, if successful
+ */
+ private boolean shouldAllowRetry(String id) {
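+    /*
+     * Illustrative progression for a given document id: the first RETRY_COUNT_PER_ENTITY_LIMIT
+     * calls return true (the per-id counter starts at 1 and is incremented on each call); the
+     * next call sees the counter at the limit, logs the error below, and returns false so the
+     * entity is no longer re-synced.
+     */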
+ boolean isRetryAllowed = true;
+ if (retryLimitTracker.get(id) != null) {
+ Integer currentCount = retryLimitTracker.get(id);
+ if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) {
+ isRetryAllowed = false;
+ String message = "Aggregation entity re-sync limit reached for " + id
+ + ", re-sync will no longer be attempted for this entity";
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ } else {
+        Integer newCount = Integer.valueOf(currentCount.intValue() + 1);
+ retryLimitTracker.put(id, newCount);
+ }
+ } else {
+      Integer firstRetryCount = Integer.valueOf(1);
+ retryLimitTracker.put(id, firstRetryCount);
+ }
+
+ return isRetryAllowed;
+ }
+
+ /**
+ * Process store document result.
+ *
+ * @param esPutResult the es put result
+ * @param esGetResult the es get result
+ * @param ae the ae
+ */
+ private void processStoreDocumentResult(NetworkTransaction esPutResult,
+ NetworkTransaction esGetResult, AggregationEntity ae) {
+
+ OperationResult or = esPutResult.getOperationResult();
+
+ if (!or.wasSuccessful()) {
+ if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) {
+
+ if (shouldAllowRetry(ae.getId())) {
+ esWorkOnHand.incrementAndGet();
+
+ RetryAggregationEntitySyncContainer rsc =
+ new RetryAggregationEntitySyncContainer(esGetResult, ae);
+ retryQueue.push(rsc);
+
+ String message = "Store document failed during aggregation entity synchronization"
+ + " due to version conflict. Entity will be re-synced.";
+ LOG.warn(AaiUiMsgs.ERROR_GENERIC, message);
+ }
+ } else {
+ String message =
+ "Store document failed during aggregation entity synchronization with result code "
+ + or.getResultCode() + " and result message " + or.getResult();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ }
+ }
+ }
+
+ /**
+ * Sync entity types.
+ */
+ private void syncEntityTypes() {
+
+ while (selflinks.peek() != null) {
+
+ SelfLinkDescriptor linkDescriptor = selflinks.poll();
+ aaiWorkOnHand.decrementAndGet();
+
+ OxmEntityDescriptor descriptor = null;
+
+ if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+ descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType());
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+ // go to next element in iterator
+ continue;
+ }
+
+ NetworkTransaction txn = new NetworkTransaction();
+ txn.setDescriptor(descriptor);
+ txn.setLink(linkDescriptor.getSelfLink());
+ txn.setOperationType(HttpMethod.GET);
+ txn.setEntityType(linkDescriptor.getEntityType());
+
+ aaiWorkOnHand.incrementAndGet();
+
+ supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor)
+ .whenComplete((result, error) -> {
+
+ aaiWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage());
+ } else {
+ if (result == null) {
+ LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK,
+ linkDescriptor.getSelfLink());
+ } else {
+ updateActiveInventoryCounters(result);
+ fetchDocumentForUpsert(result);
+ }
+ }
+ });
+ }
+
+ }
+
+ }
+
+ /**
+ * Fetch document for upsert.
+ *
+ * @param txn the txn
+ */
+ private void fetchDocumentForUpsert(NetworkTransaction txn) {
+ // modified
+ if (!txn.getOperationResult().wasSuccessful()) {
+ String message = "Self link failure. Result - " + txn.getOperationResult().getResult();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ return;
+ }
+
+ try {
+ final String jsonResult = txn.getOperationResult().getResult();
+ if (jsonResult != null && jsonResult.length() > 0) {
+
+ AggregationEntity ae = new AggregationEntity();
+ ae.setLink(ActiveInventoryAdapter.extractResourcePath(txn.getLink()));
+ populateAggregationEntityDocument(ae, jsonResult, txn.getDescriptor());
+ ae.deriveFields();
+
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), ae.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage());
+ }
+
+ if (link != null) {
+ NetworkTransaction n2 = new NetworkTransaction();
+ n2.setLink(link);
+ n2.setEntityType(txn.getEntityType());
+ n2.setDescriptor(txn.getDescriptor());
+ n2.setOperationType(HttpMethod.GET);
+
+ esWorkOnHand.incrementAndGet();
+
+ supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor)
+ .whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ performDocumentUpsert(result, ae);
+ }
+ });
+ }
+ }
+
+ } catch (JsonProcessingException exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "There was a JSON processing error fetching the elastic document for upsert. Error: "
+ + exc.getMessage());
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "There was an IO error fetching the elastic document for upsert. Error: " + exc.getMessage());
+ }
+ }
+
+
+ /**
+ * Populate aggregation entity document.
+ *
+ * @param doc the doc
+ * @param result the result
+ * @param resultDescriptor the result descriptor
+ * @throws JsonProcessingException the json processing exception
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ protected void populateAggregationEntityDocument(AggregationEntity doc, String result,
+ OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException {
+ doc.setEntityType(resultDescriptor.getEntityName());
+ JsonNode entityNode = mapper.readTree(result);
+ Map<String, Object> map = mapper.convertValue(entityNode, Map.class);
+ doc.copyAttributeKeyValuePair(map);
+ }
+
+ /**
+ * Process entity type self links.
+ *
+ * @param operationResult the operation result
+ */
+ private void processEntityTypeSelfLinks(OperationResult operationResult) {
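+    /*
+     * Illustrative (assumed) shape of the AAI self-link query response parsed below; the
+     * resource-link value is a made-up example:
+     *
+     *   { "result-data": [
+     *       { "resource-type": "generic-vnf",
+     *         "resource-link": "https://aai-host:8443/aai/v11/network/generic-vnfs/generic-vnf/example-id" } ] }
+     */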
+
+ JsonNode rootNode = null;
+
+ if ( operationResult == null ) {
+ return;
+ }
+
+ final String jsonResult = operationResult.getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
+
+      try {
+        rootNode = mapper.readTree(jsonResult);
+      } catch (IOException exc) {
+        String message =
+            "Could not deserialize JSON (representing operation result) as node tree. " +
+            "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage();
+        LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message);
+        return;
+      }
+
+      JsonNode resultData = rootNode.get("result-data");
+      ArrayNode resultDataArrayNode = null;
+
+      if (resultData != null && resultData.isArray()) {
+ resultDataArrayNode = (ArrayNode) resultData;
+
+ Iterator<JsonNode> elementIterator = resultDataArrayNode.elements();
+ JsonNode element = null;
+
+ while (elementIterator.hasNext()) {
+ element = elementIterator.next();
+
+ final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type");
+ final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link");
+
+ OxmEntityDescriptor descriptor = null;
+
+ if (resourceType != null && resourceLink != null) {
+
+ descriptor = oxmEntityLookup.getEntityDescriptors().get(resourceType);
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType);
+ // go to next element in iterator
+ continue;
+ }
+
+            selflinks.add(new SelfLinkDescriptor(resourceLink,
+                SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType));
+
+ }
+ }
+ }
+ }
+
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+ */
+ @Override
+ public OperationState doSync() {
+ this.syncDurationInMs = -1;
+ syncStartedTimeStampInMs = System.currentTimeMillis();
+ String txnID = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnID, "AggregationSynchronizer", "", "Sync", "");
+
+ return collectAllTheWork();
+ }
+
+ @Override
+ public SynchronizerState getState() {
+
+ if (!isSyncDone()) {
+ return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+ }
+
+ return SynchronizerState.IDLE;
+
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
+ */
+ @Override
+ public String getStatReport(boolean showFinalReport) {
+ syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
+ return getStatReport(syncDurationInMs, showFinalReport);
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
+ */
+ @Override
+ public void shutdown() {
+ this.shutdownExecutors();
+ }
+
+ @Override
+ protected boolean isSyncDone() {
+
+ int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = "
+ + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated);
+ }
+
+ if (totalWorkOnHand > 0 || !allWorkEnumerated) {
+ return false;
+ }
+
+ this.syncInProgress = false;
+
+ return true;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache()
+ */
+ @Override
+ public void clearCache() {
+
+ if (syncInProgress) {
+      LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+          "Entity Aggregation Synchronizer sync is in progress, request to clear cache ignored");
+ return;
+ }
+
+ super.clearCache();
+ this.resetCounters();
+ if (entityCounters != null) {
+ entityCounters.clear();
+ }
+
+ allWorkEnumerated = false;
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java
new file mode 100644
index 0000000..9063e92
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java
@@ -0,0 +1,384 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.aggregation.sync;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+
+import javax.json.Json;
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.SearchableEntityLookup;
+import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
+import org.onap.aai.sparky.sync.IndexSynchronizer;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+/**
+ * The Class HistoricalEntitySummarizer.
+ */
+public class HistoricalEntitySummarizer extends AbstractEntitySynchronizer
+ implements IndexSynchronizer {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(HistoricalEntitySummarizer.class);
+ private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ";
+
+ private boolean allWorkEnumerated;
+ private ConcurrentHashMap<String, AtomicInteger> entityCounters;
+ private boolean syncInProgress;
+ private Map<String, String> contextMap;
+ private ElasticSearchSchemaConfig schemaConfig;
+ private SearchableEntityLookup searchableEntityLookup;
+
+  /**
+   * Instantiates a new historical entity summarizer.
+   *
+   * @param schemaConfig the Elasticsearch index schema configuration
+   * @param internalSyncWorkers the number of internal synchronization workers
+   * @param aaiWorkers the number of AAI retrieval workers
+   * @param esWorkers the number of Elasticsearch workers
+   * @param aaiStatConfig the AAI network statistics configuration
+   * @param esStatConfig the Elasticsearch network statistics configuration
+   * @param searchableEntityLookup the searchable entity lookup
+   * @throws Exception if the summarizer cannot be constructed
+   */
+ public HistoricalEntitySummarizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers,
+ int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
+ NetworkStatisticsConfig esStatConfig, SearchableEntityLookup searchableEntityLookup)
+ throws Exception {
+ super(LOG, "HES", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig);
+
+ this.schemaConfig = schemaConfig;
+ this.allWorkEnumerated = false;
+ this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>();
+ this.synchronizerName = "Historical Entity Summarizer";
+ this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS);
+ this.syncInProgress = false;
+ this.contextMap = MDC.getCopyOfContextMap();
+ this.syncDurationInMs = -1;
+ this.searchableEntityLookup = searchableEntityLookup;
+ }
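+
+  /*
+   * Illustrative construction sketch, mirroring how HistoricalEntitySyncController wires this
+   * class elsewhere in this change (the worker counts are assumptions):
+   *
+   *   HistoricalEntitySummarizer summarizer = new HistoricalEntitySummarizer(schemaConfig,
+   *       2, 5, 5, aaiStatConfig, esStatConfig, searchableEntityLookup);
+   *   summarizer.setAaiAdapter(aaiAdapter);
+   *   summarizer.setElasticSearchAdapter(esAdapter);
+   */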
+
+ /**
+ * Collect all the work.
+ *
+ * @return the operation state
+ */
+ private OperationState collectAllTheWork() {
+
+ Map<String, SearchableOxmEntityDescriptor> descriptorMap =
+ searchableEntityLookup.getSearchableEntityDescriptors();
+
+ if (descriptorMap.isEmpty()) {
+ LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "historical entities");
+
+ return OperationState.ERROR;
+ }
+
+ Collection<String> entityTypes = descriptorMap.keySet();
+
+ AtomicInteger asyncWoH = new AtomicInteger(0);
+
+ asyncWoH.set(entityTypes.size());
+
+ try {
+ for (String entityType : entityTypes) {
+
+ supplyAsync(new Supplier<Void>() {
+
+ @Override
+ public Void get() {
+ MDC.setContextMap(contextMap);
+ try {
+ OperationResult typeLinksResult =
+ aaiAdapter.getSelfLinksByEntityType(entityType);
+ updateActiveInventoryCounters(HttpMethod.GET, entityType, typeLinksResult);
+ processEntityTypeSelfLinks(entityType, typeLinksResult);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc.getMessage());
+
+ }
+
+ return null;
+ }
+
+ }, aaiExecutor).whenComplete((result, error) -> {
+
+ asyncWoH.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, error.getMessage());
+ }
+
+ });
+
+ }
+
+
+ while (asyncWoH.get() > 0) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + " summarizer waiting for all the links to be processed.");
+ }
+
+ Thread.sleep(250);
+ }
+
+ esWorkOnHand.set(entityCounters.size());
+
+ // start doing the real work
+ allWorkEnumerated = true;
+
+ insertEntityTypeCounters();
+
+ if (LOG.isDebugEnabled()) {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ sb.append("\n\nHistorical Entity Counters:");
+
+ for (Entry<String, AtomicInteger> entry : entityCounters.entrySet()) {
+ sb.append("\n").append(entry.getKey()).append(" = ").append(entry.getValue().get());
+ }
+
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString());
+
+ }
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, exc.getMessage());
+
+
+ esWorkOnHand.set(0);
+ allWorkEnumerated = true;
+
+ return OperationState.ERROR;
+ }
+
+ return OperationState.OK;
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+ */
+ @Override
+ public OperationState doSync() {
+ this.syncDurationInMs = -1;
+ String txnID = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnID, "HistoricalEntitySynchronizer", "", "Sync", "");
+
+ if (syncInProgress) {
+ LOG.info(AaiUiMsgs.HISTORICAL_SYNC_PENDING);
+ return OperationState.PENDING;
+ }
+
+ clearCache();
+
+ syncInProgress = true;
+ this.syncStartedTimeStampInMs = System.currentTimeMillis();
+ allWorkEnumerated = false;
+
+ return collectAllTheWork();
+ }
+
+ /**
+ * Process entity type self links.
+ *
+ * @param entityType the entity type
+ * @param operationResult the operation result
+ */
+ private void processEntityTypeSelfLinks(String entityType, OperationResult operationResult) {
+
+ JsonNode rootNode = null;
+
+ final String jsonResult = operationResult.getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
+
+ try {
+ rootNode = mapper.readTree(jsonResult);
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getMessage());
+ return;
+ }
+
+ JsonNode resultData = rootNode.get("result-data");
+ ArrayNode resultDataArrayNode = null;
+
+ if (resultData != null && resultData.isArray()) {
+ resultDataArrayNode = (ArrayNode) resultData;
+ entityCounters.put(entityType, new AtomicInteger(resultDataArrayNode.size()));
+ }
+ }
+
+ }
+
+ /**
+ * Insert entity type counters.
+ */
+ private void insertEntityTypeCounters() {
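+    /*
+     * Each counter is written to the index as a small JSON document; illustrative shape with
+     * made-up values (timestamp follows INSERTION_DATE_TIME_FORMAT):
+     *   { "count": 42, "entityType": "generic-vnf", "timestamp": "20170401T120000-0400" }
+     */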
+
+ if (esWorkOnHand.get() <= 0) {
+ return;
+ }
+
+ SimpleDateFormat dateFormat = new SimpleDateFormat(INSERTION_DATE_TIME_FORMAT);
+ Timestamp timestamp = new Timestamp(System.currentTimeMillis());
+ String currentFormattedTimeStamp = dateFormat.format(timestamp);
+
+ Set<Entry<String, AtomicInteger>> entityCounterEntries = entityCounters.entrySet();
+
+ for (Entry<String, AtomicInteger> entityCounterEntry : entityCounterEntries) {
+
+ supplyAsync(new Supplier<Void>() {
+
+ @Override
+ public Void get() {
+ MDC.setContextMap(contextMap);
+ String jsonString = Json.createObjectBuilder().add(
+ "count", entityCounterEntry.getValue().get())
+ .add("entityType", entityCounterEntry.getKey())
+ .add("timestamp", currentFormattedTimeStamp).build().toString();
+
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchPostUrl(indexName);
+ OperationResult or = elasticSearchAdapter.doPost(link, jsonString, MediaType.APPLICATION_JSON_TYPE);
+ updateElasticSearchCounters(HttpMethod.POST, entityCounterEntry.getKey(), or);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_STORE_FAILURE, exc.getMessage() );
+ }
+
+ return null;
+ }
+
+ }, esExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ });
+
+ }
+
+ while (esWorkOnHand.get() > 0) {
+
+ try {
+ Thread.sleep(500);
+ } catch (InterruptedException exc) {
+ LOG.error(AaiUiMsgs.INTERRUPTED, "historical Entities", exc.getMessage());
+ }
+ }
+
+ }
+
+ @Override
+ public SynchronizerState getState() {
+
+ if (!isSyncDone()) {
+ return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+ }
+
+ return SynchronizerState.IDLE;
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
+ */
+ @Override
+ public String getStatReport(boolean showFinalReport) {
+ syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
+ return this.getStatReport(syncDurationInMs, showFinalReport);
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
+ */
+ @Override
+ public void shutdown() {
+ this.shutdownExecutors();
+ }
+
+ @Override
+ protected boolean isSyncDone() {
+
+ int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
+
+ if (LOG.isDebugEnabled()) {
+      LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = "
+          + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated);
+ }
+
+ if (totalWorkOnHand > 0 || !allWorkEnumerated) {
+ return false;
+ }
+
+ this.syncInProgress = false;
+
+ return true;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache()
+ */
+ @Override
+ public void clearCache() {
+
+ if (syncInProgress) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "Historical Entity Summarizer in progress, request to clear cache ignored");
+ return;
+ }
+
+ super.clearCache();
+ this.resetCounters();
+ if (entityCounters != null) {
+ entityCounters.clear();
+ }
+
+ allWorkEnumerated = false;
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java
new file mode 100644
index 0000000..eb42489
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java
@@ -0,0 +1,94 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.aggregation.sync;
+
+import org.onap.aai.sparky.config.oxm.SearchableEntityLookup;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
+import org.onap.aai.sparky.sync.IndexIntegrityValidator;
+import org.onap.aai.sparky.sync.SyncControllerImpl;
+import org.onap.aai.sparky.sync.SyncControllerRegistrar;
+import org.onap.aai.sparky.sync.SyncControllerRegistry;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.config.SyncControllerConfig;
+
+public class HistoricalEntitySyncController extends SyncControllerImpl
+ implements SyncControllerRegistrar {
+
+ private SyncControllerRegistry syncControllerRegistry;
+
+ public HistoricalEntitySyncController(SyncControllerConfig syncControllerConfig,
+ ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
+ ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig,
+ int syncFrequencyInMinutes, NetworkStatisticsConfig aaiStatConfig,
+ NetworkStatisticsConfig esStatConfig, SearchableEntityLookup searchableEntityLookup,
+ ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception {
+ super(syncControllerConfig);
+
+ // final String controllerName = "Historical Entity Count Synchronizer";
+
+    long taskFrequencyInMs = syncFrequencyInMinutes * 60L * 1000L;
+
+ setDelayInMs(taskFrequencyInMs);
+ setSyncFrequencyInMs(taskFrequencyInMs);
+
+ IndexIntegrityValidator entityCounterHistoryValidator = new IndexIntegrityValidator(esAdapter,
+ schemaConfig, endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig));
+
+ registerIndexValidator(entityCounterHistoryValidator);
+
+    HistoricalEntitySummarizer historicalSummarizer = new HistoricalEntitySummarizer(schemaConfig,
+        syncControllerConfig.getNumInternalSyncWorkers(),
+        syncControllerConfig.getNumSyncActiveInventoryWorkers(),
+        syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig,
+        searchableEntityLookup);
+
+ historicalSummarizer.setAaiAdapter(aaiAdapter);
+ historicalSummarizer.setElasticSearchAdapter(esAdapter);
+
+ registerEntitySynchronizer(historicalSummarizer);
+
+ }
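+
+  /*
+   * Illustrative registration sketch (assumed usage; the actual wiring of the registry and of
+   * this controller is expected to live in the application's configuration, not here):
+   *
+   *   controller.setSyncControllerRegistry(syncControllerRegistry);
+   *   controller.registerController();   // no-op unless the controller config is enabled
+   */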
+
+ public SyncControllerRegistry getSyncControllerRegistry() {
+ return syncControllerRegistry;
+ }
+
+ public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) {
+ this.syncControllerRegistry = syncControllerRegistry;
+ }
+
+ @Override
+ public void registerController() {
+ if ( syncControllerRegistry != null ) {
+ if ( syncControllerConfig.isEnabled()) {
+ syncControllerRegistry.registerSyncController(this);
+ }
+ }
+
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java
new file mode 100644
index 0000000..8197398
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java
@@ -0,0 +1,178 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.analytics;
+
+import java.util.HashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * The Class AbstractStatistics.
+ */
+public class AbstractStatistics implements ComponentStatistics {
+
+ private HashMap<String, AtomicInteger> namedCounters;
+ private HashMap<String, HistogramSampler> namedHistograms;
+
+ /**
+ * Instantiates a new abstract statistics.
+ */
+ protected AbstractStatistics() {
+ namedCounters = new HashMap<String, AtomicInteger>();
+ namedHistograms = new HashMap<String, HistogramSampler>();
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.analytics.ComponentStatistics#addCounter(java.lang.String)
+ */
+ /*
+ * sync-lock the creation of counters during initialization, but run time should not use lock
+ * synchronization, only thread safe types
+ *
+ */
+ @Override
+ public synchronized void addCounter(String key) {
+
+ AtomicInteger counter = namedCounters.get(key);
+
+ if (counter == null) {
+ counter = new AtomicInteger(0);
+ namedCounters.put(key, counter);
+ }
+
+ }
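+
+  /*
+   * Illustrative subclass usage (counter and histogram names are made up):
+   *
+   *   addCounter("numSuccessfulPuts");                          // init-time (synchronized)
+   *   addHistogram("putLatency", "putLatencyMs", 5000, 10, 2);  // init-time (synchronized)
+   *   pegCounter("numSuccessfulPuts");                          // hot path (atomic counter)
+   *   updateHistogram("putLatency", 137);                       // hot path (sampler synchronizes internally)
+   */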
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.analytics.ComponentStatistics#pegCounter(java.lang.String)
+ */
+ @Override
+ public void pegCounter(String key) {
+
+ AtomicInteger counter = namedCounters.get(key);
+
+ if (counter != null) {
+ counter.incrementAndGet();
+ }
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.analytics.ComponentStatistics#incrementCounter(java.lang.String, int)
+ */
+ @Override
+ public void incrementCounter(String key, int value) {
+
+ AtomicInteger counter = namedCounters.get(key);
+
+ if (counter != null) {
+ counter.addAndGet(value);
+ }
+
+ }
+
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.analytics.ComponentStatistics#addHistogram(java.lang.String, java.lang.String, long, int, int)
+ */
+ @Override
+ public synchronized void addHistogram(String key, String histName, long maxYValue, int numBins,
+ int numDecimalPoints) {
+ HistogramSampler histSampler = namedHistograms.get(key);
+
+ if (histSampler == null) {
+ histSampler = new HistogramSampler(histName, maxYValue, numBins, numDecimalPoints);
+ namedHistograms.put(key, histSampler);
+ }
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.analytics.ComponentStatistics#updateHistogram(java.lang.String, long)
+ */
+ @Override
+ public void updateHistogram(String key, long value) {
+ HistogramSampler histSampler = namedHistograms.get(key);
+
+ if (histSampler != null) {
+ histSampler.track(value);
+ }
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.analytics.ComponentStatistics#reset()
+ */
+ @Override
+ public void reset() {
+
+ for (HistogramSampler h : namedHistograms.values()) {
+ h.clear();
+ }
+
+ for (AtomicInteger c : namedCounters.values()) {
+ c.set(0);
+ }
+
+ }
+
+ /**
+ * Gets the counter value.
+ *
+ * @param key the key
+ * @return the counter value
+ */
+ protected int getCounterValue(String key) {
+
+ AtomicInteger counter = namedCounters.get(key);
+
+ if (counter == null) {
+ return -1;
+ }
+
+ return counter.get();
+
+ }
+
+ /**
+ * Gets the histogram stats.
+ *
+ * @param key the key
+ * @param verboseEnabled the verbose enabled
+ * @param indentPadding the indent padding
+ * @return the histogram stats
+ */
+ protected String getHistogramStats(String key, boolean verboseEnabled, String indentPadding) {
+
+ HistogramSampler histSampler = namedHistograms.get(key);
+
+ if (histSampler == null) {
+ return null;
+ }
+
+ return histSampler.getStats(verboseEnabled, indentPadding);
+
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AveragingRingBuffer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AveragingRingBuffer.java
new file mode 100644
index 0000000..fd5f277
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AveragingRingBuffer.java
@@ -0,0 +1,121 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.analytics;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A fixed-size ring buffer that tracks the most recently added samples and exposes a rolling
+ * average of the values collected so far.
+ *
+ * @author davea
+ */
+public class AveragingRingBuffer {
+
+ private int numElements;
+
+ private long[] data;
+
+ private AtomicInteger index;
+
+ private long average;
+
+ private boolean initialAverageCalculated;
+
+ /**
+ * Instantiates a new averaging ring buffer.
+ *
+ * @param size the size
+ */
+ public AveragingRingBuffer(int size) {
+
+    if (size <= 0) {
+      throw new IllegalArgumentException("Size must be greater than zero");
+    }
+
+ this.initialAverageCalculated = false;
+ this.numElements = size;
+ this.data = new long[this.numElements];
+ this.index = new AtomicInteger(-1);
+ }
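+
+  /*
+   * Minimal usage sketch (values are illustrative):
+   *
+   *   AveragingRingBuffer responseTimes = new AveragingRingBuffer(64);
+   *   responseTimes.addSample(120);
+   *   responseTimes.addSample(95);
+   *   long rollingAverage = responseTimes.getAvg();
+   */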
+
+ /**
+ * Calculate average.
+ *
+ * @param maxArrayIndex the max array index
+ */
+ private void calculateAverage(int maxArrayIndex) {
+
+ long sum = 0;
+
+ for (int i = 0; i <= maxArrayIndex; i++) {
+ sum += data[i];
+ }
+
+ average = (sum / (maxArrayIndex + 1));
+
+ }
+
+ public long getAvg() {
+
+ if (!initialAverageCalculated) {
+ /*
+ * until the index rolls once we will calculate the average from the data that has been added
+ * to the array, not including the zero elements
+ */
+ if (index.get() < 0) {
+ calculateAverage(0);
+ } else {
+ calculateAverage(index.get());
+ }
+
+ }
+
+ return average;
+ }
+
+ /**
+ * Adds the sample.
+ *
+ * @param value the value
+ */
+ public synchronized void addSample(long value) {
+
+ index.incrementAndGet();
+
+ data[index.get()] = value;
+
+ if (index.get() == (numElements - 1)) {
+ calculateAverage(numElements - 1);
+
+ if (!initialAverageCalculated) {
+ initialAverageCalculated = true;
+ }
+
+ index.set(-1);
+ }
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/ComponentStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/ComponentStatistics.java
new file mode 100644
index 0000000..ef78f9e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/ComponentStatistics.java
@@ -0,0 +1,80 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.analytics;
+
+
+/**
+ * The Interface ComponentStatistics.
+ */
+public interface ComponentStatistics {
+
+ /**
+ * Adds the counter.
+ *
+ * @param key the key
+ */
+ public void addCounter(String key);
+
+ /**
+ * Peg counter.
+ *
+ * @param key the key
+ */
+ public void pegCounter(String key);
+
+ /**
+ * Increment counter.
+ *
+ * @param key the key
+ * @param value the value
+ */
+ public void incrementCounter(String key, int value);
+
+ /**
+ * Adds the histogram.
+ *
+ * @param key the key
+ * @param name the name
+ * @param maxYValue the max Y value
+ * @param numBins the num bins
+ * @param numDecimalPoints the num decimal points
+ */
+ public void addHistogram(String key, String name, long maxYValue, int numBins,
+ int numDecimalPoints);
+
+ /**
+ * Update histogram.
+ *
+ * @param key the key
+ * @param value the value
+ */
+ public void updateHistogram(String key, long value);
+
+ /**
+ * Reset.
+ */
+ public void reset();
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistogramSampler.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistogramSampler.java
new file mode 100644
index 0000000..55fb9d8
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistogramSampler.java
@@ -0,0 +1,286 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.analytics;
+
+/**
+ * A class that models a histogram for reporting and tracking long values with variable steps, bins,
+ * and floating point accuracy.
+ *
+ * @author davea.
+ */
+public final class HistogramSampler {
+
+ private String label;
+
+ private long binMaxValue;
+
+ private int numBins;
+
+ private double stepSize;
+
+ private long sampleValueTotal;
+
+ private long minValue = -1;
+
+ private long maxValue = 0;
+
+ private long numSamples = 0;
+
+ private long decimalPointAccuracy = 0;
+
+  private static final String FORMAT_FLOAT_TEMPLATE = "%%.%df";
+
+ private String floatFormatStr;
+
+ private long[] histogramBins;
+
+ /**
+ * Instantiates a new histogram sampler.
+ *
+ * @param label the label
+ * @param maxValue the max value
+ * @param numBins the num bins
+ * @param decimalPointAccuracy the decimal point accuracy
+ */
+ public HistogramSampler(String label, long maxValue, int numBins, int decimalPointAccuracy) {
+ this.label = label;
+ this.binMaxValue = maxValue;
+ this.numBins = numBins;
+ this.stepSize = ((double) binMaxValue / (double) numBins);
+ this.decimalPointAccuracy = decimalPointAccuracy;
+ this.floatFormatStr = String.format(FORMAT_FLOAT_TEMPLATE, this.decimalPointAccuracy);
+
+ /*
+ * [numBins + 1] => last bin is catch-all for outliers
+ */
+
+ initializeHistogramBins(numBins + 1);
+
+ }
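+
+  /*
+   * Minimal usage sketch (label and values are illustrative):
+   *
+   *   HistogramSampler latency = new HistogramSampler("aai-get-latency-ms", 5000, 10, 2);
+   *   latency.track(137);
+   *   String report = latency.getStats(true, "  ");
+   */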
+
+ /**
+ * Initialize histogram bins.
+ *
+ * @param numBins the num bins
+ */
+ private void initializeHistogramBins(int numBins) {
+
+ histogramBins = new long[numBins];
+ int counter = 0;
+ while (counter < numBins) {
+ histogramBins[counter] = 0;
+ counter++;
+ }
+
+ }
+
+  /*
+   * Is it really necessary to synchronize the collection, or should we simply switch the
+   * underlying data type to an AtomicLong?
+   */
+
+ /**
+ * Track.
+ *
+ * @param value the value
+ */
+ public synchronized void track(long value) {
+
+ if (value < 0) {
+ return;
+ }
+
+ sampleValueTotal += value;
+ numSamples++;
+
+ if (minValue == -1) {
+ minValue = value;
+ }
+
+ if (value < minValue) {
+ minValue = value;
+ }
+
+ if (value > maxValue) {
+ maxValue = value;
+ }
+
+ /*
+ * One step bin determination
+ */
+
+ if (value < (numBins * stepSize)) {
+
+ int index = (int) (value / stepSize);
+ histogramBins[index]++;
+
+ } else {
+ // peg the metric in the outlier bin
+ histogramBins[numBins - 1]++;
+ }
+
+ }
+
+ /**
+ * Clear.
+ */
+ public void clear() {
+
+ int counter = 0;
+ while (counter < numBins) {
+ histogramBins[counter] = 0;
+ counter++;
+ }
+
+ minValue = -1;
+ maxValue = 0;
+ numSamples = 0;
+ sampleValueTotal = 0;
+
+ }
+
+ /**
+ * Re initialize bins.
+ *
+ * @param label the label
+ * @param numBins the num bins
+ * @param maxValue the max value
+ * @param decimalPointAccuracy the decimal point accuracy
+ */
+ public void reInitializeBins(String label, int numBins, long maxValue, int decimalPointAccuracy) {
+ this.label = label;
+ this.decimalPointAccuracy = decimalPointAccuracy;
+ this.floatFormatStr = String.format(FORMAT_FLOAT_TEMPLATE, this.decimalPointAccuracy);
+ this.numBins = numBins;
+ this.minValue = -1;
+ this.maxValue = 0;
+ initializeHistogramBins(numBins);
+    this.stepSize = ((double) maxValue / (double) numBins);
+ clear();
+ }
+
+ public long getNumberOfSamples() {
+ return numSamples;
+ }
+
+ public long getTotalValueSum() {
+ return sampleValueTotal;
+ }
+
+ /**
+ * Gets the stats.
+ *
+ * @param formatted the formatted
+ * @param indentPadding the indent padding
+ * @return the stats
+ */
+ public String getStats(boolean formatted, String indentPadding) {
+
+ StringBuilder sb = new StringBuilder(128);
+
+
+ if (!formatted) {
+ // generate CSV in the following format
+
+ /*
+ * label,minValue,maxValue,avgValue,numSamples,stepSize,numSteps,stepCounters
+ */
+ sb.append(indentPadding);
+ sb.append(label).append(",");
+ sb.append(minValue).append(",");
+ sb.append(maxValue).append(",");
+ if (numSamples == 0) {
+ sb.append(0).append(",");
+ } else {
+ sb.append((sampleValueTotal / numSamples)).append(",");
+ }
+ sb.append(numSamples).append(",");
+ sb.append(numBins).append(",");
+ sb.append(String.format(floatFormatStr, stepSize));
+
+ int counter = 0;
+ while (counter < numBins) {
+
+ if (counter != (numBins)) {
+ sb.append(",");
+ }
+
+ sb.append(histogramBins[counter]);
+
+ counter++;
+
+ }
+
+ return sb.toString();
+
+ }
+
+ sb.append("\n");
+ sb.append(indentPadding).append("Label = ").append(label).append("\n");
+ sb.append(indentPadding).append("Min = ").append(minValue).append("\n");
+ sb.append(indentPadding).append("Max = ").append(maxValue).append("\n");
+ sb.append(indentPadding).append("numSamples = ").append(numSamples).append("\n");
+
+ if (numSamples == 0) {
+ sb.append(indentPadding).append("Avg = ").append(0).append("\n");
+ } else {
+ sb.append(indentPadding).append("Avg = ").append((sampleValueTotal / numSamples))
+ .append("\n");
+ }
+
+ sb.append(indentPadding).append("StepSize = ").append(String.format(floatFormatStr, stepSize))
+ .append("\n");
+
+ sb.append(indentPadding).append("Sample Histogram:").append("\n");
+
+ int counter = 0;
+ while (counter < numBins) {
+
+ if (counter == (numBins - 1)) {
+ // outlier bin
+ double leftBound = (stepSize * counter);
+ sb.append(indentPadding).append("\t")
+ .append(" x >= " + String.format(floatFormatStr, leftBound) + " : "
+ + histogramBins[counter])
+ .append("\n");
+
+ } else {
+ double leftBound = (stepSize * counter);
+ double rightBound = ((stepSize) * (counter + 1));
+ sb.append(indentPadding).append("\t")
+ .append((String.format(floatFormatStr, leftBound) + " < x < "
+ + String.format(floatFormatStr, rightBound) + " : " + histogramBins[counter]))
+ .append("\n");
+ }
+
+ counter++;
+
+ }
+
+ return sb.toString();
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java
new file mode 100644
index 0000000..1a534e3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java
@@ -0,0 +1,177 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.analytics;
+
+/**
+ * A simple class to model a historical counter. A set of values will be tracked and basic
+ * statistics will be calculated in real time (n, min, max, avg).
+ *
+ * @author davea
+ */
+public class HistoricalCounter {
+
+ private double min;
+
+ private double max;
+
+ private double totalOfSamples;
+
+ private long numSamples;
+
+ private double value;
+
+ private boolean maintainSingleValue;
+
+ /**
+ * Instantiates a new historical counter.
+ *
+ * @param trackSingleValue the track single value
+ */
+ public HistoricalCounter(boolean trackSingleValue) {
+ min = -1;
+ max = 0;
+ totalOfSamples = 0;
+ value = 0.0;
+ numSamples = 0;
+ this.maintainSingleValue = trackSingleValue;
+ }
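+
+  /*
+   * Minimal usage sketch (values are illustrative):
+   *
+   *   HistoricalCounter txnDurations = new HistoricalCounter(false);
+   *   txnDurations.update(42);
+   *   txnDurations.update(58);
+   *   double average = txnDurations.getAvg();   // 50.0
+   */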
+
+ public boolean isSingleValue() {
+ return maintainSingleValue;
+ }
+
+ /**
+ * Update.
+ *
+ * @param value the value
+ */
+ public synchronized void update(double value) {
+
+ if (value < 0) {
+ return;
+ }
+
+ if (maintainSingleValue) {
+
+ this.value = value;
+
+ } else {
+
+ if (min == -1) {
+ min = value;
+ }
+
+ if (value < min) {
+ min = value;
+ }
+
+ if (value > max) {
+ max = value;
+ }
+
+ totalOfSamples += value;
+ numSamples++;
+ }
+ }
+
+ public double getValue() {
+ return value;
+ }
+
+ public double getMin() {
+ return min;
+ }
+
+ public double getMax() {
+ return max;
+ }
+
+ public long getNumSamples() {
+ return numSamples;
+ }
+
+ public double getAvg() {
+ if (numSamples == 0) {
+ return 0;
+ }
+
+ return (totalOfSamples / numSamples);
+ }
+
+  public void setMin(double min) {
+ this.min = min;
+ }
+
+ public void setMax(double max) {
+ this.max = max;
+ }
+
+ public double getTotalOfSamples() {
+ return totalOfSamples;
+ }
+
+ public void setTotalOfSamples(double totalOfSamples) {
+ this.totalOfSamples = totalOfSamples;
+ }
+
+ public void setNumSamples(long numSamples) {
+ this.numSamples = numSamples;
+ }
+
+ public void setMaintainSingleValue(boolean maintainSingleValue) {
+ this.maintainSingleValue = maintainSingleValue;
+ }
+
+
+ /**
+ * Reset.
+ */
+ public synchronized void reset() {
+ min = -1;
+ max = 0;
+ numSamples = 0;
+ totalOfSamples = 0;
+ value = 0.0;
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder(32);
+
+ if (maintainSingleValue) {
+ sb.append("[ Val=").append(value).append(" ]");
+ } else {
+ sb.append("[ NumSamples=").append(numSamples).append(",");
+ sb.append(" Min=").append(min).append(",");
+ sb.append(" Max=").append(max).append(",");
+ sb.append(" Avg=").append(getAvg()).append(" ]");
+ }
+
+ return sb.toString();
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java
new file mode 100644
index 0000000..05ce775
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java
@@ -0,0 +1,105 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.autosuggestion.sync;
+
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner;
+import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
+import org.onap.aai.sparky.sync.IndexCleaner;
+import org.onap.aai.sparky.sync.IndexIntegrityValidator;
+import org.onap.aai.sparky.sync.SyncControllerImpl;
+import org.onap.aai.sparky.sync.SyncControllerRegistrar;
+import org.onap.aai.sparky.sync.SyncControllerRegistry;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.config.SyncControllerConfig;
+
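+/**
+ * Sync controller that provisions the auto-suggestion index and synchronizes suggestion
+ * documents from AAI into Elasticsearch.
+ *
+ * <p>Illustrative wiring (the bean references below are hypothetical and not taken from the
+ * project's Spring configuration):
+ *
+ * <pre>
+ *   AutoSuggestionSyncController controller = new AutoSuggestionSyncController(
+ *       syncControllerConfig, aaiAdapter, esAdapter, schemaConfig, endpointConfig, aaiStatConfig,
+ *       esStatConfig, oxmEntityLookup, suggestionEntityLookup, filtersConfig,
+ *       elasticSearchSchemaFactory);
+ *   controller.setSyncControllerRegistry(syncControllerRegistry);
+ *   controller.registerController();
+ * </pre>
+ */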
+public class AutoSuggestionSyncController extends SyncControllerImpl implements SyncControllerRegistrar {
+
+ private SyncControllerRegistry syncControllerRegistry;
+
+ public AutoSuggestionSyncController(SyncControllerConfig syncControllerConfig,
+ ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
+ ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig,
+ NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig,
+ OxmEntityLookup oxmEntityLookup, SuggestionEntityLookup suggestionEntityLookup,
+ FiltersConfig filtersConfig,
+ ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception {
+ super(syncControllerConfig);
+
+ // final String controllerName = "Auto Suggestion Synchronizer";
+
+ IndexIntegrityValidator autoSuggestionIndexValidator = new IndexIntegrityValidator(esAdapter,
+ schemaConfig, endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig));
+
+ registerIndexValidator(autoSuggestionIndexValidator);
+
+ AutosuggestionSynchronizer suggestionSynchronizer = new AutosuggestionSynchronizer(schemaConfig,
+ syncControllerConfig.getNumInternalSyncWorkers(),
+ syncControllerConfig.getNumSyncActiveInventoryWorkers(),
+ syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig,
+ oxmEntityLookup, suggestionEntityLookup, filtersConfig);
+
+ suggestionSynchronizer.setAaiAdapter(aaiAdapter);
+ suggestionSynchronizer.setElasticSearchAdapter(esAdapter);
+
+ registerEntitySynchronizer(suggestionSynchronizer);
+
+ IndexCleaner autosuggestIndexCleaner =
+ new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig);
+
+ registerIndexCleaner(autosuggestIndexCleaner);
+
+ }
+
+ public SyncControllerRegistry getSyncControllerRegistry() {
+ return syncControllerRegistry;
+ }
+
+
+
+ public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) {
+ this.syncControllerRegistry = syncControllerRegistry;
+ }
+
+
+
+ @Override
+ public void registerController() {
+
+    if (syncControllerRegistry != null && syncControllerConfig.isEnabled()) {
+      syncControllerRegistry.registerSyncController(this);
+    }
+
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java
new file mode 100644
index 0000000..baffa54
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java
@@ -0,0 +1,776 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.autosuggestion.sync;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.SuggestionEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
+import org.onap.aai.sparky.sync.IndexSynchronizer;
+import org.onap.aai.sparky.sync.SynchronizerConstants;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor;
+import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchPut;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.util.SuggestionsPermutation;
+import org.slf4j.MDC;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+/**
+ * Synchronizer that enumerates suggestible AAI entity instances via their self-links and upserts
+ * the corresponding suggestion documents into the Elasticsearch auto-suggestion index.
+ */
+public class AutosuggestionSynchronizer extends AbstractEntitySynchronizer
+ implements IndexSynchronizer {
+
+ private class RetrySuggestionEntitySyncContainer {
+ NetworkTransaction txn;
+ SuggestionSearchEntity ssec;
+
+ /**
+ * Instantiates a new RetrySuggestionEntitySyncContainer.
+ *
+ * @param txn the txn
+     * @param icer the suggestion search entity to retry
+ */
+ public RetrySuggestionEntitySyncContainer(NetworkTransaction txn, SuggestionSearchEntity icer) {
+ this.txn = txn;
+ this.ssec = icer;
+ }
+
+ public NetworkTransaction getNetworkTransaction() {
+ return txn;
+ }
+
+ public SuggestionSearchEntity getSuggestionSearchEntity() {
+ return ssec;
+ }
+ }
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(AutosuggestionSynchronizer.class);
+ private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ";
+
+ private boolean allWorkEnumerated;
+ private Deque<SelfLinkDescriptor> selflinks;
+ private ConcurrentHashMap<String, AtomicInteger> entityCounters;
+ private boolean syncInProgress;
+ private Map<String, String> contextMap;
+ protected ExecutorService esPutExecutor;
+ private Deque<RetrySuggestionEntitySyncContainer> retryQueue;
+ private Map<String, Integer> retryLimitTracker;
+ private OxmEntityLookup oxmEntityLookup;
+ private SuggestionEntityLookup suggestionEntityLookup;
+ private FiltersConfig filtersConfig;
+
+  /**
+   * Instantiates a new autosuggestion synchronizer.
+   *
+   * @param schemaConfig the Elasticsearch schema config that supplies the index name
+   * @throws Exception the exception
+   */
+ public AutosuggestionSynchronizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers,
+ int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
+ NetworkStatisticsConfig esStatConfig, OxmEntityLookup oxmEntityLookup,
+ SuggestionEntityLookup suggestionEntityLookup, FiltersConfig filtersConfig) throws Exception {
+
+ super(LOG, "ASES-" + schemaConfig.getIndexName().toUpperCase(), internalSyncWorkers, aaiWorkers,
+ esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig);
+
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.suggestionEntityLookup = suggestionEntityLookup;
+ this.allWorkEnumerated = false;
+ this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+ this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>();
+ this.synchronizerName = "Autosuggestion Entity Synchronizer";
+ this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS);
+ this.syncInProgress = false;
+ this.contextMap = MDC.getCopyOfContextMap();
+ this.esPutExecutor = NodeUtils.createNamedExecutor("SUES-ES-PUT", 5, LOG);
+ this.retryQueue = new ConcurrentLinkedDeque<RetrySuggestionEntitySyncContainer>();
+ this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
+ this.syncDurationInMs = -1;
+ this.filtersConfig = filtersConfig;
+ }
+
+ /**
+ * Collect all the work.
+ *
+ * @return the operation state
+ */
+ private OperationState collectAllTheWork() {
+ final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+ Map<String, SuggestionEntityDescriptor> descriptorMap =
+ suggestionEntityLookup.getSuggestionSearchEntityDescriptors();
+
+ if (descriptorMap.isEmpty()) {
+ LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES);
+ LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES);
+ return OperationState.ERROR;
+ }
+
+ Collection<String> syncTypes = descriptorMap.keySet();
+
+ try {
+
+      /*
+       * Launch a parallel async task to process the documents for each entity type (bounded by
+       * the maximum concurrency of the configured executor).
+       */
+
+ aaiWorkOnHand.set(syncTypes.size());
+
+ for (String key : syncTypes) {
+
+ supplyAsync(new Supplier<Void>() {
+
+ @Override
+ public Void get() {
+ MDC.setContextMap(contextMap);
+ OperationResult typeLinksResult = null;
+ try {
+ typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key);
+ aaiWorkOnHand.decrementAndGet();
+ processEntityTypeSelfLinks(typeLinksResult);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "An error occurred while processing entity self-links. Error: "
+ + exc.getMessage());
+ }
+
+ return null;
+ }
+
+ }, aaiExecutor).whenComplete((result, error) -> {
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "An error occurred getting data from AAI. Error = " + error.getMessage());
+ }
+ });
+
+ }
+
+ while (aaiWorkOnHand.get() != 0) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+ }
+
+ Thread.sleep(1000);
+ }
+
+ aaiWorkOnHand.set(selflinks.size());
+ allWorkEnumerated = true;
+ syncEntityTypes();
+
+ while (!isSyncDone()) {
+ performRetrySync();
+ Thread.sleep(1000);
+ }
+
+ /*
+ * Make sure we don't hang on to retries that failed which could cause issues during future
+ * syncs
+ */
+ retryLimitTracker.clear();
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "An error occurred while performing the sync. Error: " + exc.getMessage());
+ }
+
+ return OperationState.OK;
+
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+ */
+ @Override
+ public OperationState doSync() {
+ this.syncDurationInMs = -1;
+ syncStartedTimeStampInMs = System.currentTimeMillis();
+ String txnID = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnID, "AutosuggestionSynchronizer", "", "Sync", "");
+
+ return collectAllTheWork();
+ }
+
+ /**
+ * Process entity type self links.
+ *
+ * @param operationResult the operation result
+ */
+ private void processEntityTypeSelfLinks(OperationResult operationResult) {
+
+ JsonNode rootNode = null;
+
+ if ( operationResult == null ) {
+ return;
+ }
+
+ final String jsonResult = operationResult.getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
+
+      try {
+        rootNode = mapper.readTree(jsonResult);
+      } catch (IOException exc) {
+        String message = "Could not deserialize JSON (representing operation result) as node tree. "
+            + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage();
+        LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message);
+        return;
+      }
+
+      JsonNode resultData = rootNode.get("result-data");
+      ArrayNode resultDataArrayNode = null;
+
+      if (resultData != null && resultData.isArray()) {
+ resultDataArrayNode = (ArrayNode) resultData;
+
+ Iterator<JsonNode> elementIterator = resultDataArrayNode.elements();
+ JsonNode element = null;
+
+ while (elementIterator.hasNext()) {
+ element = elementIterator.next();
+
+ final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type");
+ final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link");
+
+ OxmEntityDescriptor descriptor = null;
+
+ if (resourceType != null && resourceLink != null) {
+
+ descriptor = oxmEntityLookup.getEntityDescriptors().get(resourceType);
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType);
+ // go to next element in iterator
+ continue;
+ }
+ selflinks.add(new SelfLinkDescriptor(resourceLink,
+ SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType));
+
+
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Sync entity types.
+ */
+ private void syncEntityTypes() {
+
+ while (selflinks.peek() != null) {
+
+ SelfLinkDescriptor linkDescriptor = selflinks.poll();
+ aaiWorkOnHand.decrementAndGet();
+
+ OxmEntityDescriptor descriptor = null;
+
+ if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+ descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType());
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+ // go to next element in iterator
+ continue;
+ }
+
+ NetworkTransaction txn = new NetworkTransaction();
+ txn.setDescriptor(descriptor);
+ txn.setLink(linkDescriptor.getSelfLink());
+ txn.setOperationType(HttpMethod.GET);
+ txn.setEntityType(linkDescriptor.getEntityType());
+
+ aaiWorkOnHand.incrementAndGet();
+
+ supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor)
+ .whenComplete((result, error) -> {
+
+ aaiWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage());
+ } else {
+ if (result == null) {
+ LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK,
+ linkDescriptor.getSelfLink());
+ } else {
+ updateActiveInventoryCounters(result);
+ fetchDocumentForUpsert(result);
+ }
+ }
+ });
+ }
+
+ }
+
+ }
+
+ /*
+ * Return a set of valid suggestion attributes for the provided entityName that are present in the
+ * JSON
+ *
+ * @param node JSON node in which the attributes should be found
+ *
+ * @param entityName Name of the entity
+ *
+   * @return list of all valid suggestion attribute names (keys)
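+   *
+   * For example (illustrative values only): if the OXM model lists suggestibleAttributes
+   * "prov-status,orchestration-status" for this entity and the JSON node only carries a
+   * non-empty "prov-status" field, the returned list contains just "prov-status".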
+ */
+ public List<String> getSuggestableAttrNamesFromReponse(JsonNode node, String entityName) {
+ List<String> suggestableAttr = new ArrayList<String>();
+
+ HashMap<String, String> desc =
+ suggestionEntityLookup.getSuggestionSearchEntityOxmModel().get(entityName);
+
+ if (desc != null) {
+
+ String attr = desc.get("suggestibleAttributes");
+
+ if (attr != null) {
+ suggestableAttr = Arrays.asList(attr.split(","));
+ List<String> suggestableValue = new ArrayList<String>();
+ for (String attribute : suggestableAttr) {
+ if (node.get(attribute) != null && node.get(attribute).asText().length() > 0) {
+ suggestableValue.add(attribute);
+ }
+ }
+ return suggestableValue;
+ }
+ }
+
+ return new ArrayList<String>();
+ }
+
+  /**
+   * Fetches the documents to upsert. The number of documents varies with the number of non-empty
+   * suggestible-attribute permutations available for the entity.
+   *
+   * @param txn the txn
+   */
+ private void fetchDocumentForUpsert(NetworkTransaction txn) {
+ if (!txn.getOperationResult().wasSuccessful()) {
+ String message = "Self link failure. Result - " + txn.getOperationResult().getResult();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ return;
+ }
+ try {
+ final String jsonResult = txn.getOperationResult().getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0) {
+
+ // Step 1: Calculate the number of possible permutations of attributes
+ String entityName = txn.getDescriptor().getEntityName();
+ JsonNode entityNode = mapper.readTree(jsonResult);
+
+ List<String> availableSuggestableAttrName =
+ getSuggestableAttrNamesFromReponse(entityNode, entityName);
+
+ ArrayList<ArrayList<String>> uniqueLists =
+ SuggestionsPermutation.getNonEmptyUniqueLists(availableSuggestableAttrName);
+ // Now we have a list of all possible permutations for the status that are
+ // defined for this entity type. Try inserting a document for every combination.
+ for (ArrayList<String> uniqueList : uniqueLists) {
+
+ SuggestionSearchEntity sse = new SuggestionSearchEntity(filtersConfig, suggestionEntityLookup);
+ sse.setSuggestableAttr(uniqueList);
+ sse.setFilterBasedPayloadFromResponse(entityNode, entityName, uniqueList);
+ sse.setLink(ActiveInventoryAdapter.extractResourcePath(txn.getLink()));
+ populateSuggestionSearchEntityDocument(sse, jsonResult, txn);
+ // The unique id for the document will be created at derive fields
+ sse.deriveFields();
+ // Insert the document only if it has valid statuses
+ if (sse.isSuggestableDoc()) {
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sse.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage());
+ }
+
+ if (link != null) {
+ NetworkTransaction n2 = new NetworkTransaction();
+ n2.setLink(link);
+ n2.setEntityType(txn.getEntityType());
+ n2.setDescriptor(txn.getDescriptor());
+ n2.setOperationType(HttpMethod.GET);
+
+ esWorkOnHand.incrementAndGet();
+
+ supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor)
+ .whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ performDocumentUpsert(result, sse);
+ }
+ });
+ }
+ }
+ }
+ }
+    } catch (JsonProcessingException exc) {
+      LOG.error(AaiUiMsgs.ERROR_GENERIC,
+          "There was a JSON processing error while processing the AAI entity payload. Error: "
+              + exc.getMessage());
+    } catch (IOException exc) {
+      LOG.error(AaiUiMsgs.ERROR_GENERIC,
+          "There was an I/O error while processing the AAI entity payload. Error: "
+              + exc.getMessage());
+    }
+ }
+
+ protected void populateSuggestionSearchEntityDocument(SuggestionSearchEntity sse, String result,
+ NetworkTransaction txn) throws JsonProcessingException, IOException {
+
+ OxmEntityDescriptor resultDescriptor = txn.getDescriptor();
+
+ sse.setEntityType(resultDescriptor.getEntityName());
+
+ JsonNode entityNode = mapper.readTree(result);
+
+ List<String> primaryKeyValues = new ArrayList<String>();
+ String pkeyValue = null;
+
+ for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) {
+ pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName);
+ if (pkeyValue != null) {
+ primaryKeyValues.add(pkeyValue);
+ } else {
+ String message = "populateSuggestionSearchEntityDocument(),"
+ + " pKeyValue is null for entityType = " + resultDescriptor.getEntityName();
+ LOG.warn(AaiUiMsgs.WARN_GENERIC, message);
+ }
+ }
+
+ final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/");
+ sse.setEntityPrimaryKeyValue(primaryCompositeKeyValue);
+ sse.generateSuggestionInputPermutations();
+ }
+
+ protected void performDocumentUpsert(NetworkTransaction esGetTxn, SuggestionSearchEntity sse) {
+    /**
+     * As part of the response processing we need to do the following:
+     * <ul>
+     * <li>1. Extract the version (if present); it will be the ETAG when we use the
+     * Search-Abstraction-Service</li>
+     * <li>2. Spawn the next task, which is the PUT operation into elastic, with or without the
+     * version tag:
+     * <ul>
+     * <li>a) if version is null or RC=404, do a standard PUT with no version tag</li>
+     * <li>b) if version != null, do a PUT with _update?version=versionNumber in the URI to
+     * elastic</li>
+     * </ul></li>
+     * </ul>
+     */
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sse.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+ return;
+ }
+
+ boolean wasEntryDiscovered = false;
+ if (esGetTxn.getOperationResult().getResultCode() == 404) {
+ LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, sse.getEntityPrimaryKeyValue());
+ } else if (esGetTxn.getOperationResult().getResultCode() == 200) {
+ wasEntryDiscovered = true;
+ } else {
+      /*
+       * Not being a 200 does not necessarily mean a failure; e.g. 201 is returned on create, and
+       * 500 when ES cannot find the document. TODO -> Should we return here?
+       */
+ LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE,
+ String.valueOf(esGetTxn.getOperationResult().getResultCode()));
+ return;
+ }
+    // Insert a new document only if the payload is different.
+    // This is determined by hashing the payload and using the hash as the id for the document.
+    //
+ if (!wasEntryDiscovered) {
+ try {
+ String jsonPayload = null;
+
+ jsonPayload = sse.getAsJson();
+ if (link != null && jsonPayload != null) {
+
+ NetworkTransaction updateElasticTxn = new NetworkTransaction();
+ updateElasticTxn.setLink(link);
+ updateElasticTxn.setEntityType(esGetTxn.getEntityType());
+ updateElasticTxn.setDescriptor(esGetTxn.getDescriptor());
+ updateElasticTxn.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter),
+ esPutExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ String message = "Suggestion search entity sync UPDATE PUT error - "
+ + error.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
+ } else {
+ updateElasticSearchCounters(result);
+ processStoreDocumentResult(result, esGetTxn, sse);
+ }
+ });
+ }
+ } catch (Exception exc) {
+ String message =
+ "Exception caught during suggestion search entity sync PUT operation. Message - "
+ + exc.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
+ }
+ }
+ }
+
+ private void processStoreDocumentResult(NetworkTransaction esPutResult,
+ NetworkTransaction esGetResult, SuggestionSearchEntity sse) {
+
+ OperationResult or = esPutResult.getOperationResult();
+
+ if (!or.wasSuccessful()) {
+ if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) {
+
+ if (shouldAllowRetry(sse.getId())) {
+ esWorkOnHand.incrementAndGet();
+
+ RetrySuggestionEntitySyncContainer rssec =
+ new RetrySuggestionEntitySyncContainer(esGetResult, sse);
+ retryQueue.push(rssec);
+
+ String message = "Store document failed during suggestion search entity synchronization"
+ + " due to version conflict. Entity will be re-synced.";
+ LOG.warn(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
+ }
+ } else {
+ String message =
+ "Store document failed during suggestion search entity synchronization with result code "
+ + or.getResultCode() + " and result message " + or.getResult();
+ LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
+ }
+ }
+ }
+
+ /**
+ * Perform retry sync.
+ */
+ private void performRetrySync() {
+ while (retryQueue.peek() != null) {
+
+ RetrySuggestionEntitySyncContainer susc = retryQueue.poll();
+ if (susc != null) {
+
+ SuggestionSearchEntity sus = susc.getSuggestionSearchEntity();
+ NetworkTransaction txn = susc.getNetworkTransaction();
+
+ String link = null;
+ try {
+ /*
+ * In this retry flow the se object has already derived its fields
+ */
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sus.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage());
+ }
+
+ if (link != null) {
+ NetworkTransaction retryTransaction = new NetworkTransaction();
+ retryTransaction.setLink(link);
+ retryTransaction.setEntityType(txn.getEntityType());
+ retryTransaction.setDescriptor(txn.getDescriptor());
+ retryTransaction.setOperationType(HttpMethod.GET);
+
+ /*
+ * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already
+ * called incrementAndGet when queuing the failed PUT!
+ */
+
+ supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter),
+ esExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ performDocumentUpsert(result, sus);
+ }
+ });
+ }
+
+ }
+ }
+ }
+
+ /**
+ * Should allow retry.
+ *
+ * @param id the id
+ * @return true, if successful
+ */
+ private boolean shouldAllowRetry(String id) {
+ boolean isRetryAllowed = true;
+ if (retryLimitTracker.get(id) != null) {
+ Integer currentCount = retryLimitTracker.get(id);
+ if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) {
+ isRetryAllowed = false;
+ String message = "Searchable entity re-sync limit reached for " + id
+ + ", re-sync will no longer be attempted for this entity";
+ LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
+ } else {
+        Integer newCount = Integer.valueOf(currentCount.intValue() + 1);
+ retryLimitTracker.put(id, newCount);
+ }
+ } else {
+      Integer firstRetryCount = Integer.valueOf(1);
+ retryLimitTracker.put(id, firstRetryCount);
+ }
+
+ return isRetryAllowed;
+ }
+
+
+
+ @Override
+ public SynchronizerState getState() {
+
+ if (!isSyncDone()) {
+ return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+ }
+
+ return SynchronizerState.IDLE;
+
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
+ */
+ @Override
+ public String getStatReport(boolean showFinalReport) {
+ syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
+ return getStatReport(syncDurationInMs, showFinalReport);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
+ */
+ @Override
+ public void shutdown() {
+ this.shutdownExecutors();
+ }
+
+ @Override
+ protected boolean isSyncDone() {
+
+ int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = "
+ + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated);
+ }
+
+ if (totalWorkOnHand > 0 || !allWorkEnumerated) {
+ return false;
+ }
+
+ this.syncInProgress = false;
+
+ return true;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache()
+ */
+ @Override
+ public void clearCache() {
+
+ if (syncInProgress) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Autosuggestion Entity Summarizer in progress, request to clear cache ignored");
+ return;
+ }
+
+ super.clearCache();
+ this.resetCounters();
+ if (entityCounters != null) {
+ entityCounters.clear();
+ }
+
+ allWorkEnumerated = false;
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java
new file mode 100644
index 0000000..7226c27
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java
@@ -0,0 +1,197 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.autosuggestion.sync;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
+import org.onap.aai.sparky.sync.IndexSynchronizer;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.entity.AggregationSuggestionEntity;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchPut;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+
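+/**
+ * Synchronizer that publishes a single aggregation suggestion entity (the VNF alias suggestion)
+ * into the Elasticsearch suggestion index, retrying the PUT until it succeeds.
+ */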
+public class VnfAliasSuggestionSynchronizer extends AbstractEntitySynchronizer
+ implements IndexSynchronizer {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(VnfAliasSuggestionSynchronizer.class);
+
+ private boolean isSyncInProgress;
+ private boolean shouldPerformRetry;
+ private Map<String, String> contextMap;
+ protected ExecutorService esPutExecutor;
+ private FiltersConfig filtersConfig;
+
+ public VnfAliasSuggestionSynchronizer(ElasticSearchSchemaConfig schemaConfig,
+ int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
+ NetworkStatisticsConfig esStatConfig, FiltersConfig filtersConfig) throws Exception {
+ super(LOG, "VASS-" + schemaConfig.getIndexName().toUpperCase(), internalSyncWorkers, aaiWorkers,
+ esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig);
+
+ this.isSyncInProgress = false;
+ this.shouldPerformRetry = false;
+ this.synchronizerName = "VNFs Alias Suggestion Synchronizer";
+ this.contextMap = MDC.getCopyOfContextMap();
+ this.esPutExecutor = NodeUtils.createNamedExecutor("ASS-ES-PUT", 2, LOG);
+ this.filtersConfig = filtersConfig;
+ }
+
+ @Override
+ protected boolean isSyncDone() {
+ int totalWorkOnHand = esWorkOnHand.get();
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand);
+ }
+
+ if (totalWorkOnHand > 0 || !isSyncInProgress) {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public OperationState doSync() {
+ isSyncInProgress = true;
+ this.syncDurationInMs = -1;
+ syncStartedTimeStampInMs = System.currentTimeMillis();
+
+ syncEntity();
+
+ while (!isSyncDone()) {
+ try {
+ if (shouldPerformRetry) {
+ syncEntity();
+ }
+ Thread.sleep(1000);
+      } catch (Exception exc) {
+        // Deliberately ignored (e.g. InterruptedException from the sleep); the loop simply
+        // re-evaluates the sync state on the next iteration.
+      }
+ }
+
+ return OperationState.OK;
+ }
+
+ private void syncEntity() {
+ String txnId = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnId, synchronizerName, "", "Sync", "");
+
+ AggregationSuggestionEntity syncEntity = new AggregationSuggestionEntity(filtersConfig);
+ syncEntity.deriveFields();
+ syncEntity.initializeFilters();
+
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), syncEntity.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+ }
+
+ try {
+ String jsonPayload = null;
+ jsonPayload = syncEntity.getAsJson();
+ if (link != null && jsonPayload != null) {
+
+ NetworkTransaction elasticPutTxn = new NetworkTransaction();
+ elasticPutTxn.setLink(link);
+ elasticPutTxn.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+ final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+ supplyAsync(new PerformElasticSearchPut(jsonPayload, elasticPutTxn,
+ elasticSearchAdapter, contextMap), esPutExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ String message = "Aggregation suggestion entity sync UPDATE PUT error - "
+ + error.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message);
+ } else {
+ updateElasticSearchCounters(result);
+ wasEsOperationSuccessful(result);
+ }
+ });
+ }
+ } catch (Exception exc) {
+ String message =
+ "Exception caught during aggregation suggestion entity sync PUT operation. Message - "
+ + exc.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message);
+ }
+ }
+
+ private void wasEsOperationSuccessful(NetworkTransaction result) {
+ if (result != null) {
+ OperationResult opResult = result.getOperationResult();
+
+ if (!opResult.wasSuccessful()) {
+ shouldPerformRetry = true;
+ } else {
+ isSyncInProgress = false;
+ shouldPerformRetry = false;
+ }
+ }
+ }
+
+ @Override
+ public SynchronizerState getState() {
+ if (!isSyncDone()) {
+ return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+ }
+
+ return SynchronizerState.IDLE;
+ }
+
+ @Override
+ public String getStatReport(boolean shouldDisplayFinalReport) {
+ syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
+ return getStatReport(syncDurationInMs, shouldDisplayFinalReport);
+ }
+
+ @Override
+ public void shutdown() {
+ this.shutdownExecutors();
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java
new file mode 100644
index 0000000..f6504ad
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java
@@ -0,0 +1,99 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.autosuggestion.sync;
+
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner;
+import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
+import org.onap.aai.sparky.sync.IndexCleaner;
+import org.onap.aai.sparky.sync.IndexIntegrityValidator;
+import org.onap.aai.sparky.sync.SyncControllerImpl;
+import org.onap.aai.sparky.sync.SyncControllerRegistrar;
+import org.onap.aai.sparky.sync.SyncControllerRegistry;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.config.SyncControllerConfig;
+
+public class VnfAliasSyncController extends SyncControllerImpl implements SyncControllerRegistrar {
+
+ private SyncControllerRegistry syncControllerRegistry;
+
+ public VnfAliasSyncController(SyncControllerConfig syncControllerConfig,
+ ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
+ ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig,
+ NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig,
+ FiltersConfig filtersConfig,
+ ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception {
+ super(syncControllerConfig);
+
+ // final String controllerName = "VNFs Alias Suggestion Synchronizer";
+
+ IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig,
+ endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig));
+
+ registerIndexValidator(indexValidator);
+
+ VnfAliasSuggestionSynchronizer synchronizer = new VnfAliasSuggestionSynchronizer(schemaConfig,
+ syncControllerConfig.getNumInternalSyncWorkers(),
+ syncControllerConfig.getNumSyncActiveInventoryWorkers(),
+ syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig, filtersConfig);
+
+ synchronizer.setAaiAdapter(aaiAdapter);
+ synchronizer.setElasticSearchAdapter(esAdapter);
+
+ registerEntitySynchronizer(synchronizer);
+
+
+ IndexCleaner indexCleaner =
+ new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig);
+
+ registerIndexCleaner(indexCleaner);
+
+ }
+
+ public SyncControllerRegistry getSyncControllerRegistry() {
+ return syncControllerRegistry;
+ }
+
+ public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) {
+ this.syncControllerRegistry = syncControllerRegistry;
+ }
+
+ @Override
+ public void registerController() {
+
+    if (syncControllerRegistry != null && syncControllerConfig.isEnabled()) {
+      syncControllerRegistry.registerSyncController(this);
+    }
+
+ }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java
new file mode 100644
index 0000000..624573f
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java
@@ -0,0 +1,90 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.common.search;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.aai.sparky.search.entity.SearchSuggestion;
+import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+
+@JsonInclude(Include.NON_NULL)
+public class CommonSearchSuggestion implements SearchSuggestion {
+ protected String hashId;
+ protected String route;
+ protected String text;
+ protected List<UiFilterValueEntity> filterValues = new ArrayList<>();
+
+ public CommonSearchSuggestion() {}
+
+ public CommonSearchSuggestion(String hashId, String route, String text, String perspective,
+ List<UiFilterValueEntity> filterValues) {
+ this.hashId = hashId;
+ this.route = route;
+ this.text = text;
+ this.filterValues = filterValues;
+ }
+
+ public List<UiFilterValueEntity> getFilterValues() {
+ return filterValues;
+ }
+
+ public String getHashId() {
+ return hashId;
+ }
+
+ public String getRoute() {
+ return route;
+ }
+
+ public String getText() {
+ return text;
+ }
+
+ public void setHashId(String hashId) {
+ this.hashId = hashId;
+ }
+
+ public void setRoute(String route) {
+ this.route = route;
+ }
+
+ public void setText(String text) {
+ this.text = text;
+ }
+
+ @Override
+ public String toString() {
+ return "CommonSearchSuggestion [" + (hashId != null ? "hashId=" + hashId + ", " : "")
+ + (route != null ? "route=" + route + ", " : "")
+ + (text != null ? "text=" + text + ", " : "")
+ + (filterValues != null ? "filterValues=" + filterValues : "") + "]";
+ }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/SparkyResourceLoader.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/SparkyResourceLoader.java
new file mode 100644
index 0000000..286b445
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/SparkyResourceLoader.java
@@ -0,0 +1,125 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+import org.springframework.context.ResourceLoaderAware;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.ResourceLoader;
+
+public class SparkyResourceLoader implements ResourceLoaderAware {
+
+
+ private static final String FILE_URI = "file:";
+ private ResourceLoader resourceLoader;
+ private String configHomeEnvVar;
+
+ // private static Logger LOG = LoggerFactory.getInstance().getLogger(SparkyResourceLoader.class);
+
+ @Override
+ public void setResourceLoader(ResourceLoader resourceLoader) {
+ this.resourceLoader = resourceLoader;
+ }
+
+ public String getFullFileUri(String uriFilePath) {
+ return FILE_URI + System.getProperty(configHomeEnvVar) + uriFilePath;
+ }
+
+ public String getAbsolutePath(String uriFilePath) {
+ return System.getProperty(configHomeEnvVar) + uriFilePath;
+ }
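+
+  /*
+   * Illustrative resolution (hypothetical values): if the system property named by
+   * configHomeEnvVar resolves to "/opt/app/sparky/config" and uriFilePath is
+   * "/filters/aaiui_filters.json", then getFullFileUri(...) returns
+   * "file:/opt/app/sparky/config/filters/aaiui_filters.json" and getAbsolutePath(...) returns
+   * "/opt/app/sparky/config/filters/aaiui_filters.json".
+   */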
+
+  protected Resource getResource(String uriFilePath, boolean isRelative) {
+
+    if (isRelative) {
+      // resolve the relative path against the configured home directory
+      return resourceLoader.getResource(getFullFileUri(uriFilePath));
+    }
+
+    String fileUri = uriFilePath;
+
+    if (!uriFilePath.startsWith(FILE_URI)) {
+      fileUri = FILE_URI + uriFilePath;
+    }
+
+    return resourceLoader.getResource(fileUri);
+
+  }
+
+ public File getResourceAsFile(String uriFilePath, boolean isRelativePath) throws IOException {
+
+ Resource resource = getResource(uriFilePath, isRelativePath);
+
+ if (resource.exists()) {
+ return resource.getFile();
+ }
+
+ return null;
+
+ }
+
+ public byte[] getResourceAsBytes(String uriFilePath, boolean isRelativePath) throws IOException {
+
+ Resource resource = getResource(uriFilePath, isRelativePath);
+
+ if (resource.exists()) {
+ return getResourceAsBytes(resource);
+ }
+
+ return null;
+ }
+
+ public byte[] getResourceAsBytes(Resource resource) throws IOException {
+
+ if ( resource != null && resource.exists()) {
+ return Files.readAllBytes(Paths.get(resource.getFile().getAbsolutePath()));
+ }
+
+ return null;
+ }
+
+ public String getResourceAsString(String uriFilePath, boolean isRelativePath) throws IOException {
+
+ Resource resource = getResource(uriFilePath, isRelativePath);
+
+ if (resource.exists()) {
+ return new String(getResourceAsBytes(resource));
+ }
+
+ return null;
+ }
+
+ public String getConfigHomeEnvVar() {
+ return configHomeEnvVar;
+ }
+
+ public void setConfigHomeEnvVar(String configHomeEnvVar) {
+ this.configHomeEnvVar = configHomeEnvVar;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java
new file mode 100644
index 0000000..d632c5a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * The Class CrossEntityReference.
+ */
+public class CrossEntityReference {
+ private String targetEntityType;
+ private List<String> referenceAttributes;
+
+ /**
+ * Instantiates a new cross entity reference.
+ */
+ public CrossEntityReference() {
+ targetEntityType = null;
+ referenceAttributes = new ArrayList<String>();
+ }
+
+ public String getTargetEntityType() {
+ return targetEntityType;
+ }
+
+ public void setTargetEntityType(String targetEntityType) {
+ this.targetEntityType = targetEntityType;
+ }
+
+ public List<String> getReferenceAttributes() {
+ return referenceAttributes;
+ }
+
+ public void setReferenceAttributes(List<String> referenceAttributes) {
+ this.referenceAttributes = referenceAttributes;
+ }
+
+ /**
+ * Adds the reference attribute.
+ *
+ * @param additionalAttribute the additional attribute
+ */
+ public void addReferenceAttribute(String additionalAttribute) {
+ referenceAttributes.add(additionalAttribute);
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "CrossEntityReference [targetEntityType=" + targetEntityType + ", referenceAttributes="
+ + referenceAttributes + "]";
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java
new file mode 100644
index 0000000..c44b1f4
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java
@@ -0,0 +1,67 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+public class CrossEntityReferenceDescriptor extends OxmEntityDescriptor {
+ protected CrossEntityReference crossEntityReference;
+
+ public CrossEntityReference getCrossEntityReference() {
+ return crossEntityReference;
+ }
+
+ public void setCrossEntityReference(CrossEntityReference crossEntityReference) {
+ this.crossEntityReference = crossEntityReference;
+ }
+
+ /**
+ * Checks for cross entity references.
+ *
+ * @return true, if successful
+ */
+ public boolean hasCrossEntityReferences() {
+ if (this.crossEntityReference == null) {
+ return false;
+ }
+ if (!this.crossEntityReference.getReferenceAttributes().isEmpty()) {
+ return true;
+ }
+ return false;
+ }
+
+
+ @Override
+ public String toString() {
+ return "CrossEntityReferenceDescriptor ["
+ + (crossEntityReference != null ? "crossEntityReference=" + crossEntityReference + ", "
+ : "")
+ + (entityName != null ? "entityName=" + entityName + ", " : "")
+ + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames
+ : "")
+ + "]";
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java
new file mode 100644
index 0000000..603b93d
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java
@@ -0,0 +1,136 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.eclipse.persistence.internal.oxm.mappings.Descriptor;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+
+public class CrossEntityReferenceLookup implements OxmModelProcessor {
+
+ private Map<String, HashMap<String, String>> crossReferenceEntityOxmModel;
+ private Map<String, CrossEntityReferenceDescriptor> crossReferenceEntityDescriptors;
+
+
+ public CrossEntityReferenceLookup() {
+ crossReferenceEntityOxmModel = new LinkedHashMap<String, HashMap<String, String>>();
+ crossReferenceEntityDescriptors = new HashMap<String, CrossEntityReferenceDescriptor>();
+ }
+
+ @Override
+ public void processOxmModel(DynamicJAXBContext jaxbContext) {
+
+ @SuppressWarnings("rawtypes")
+ List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors();
+
+ for (@SuppressWarnings("rawtypes")
+ Descriptor desc : descriptorsList) {
+
+ DynamicType entity = jaxbContext.getDynamicType(desc.getAlias());
+
+ LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>();
+
+ // Not all fields have key attributes
+ if (desc.getPrimaryKeyFields() != null) {
+ oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString()
+ .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", ""));
+ }
+
+ String entityName = desc.getDefaultRootElement();
+
+ // add entityName
+ oxmProperties.put("entityName", entityName);
+
+ Map<String, String> properties = entity.getDescriptor().getProperties();
+ if (properties != null) {
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+
+ if (entry.getKey().equalsIgnoreCase("crossEntityReference")) {
+ oxmProperties.put("crossEntityReference", entry.getValue());
+ }
+ }
+ }
+
+ if (oxmProperties.containsKey("crossEntityReference")) {
+ crossReferenceEntityOxmModel.put(entityName, oxmProperties);
+ }
+
+ }
+
+ for (Entry<String, HashMap<String, String>> crossRefModel : crossReferenceEntityOxmModel
+ .entrySet()) {
+ HashMap<String, String> attribute = crossRefModel.getValue();
+ CrossEntityReferenceDescriptor entity = new CrossEntityReferenceDescriptor();
+ entity.setEntityName(attribute.get("entityName"));
+ entity.setPrimaryKeyAttributeNames(
+ Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(",")));
+
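+      // The OXM "crossEntityReference" property value is expected to be a comma-separated
+      // list: the first token names the target entity type and the remaining tokens are the
+      // reference attribute names.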
+ List<String> crossEntityRefTokens =
+ Arrays.asList(attribute.get("crossEntityReference").split(","));
+
+ if (crossEntityRefTokens.size() >= 2) {
+ CrossEntityReference entityRef = new CrossEntityReference();
+ entityRef.setTargetEntityType(crossEntityRefTokens.get(0));
+
+ for (int i = 1; i < crossEntityRefTokens.size(); i++) {
+ entityRef.addReferenceAttribute(crossEntityRefTokens.get(i));
+ }
+
+ entity.setCrossEntityReference(entityRef);
+ }
+ crossReferenceEntityDescriptors.put(attribute.get("entityName"), entity);
+ }
+
+ }
+
+ public Map<String, HashMap<String, String>> getCrossReferenceEntityOxmModel() {
+ return crossReferenceEntityOxmModel;
+ }
+
+ public void setCrossReferenceEntityOxmModel(
+ Map<String, HashMap<String, String>> crossReferenceEntityOxmModel) {
+ this.crossReferenceEntityOxmModel = crossReferenceEntityOxmModel;
+ }
+
+ public Map<String, CrossEntityReferenceDescriptor> getCrossReferenceEntityDescriptors() {
+ return crossReferenceEntityDescriptors;
+ }
+
+ public void setCrossReferenceEntityDescriptors(
+ Map<String, CrossEntityReferenceDescriptor> crossReferenceEntityDescriptors) {
+ this.crossReferenceEntityDescriptors = crossReferenceEntityDescriptors;
+ }
+
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java
new file mode 100644
index 0000000..4e995a5
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java
@@ -0,0 +1,61 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+public class GeoEntityDescriptor extends OxmEntityDescriptor {
+
+ protected String geoLatName;
+
+ protected String geoLongName;
+
+ public String getGeoLatName() {
+ return geoLatName;
+ }
+
+ public void setGeoLatName(String geoLatName) {
+ this.geoLatName = geoLatName;
+ }
+
+ public String getGeoLongName() {
+ return geoLongName;
+ }
+
+ public void setGeoLongName(String geoLongName) {
+ this.geoLongName = geoLongName;
+ }
+
+ @Override
+ public String toString() {
+ return "GeoEntityDescriptor [" + (geoLatName != null ? "geoLatName=" + geoLatName + ", " : "")
+ + (geoLongName != null ? "geoLongName=" + geoLongName + ", " : "")
+ + (entityName != null ? "entityName=" + entityName + ", " : "")
+ + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames
+ : "")
+ + "]";
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java
new file mode 100644
index 0000000..1e61345
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java
@@ -0,0 +1,137 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.eclipse.persistence.internal.oxm.mappings.Descriptor;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+
+public class GeoEntityLookup implements OxmModelProcessor {
+
+ private Map<String, HashMap<String, String>> geoEntityOxmModel;
+
+ private Map<String, GeoOxmEntityDescriptor> geoEntityDescriptors;
+
+ public GeoEntityLookup() {
+ geoEntityOxmModel = new LinkedHashMap<String, HashMap<String, String>>();
+ geoEntityDescriptors = new HashMap<String, GeoOxmEntityDescriptor>();
+ }
+
+ public Map<String, HashMap<String, String>> getGeoEntityOxmModel() {
+ return geoEntityOxmModel;
+ }
+
+ public void setGeoEntityOxmModel(Map<String, HashMap<String, String>> geoEntityOxmModel) {
+ this.geoEntityOxmModel = geoEntityOxmModel;
+ }
+
+ public Map<String, GeoOxmEntityDescriptor> getGeoEntityDescriptors() {
+ return geoEntityDescriptors;
+ }
+
+ public void setGeoEntityDescriptors(Map<String, GeoOxmEntityDescriptor> geoEntityDescriptors) {
+ this.geoEntityDescriptors = geoEntityDescriptors;
+ }
+
+ @Override
+ public void processOxmModel(DynamicJAXBContext jaxbContext) {
+
+ @SuppressWarnings("rawtypes")
+ List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors();
+
+ for (@SuppressWarnings("rawtypes")
+ Descriptor desc : descriptorsList) {
+
+ DynamicType entity = jaxbContext.getDynamicType(desc.getAlias());
+
+ LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>();
+
+ // Not all fields have key attributes
+ if (desc.getPrimaryKeyFields() != null) {
+ oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString()
+ .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", ""));
+ }
+
+ String entityName = desc.getDefaultRootElement();
+
+ // add entityName
+ oxmProperties.put("entityName", entityName);
+
+ Map<String, String> properties = entity.getDescriptor().getProperties();
+
+ if (properties != null) {
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+
+ if (entry.getKey().equalsIgnoreCase("geoLat")) {
+ if (entry.getValue().length() > 0) {
+ oxmProperties.put("geoLat", entry.getValue());
+ }
+ } else if (entry.getKey().equalsIgnoreCase("geoLong")) {
+ if (entry.getValue().length() > 0) {
+ oxmProperties.put("geoLong", entry.getValue());
+ }
+ }
+ }
+ }
+
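+      // Only entities that declare both geoLat and geoLong OXM properties are retained in
+      // the geo entity model.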
+ if (oxmProperties.containsKey("geoLat") && oxmProperties.containsKey("geoLong")) {
+ geoEntityOxmModel.put(entityName, oxmProperties);
+ }
+
+ }
+
+ for (Entry<String, HashMap<String, String>> entityModel : geoEntityOxmModel.entrySet()) {
+
+ HashMap<String, String> attribute = entityModel.getValue();
+
+ GeoOxmEntityDescriptor entity = new GeoOxmEntityDescriptor();
+
+ entity.setEntityName(attribute.get("entityName"));
+
+ if (attribute.containsKey("primaryKeyAttributeNames")) {
+
+ entity.setPrimaryKeyAttributeNames(
+ Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(",")));
+
+ if (attribute.containsKey("geoLat") || attribute.containsKey("geoLong")) {
+ entity.setGeoLatName(attribute.get("geoLat"));
+ entity.setGeoLongName(attribute.get("geoLong"));
+ }
+
+ geoEntityDescriptors.put(attribute.get("entityName"), entity);
+ }
+ }
+
+ }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java
new file mode 100644
index 0000000..03fb9d6
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java
@@ -0,0 +1,71 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+public class GeoOxmEntityDescriptor extends OxmEntityDescriptor {
+
+ private String geoLatName;
+
+ private String geoLongName;
+
+ public String getGeoLatName() {
+ return geoLatName;
+ }
+
+ public void setGeoLatName(String geoLatName) {
+ this.geoLatName = geoLatName;
+ }
+
+ public String getGeoLongName() {
+ return geoLongName;
+ }
+
+ public void setGeoLongName(String geoLongName) {
+ this.geoLongName = geoLongName;
+ }
+
+ /**
+ * Checks for geo entity.
+ *
+   * @return true if both geoLatName and geoLongName are set
+ */
+ public boolean hasGeoEntity() {
+ return (this.geoLongName != null && this.geoLatName != null);
+ }
+
+ @Override
+ public String toString() {
+ return "GeoOxmEntityDescriptor ["
+ + (geoLatName != null ? "geoLatName=" + geoLatName + ", " : "")
+ + (geoLongName != null ? "geoLongName=" + geoLongName + ", " : "")
+ + (entityName != null ? "entityName=" + entityName + ", " : "")
+ + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames
+ : "")
+ + "]";
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java
new file mode 100644
index 0000000..fd071d1
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java
@@ -0,0 +1,68 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class OxmEntityDescriptor {
+
+ protected String entityName;
+
+ protected List<String> primaryKeyAttributeNames;
+
+ public OxmEntityDescriptor() {
+ primaryKeyAttributeNames = new ArrayList<String>();
+ }
+
+ public String getEntityName() {
+ return entityName;
+ }
+
+ public void setEntityName(String entityName) {
+ this.entityName = entityName;
+ }
+
+ public List<String> getPrimaryKeyAttributeNames() {
+ return primaryKeyAttributeNames;
+ }
+
+ public void setPrimaryKeyAttributeNames(List<String> primaryKeyAttributeNames) {
+ this.primaryKeyAttributeNames = primaryKeyAttributeNames;
+ }
+
+ public void addPrimaryKeyName(String name) {
+ primaryKeyAttributeNames.add(name);
+ }
+
+ @Override
+ public String toString() {
+ return "OxmEntityDescriptor [" + (entityName != null ? "entityName=" + entityName + ", " : "")
+ + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames
+ : "")
+ + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java
new file mode 100644
index 0000000..09326a8
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java
@@ -0,0 +1,132 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.eclipse.persistence.internal.oxm.mappings.Descriptor;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+
+public class OxmEntityLookup implements OxmModelProcessor {
+
+ private Map<String, HashMap<String, String>> oxmModel;
+
+ private Map<String, DynamicType> entityTypeLookup;
+
+ private Map<String, OxmEntityDescriptor> entityDescriptors;
+
+
+ public OxmEntityLookup() {
+ oxmModel = new LinkedHashMap<String, HashMap<String, String>>();
+ entityTypeLookup = new LinkedHashMap<String, DynamicType>();
+ entityDescriptors = new HashMap<String, OxmEntityDescriptor>();
+ }
+
+ @Override
+ public void processOxmModel(DynamicJAXBContext jaxbContext) {
+
+ @SuppressWarnings("rawtypes")
+ List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors();
+
+ for (@SuppressWarnings("rawtypes")
+ Descriptor desc : descriptorsList) {
+
+ DynamicType entity = jaxbContext.getDynamicType(desc.getAlias());
+
+ LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>();
+
+ // Not all fields have key attributes
+ if (desc.getPrimaryKeyFields() != null) {
+ oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString()
+ .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", ""));
+ }
+
+ String entityName = desc.getDefaultRootElement();
+
+ entityTypeLookup.put(entityName, entity);
+
+ // add entityName
+ oxmProperties.put("entityName", entityName);
+
+ oxmModel.put(entityName, oxmProperties);
+
+ }
+
+ for (Entry<String, HashMap<String, String>> entityModel : oxmModel.entrySet()) {
+ HashMap<String, String> attribute = entityModel.getValue();
+ OxmEntityDescriptor entity = new OxmEntityDescriptor();
+
+ entity.setEntityName(attribute.get("entityName"));
+
+ if (attribute.containsKey("primaryKeyAttributeNames")) {
+
+ entity.setPrimaryKeyAttributeNames(
+ Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(",")));
+
+ entityDescriptors.put(attribute.get("entityName"), entity);
+ }
+ }
+
+ }
+
+ public Map<String, HashMap<String, String>> getOxmModel() {
+ return oxmModel;
+ }
+
+ public void setOxmModel(Map<String, HashMap<String, String>> oxmModel) {
+ this.oxmModel = oxmModel;
+ }
+
+ public Map<String, DynamicType> getEntityTypeLookup() {
+ return entityTypeLookup;
+ }
+
+ public void setEntityTypeLookup(Map<String, DynamicType> entityTypeLookup) {
+ this.entityTypeLookup = entityTypeLookup;
+ }
+
+ public Map<String, OxmEntityDescriptor> getEntityDescriptors() {
+ return entityDescriptors;
+ }
+
+ public void setEntityDescriptors(Map<String, OxmEntityDescriptor> entityDescriptors) {
+ this.entityDescriptors = entityDescriptors;
+ }
+
+ public void addEntityDescriptor(String type, OxmEntityDescriptor descriptor) {
+ if ( this.entityDescriptors != null ) {
+ this.entityDescriptors.put(type, descriptor);
+ }
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java
new file mode 100644
index 0000000..475fe8f
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java
@@ -0,0 +1,195 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.eclipse.persistence.jaxb.JAXBContextProperties;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContextFactory;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
+import org.springframework.core.io.support.ResourcePatternResolver;
+
+public class OxmModelLoader {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(OxmModelLoader.class);
+
+ /*
+   * The intent of this parameter is to allow the latest AAI schema version discovered from
+   * the aai-schema jar file to be overridden programmatically. The property is optional, but
+   * if it is set on the bean or by another class in the system, it takes precedence over the
+   * discovered version.
+   *
+   * If oxmApiVersionOverride is greater than 0, the latest version is set to the specified
+   * version and that schema stream is loaded if it is available.
+ */
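+
+  /*
+   * Illustrative sketch (hypothetical version number, not part of this change): constructing
+   * the loader as
+   *
+   *   new OxmModelLoader(11, oxmModelProcessors);
+   *
+   * would pin the loader to the v11 schema, provided an aai_oxm_v11.xml file is present on
+   * the classpath.
+   */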
+
+ protected int oxmApiVersionOverride;
+ protected Set<OxmModelProcessor> processors;
+ private int latestVersionNum = 0;
+
+ private final static Pattern p = Pattern.compile("aai_oxm_(v)(.*).xml");
+
+ public OxmModelLoader() {
+ this(-1, new HashSet<OxmModelProcessor>());
+ }
+
+ public OxmModelLoader(int apiVersionOverride,Set<OxmModelProcessor> oxmModelProcessors) {
+ this.oxmApiVersionOverride = apiVersionOverride;
+ this.processors = oxmModelProcessors;
+ }
+
+ protected synchronized Map<Integer, InputStream> getStreamHandlesForOxmFromResource() {
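+    // OXM schema files are discovered on the classpath under /oxm/ using the
+    // aai_oxm_v<N>.xml naming convention; the numeric version suffix is parsed out and used
+    // as the key in the returned map.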
+ Map<Integer, InputStream> listOfOxmFiles = new HashMap<Integer, InputStream>();
+ ClassLoader oxmClassLoader = OxmModelLoader.class.getClassLoader();
+ ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(oxmClassLoader);
+ Resource[] resources = null;
+ try {
+ resources = resolver.getResources("classpath*:/oxm/aai_oxm*.xml");
+ } catch (IOException ex) {
+ LOG.error(AaiUiMsgs.OXM_LOADING_ERROR, ex.getMessage());
+ }
+
+    if (resources == null) {
+      LOG.error(AaiUiMsgs.OXM_LOADING_ERROR, "No OXM schema files found on classpath");
+      // return the empty map rather than dereferencing a null resources array below
+      return listOfOxmFiles;
+    }
+
+ for (Resource resource : resources) {
+ Matcher m = p.matcher(resource.getFilename());
+
+ if (m.matches()) {
+ try {
+          listOfOxmFiles.put(Integer.valueOf(m.group(2)), resource.getInputStream());
+ } catch (Exception e) {
+ LOG.error(AaiUiMsgs.OXM_LOADING_ERROR,
+ resource.getFilename(), e.getMessage());
+ }
+ }
+ }
+ return listOfOxmFiles;
+ }
+
+ /**
+ * Load an oxm model.
+   * @param inputStream input stream for the OXM schema file
+ */
+ protected void loadModel(InputStream inputStream) {
+ Map<String, Object> properties = new HashMap<String, Object>();
+ properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, inputStream);
+ try {
+ final DynamicJAXBContext oxmContext = DynamicJAXBContextFactory
+ .createContextFromOXM(Thread.currentThread().getContextClassLoader(), properties);
+
+ parseOxmContext(oxmContext);
+ // populateSearchableOxmModel();
+ LOG.info(AaiUiMsgs.OXM_LOAD_SUCCESS, String.valueOf(latestVersionNum));
+ } catch (Exception exc) {
+ LOG.info(AaiUiMsgs.OXM_PARSE_ERROR_NONVERBOSE);
+ LOG.error(AaiUiMsgs.OXM_PARSE_ERROR_VERBOSE, "OXM v" + latestVersionNum, exc.getMessage());
+ }
+ }
+
+ /**
+ * Load the latest oxm model.
+ */
+ public synchronized void loadLatestOxmModel() {
+
+ LOG.info(AaiUiMsgs.INITIALIZE_OXM_MODEL_LOADER);
+
+ // find handles for available oxm models
+ final Map<Integer, InputStream> listOfOxmStreams = getStreamHandlesForOxmFromResource();
+ if (listOfOxmStreams.isEmpty()) {
+ LOG.error(AaiUiMsgs.OXM_FILE_NOT_FOUND);
+ return;
+ }
+
+ InputStream stream = null;
+
+ if (oxmApiVersionOverride > 0) {
+ latestVersionNum = oxmApiVersionOverride;
+ LOG.warn(AaiUiMsgs.WARN_GENERIC, "Overriding AAI Schema with version = " + latestVersionNum);
+ stream = listOfOxmStreams.get(latestVersionNum);
+ } else {
+
+ for (Integer key : listOfOxmStreams.keySet()) {
+ if (key.intValue() > latestVersionNum) {
+ latestVersionNum = key.intValue();
+ stream = listOfOxmStreams.get(key);
+ }
+ }
+ }
+
+    if (stream == null) {
+      LOG.error(AaiUiMsgs.OXM_FILE_NOT_FOUND);
+      return;
+    }
+
+    // load the latest oxm file
+    loadModel(stream);
+
+ }
+
+ public int getLatestVersionNum() {
+ return latestVersionNum;
+ }
+
+ public void setLatestVersionNum(int latestVersionNum) {
+ this.latestVersionNum = latestVersionNum;
+ }
+
+ /**
+ * Parses the oxm context.
+ *
+ * @param oxmContext the oxm context
+ */
+ private void parseOxmContext(DynamicJAXBContext oxmContext) {
+
+ if (processors != null && processors.size() > 0) {
+
+ for (OxmModelProcessor processor : processors) {
+
+ try {
+
+ processor.processOxmModel(oxmContext);
+
+ } catch (Exception exc) {
+
+ LOG.warn(AaiUiMsgs.WARN_GENERIC,
+ "OxmModelProcessor experienced an error. Error: " + exc.getMessage());
+
+ }
+
+ }
+
+ }
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java
new file mode 100644
index 0000000..9e250b7
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java
@@ -0,0 +1,33 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+
+public interface OxmModelProcessor {
+
+ public void processOxmModel(DynamicJAXBContext jaxbContext);
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java
new file mode 100644
index 0000000..7833ee0
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java
@@ -0,0 +1,119 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.eclipse.persistence.internal.oxm.mappings.Descriptor;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+
+public class SearchableEntityLookup implements OxmModelProcessor {
+
+ private Map<String, HashMap<String, String>> searchableOxmModel;
+ private Map<String, SearchableOxmEntityDescriptor> searchableEntityDescriptors;
+
+ public SearchableEntityLookup() {
+ searchableOxmModel = new LinkedHashMap<String, HashMap<String, String>>();
+ searchableEntityDescriptors = new HashMap<String, SearchableOxmEntityDescriptor>();
+ }
+
+ @Override
+ public void processOxmModel(DynamicJAXBContext jaxbContext) {
+
+ @SuppressWarnings("rawtypes")
+ List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors();
+
+ for (@SuppressWarnings("rawtypes")
+ Descriptor desc : descriptorsList) {
+
+ DynamicType entity = jaxbContext.getDynamicType(desc.getAlias());
+
+ LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>();
+
+ // Not all fields have key attributes
+ if (desc.getPrimaryKeyFields() != null) {
+ oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString()
+ .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", ""));
+ }
+
+ String entityName = desc.getDefaultRootElement();
+
+ // add entityName
+ oxmProperties.put("entityName", entityName);
+
+ Map<String, String> properties = entity.getDescriptor().getProperties();
+ if (properties != null) {
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+
+ if (entry.getKey().equalsIgnoreCase("searchable")) {
+ oxmProperties.put("searchableAttributes", entry.getValue());
+ }
+ }
+ }
+
+      // Add all searchable entity types for reverse lookup
+ if (oxmProperties.containsKey("searchableAttributes")) {
+ searchableOxmModel.put(entityName, oxmProperties);
+ }
+
+ }
+
+ for (Entry<String, HashMap<String, String>> searchableModel : searchableOxmModel.entrySet()) {
+ HashMap<String, String> attribute = searchableModel.getValue();
+ SearchableOxmEntityDescriptor entity = new SearchableOxmEntityDescriptor();
+ entity.setEntityName(attribute.get("entityName"));
+ entity.setPrimaryKeyAttributeNames(
+ Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(",")));
+ entity
+ .setSearchableAttributes(Arrays.asList(attribute.get("searchableAttributes").split(",")));
+ searchableEntityDescriptors.put(attribute.get("entityName"), entity);
+ }
+
+ }
+
+ public Map<String, HashMap<String, String>> getSearchableOxmModel() {
+ return searchableOxmModel;
+ }
+
+ public void setSearchableOxmModel(Map<String, HashMap<String, String>> searchableOxmModel) {
+ this.searchableOxmModel = searchableOxmModel;
+ }
+
+ public Map<String, SearchableOxmEntityDescriptor> getSearchableEntityDescriptors() {
+ return searchableEntityDescriptors;
+ }
+
+ public void setSearchableEntityDescriptors(
+ Map<String, SearchableOxmEntityDescriptor> searchableEntityDescriptors) {
+ this.searchableEntityDescriptors = searchableEntityDescriptors;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java
new file mode 100644
index 0000000..9f2809f
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java
@@ -0,0 +1,75 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.util.List;
+
+public class SearchableOxmEntityDescriptor extends OxmEntityDescriptor {
+
+ protected List<String> searchableAttributes;
+
+ public List<String> getSearchableAttributes() {
+ return searchableAttributes;
+ }
+
+ public void setSearchableAttributes(List<String> searchableAttributes) {
+ this.searchableAttributes = searchableAttributes;
+ }
+
+ public void addSearchableAttribute(String attributeName) {
+ searchableAttributes.add(attributeName);
+ }
+
+ /**
+ * Checks for searchable attributes.
+ *
+   * @return true if at least one searchable attribute is defined
+ */
+ public boolean hasSearchableAttributes() {
+
+ if (this.searchableAttributes == null) {
+ return false;
+ }
+
+ if (this.searchableAttributes.size() > 0) {
+ return true;
+ }
+
+ return false;
+
+ }
+
+ @Override
+ public String toString() {
+ return "SearchableOxmEntityDescriptor ["
+ + (searchableAttributes != null ? "searchableAttributes=" + searchableAttributes + ", "
+ : "")
+ + (entityName != null ? "entityName=" + entityName + ", " : "")
+ + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames
+ : "")
+ + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java
new file mode 100644
index 0000000..774f6b0
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java
@@ -0,0 +1,54 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity;
+
+public class SuggestionEntityDescriptor extends OxmEntityDescriptor {
+
+ protected SuggestionSearchEntity suggestionSearchEntity;
+
+ public SuggestionSearchEntity getSuggestionSearchEntity() {
+ return suggestionSearchEntity;
+ }
+
+ public void setSuggestionSearchEntity(SuggestionSearchEntity suggestionSearchEntity) {
+ this.suggestionSearchEntity = suggestionSearchEntity;
+ }
+
+ @Override
+ public String toString() {
+ return "SuggestionEntityDescriptor ["
+ + (suggestionSearchEntity != null
+ ? "suggestionSearchEntity=" + suggestionSearchEntity + ", " : "")
+ + (entityName != null ? "entityName=" + entityName + ", " : "")
+ + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames
+ : "")
+ + "]";
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java
new file mode 100644
index 0000000..fde1b6a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java
@@ -0,0 +1,181 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.config.oxm;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Vector;
+
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.eclipse.persistence.internal.oxm.mappings.Descriptor;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+import org.eclipse.persistence.mappings.DatabaseMapping;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity;
+
+public class SuggestionEntityLookup implements OxmModelProcessor {
+
+ private Map<String, HashMap<String, String>> suggestionSearchEntityOxmModel;
+ private Map<String, SuggestionEntityDescriptor> suggestionSearchEntityDescriptors;
+ private FiltersConfig filtersConfig;
+
+ public SuggestionEntityLookup(FiltersConfig filtersConfig) {
+ suggestionSearchEntityOxmModel = new LinkedHashMap<String, HashMap<String, String>>();
+ suggestionSearchEntityDescriptors = new HashMap<String, SuggestionEntityDescriptor>();
+ this.filtersConfig = filtersConfig;
+ }
+
+ @Override
+ public void processOxmModel(DynamicJAXBContext jaxbContext) {
+
+ @SuppressWarnings("rawtypes")
+ List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors();
+
+ for (@SuppressWarnings("rawtypes")
+ Descriptor desc : descriptorsList) {
+
+ DynamicType entity = jaxbContext.getDynamicType(desc.getAlias());
+
+ LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>();
+
+ // Not all fields have key attributes
+ if (desc.getPrimaryKeyFields() != null) {
+ oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString()
+ .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", ""));
+ }
+
+ String entityName = desc.getDefaultRootElement();
+
+ // add entityName
+ oxmProperties.put("entityName", entityName);
+
+ Map<String, String> properties = entity.getDescriptor().getProperties();
+ if (properties != null) {
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+
+
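+          // An entity flagged with containsSuggestibleProps is scanned for direct mappings
+          // whose suggestibleOnSearch property is true; those attribute names (and any
+          // suggestionVerbs) are captured for the suggestion descriptors built below.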
+ if (entry.getKey().equalsIgnoreCase("containsSuggestibleProps")) {
+
+ oxmProperties.put("containsSuggestibleProps", "true");
+
+ Vector<DatabaseMapping> descriptorMaps = entity.getDescriptor().getMappings();
+ List<String> listOfSuggestableAttributes = new ArrayList<String>();
+
+ for (DatabaseMapping descMap : descriptorMaps) {
+ if (descMap.isAbstractDirectMapping()) {
+
+ if (descMap.getProperties().get("suggestibleOnSearch") != null) {
+ String suggestableOnSearchString =
+ String.valueOf(descMap.getProperties().get("suggestibleOnSearch"));
+
+ boolean isSuggestibleOnSearch = Boolean.valueOf(suggestableOnSearchString);
+
+ if (isSuggestibleOnSearch) {
+ /* Grab attribute types for suggestion */
+ String attributeName =
+ descMap.getField().getName().replaceAll("/text\\(\\)", "");
+ listOfSuggestableAttributes.add(attributeName);
+
+ if (descMap.getProperties().get("suggestionVerbs") != null) {
+ String suggestionVerbsString =
+ String.valueOf(descMap.getProperties().get("suggestionVerbs"));
+
+ oxmProperties.put("suggestionVerbs", suggestionVerbsString);
+ }
+ }
+ }
+ }
+ }
+
+ if (!listOfSuggestableAttributes.isEmpty()) {
+ oxmProperties.put("suggestibleAttributes",
+ String.join(",", listOfSuggestableAttributes));
+ }
+ } else if (entry.getKey().equalsIgnoreCase("suggestionAliases")) {
+ oxmProperties.put("suggestionAliases", entry.getValue());
+ }
+ }
+ }
+
+ if (oxmProperties.containsKey("containsSuggestibleProps")) {
+ suggestionSearchEntityOxmModel.put(entityName, oxmProperties);
+ }
+ }
+
+ for (Entry<String, HashMap<String, String>> suggestionEntityModel : suggestionSearchEntityOxmModel
+ .entrySet()) {
+ HashMap<String, String> attribute = suggestionEntityModel.getValue();
+
+ String entityName = attribute.get("entityName");
+ SuggestionSearchEntity suggestionSearchEntity = new SuggestionSearchEntity(filtersConfig, this);
+ suggestionSearchEntity.setEntityType(entityName);
+
+ if (attribute.get("suggestionAliases") != null) {
+ suggestionSearchEntity
+ .setSuggestionAliases(Arrays.asList(attribute.get("suggestionAliases").split(",")));
+ }
+
+ if (attribute.get("suggestibleAttributes") != null) {
+ suggestionSearchEntity.setSuggestionPropertyTypes(
+ Arrays.asList(attribute.get("suggestibleAttributes").split(",")));
+ }
+
+ SuggestionEntityDescriptor entity = new SuggestionEntityDescriptor();
+ entity.setSuggestionSearchEntity(suggestionSearchEntity);
+ entity.setEntityName(entityName);
+
+ if (attribute.get("primaryKeyAttributeNames") != null) {
+ entity.setPrimaryKeyAttributeNames(
+ Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(",")));
+ }
+
+ suggestionSearchEntityDescriptors.put(entityName, entity);
+ }
+ }
+
+ public Map<String, HashMap<String, String>> getSuggestionSearchEntityOxmModel() {
+ return suggestionSearchEntityOxmModel;
+ }
+
+ public void setSuggestionSearchEntityOxmModel(
+ Map<String, HashMap<String, String>> suggestionSearchEntityOxmModel) {
+ this.suggestionSearchEntityOxmModel = suggestionSearchEntityOxmModel;
+ }
+
+ public Map<String, SuggestionEntityDescriptor> getSuggestionSearchEntityDescriptors() {
+ return suggestionSearchEntityDescriptors;
+ }
+
+ public void setSuggestionSearchEntityDescriptors(
+ Map<String, SuggestionEntityDescriptor> suggestionSearchEntityDescriptors) {
+ this.suggestionSearchEntityDescriptors = suggestionSearchEntityDescriptors;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java
new file mode 100644
index 0000000..604c74c
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java
@@ -0,0 +1,937 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.crossentityreference.sync;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.concurrent.ExecutorService;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.CrossEntityReference;
+import org.onap.aai.sparky.config.oxm.CrossEntityReferenceDescriptor;
+import org.onap.aai.sparky.config.oxm.CrossEntityReferenceLookup;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.SearchableEntityLookup;
+import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
+import org.onap.aai.sparky.sync.IndexSynchronizer;
+import org.onap.aai.sparky.sync.SynchronizerConstants;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.entity.IndexableCrossEntityReference;
+import org.onap.aai.sparky.sync.entity.MergableEntity;
+import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchPut;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+/**
+ * The Class CrossEntityReferenceSynchronizer.
+ */
+public class CrossEntityReferenceSynchronizer extends AbstractEntitySynchronizer
+ implements IndexSynchronizer {
+
+ /**
+ * The Class RetryCrossEntitySyncContainer.
+ */
+ private class RetryCrossEntitySyncContainer {
+ NetworkTransaction txn;
+ IndexableCrossEntityReference icer;
+
+ /**
+ * Instantiates a new retry cross entity sync container.
+ *
+ * @param txn the txn
+ * @param icer the icer
+ */
+ public RetryCrossEntitySyncContainer(NetworkTransaction txn,
+ IndexableCrossEntityReference icer) {
+ this.txn = txn;
+ this.icer = icer;
+ }
+
+ public NetworkTransaction getNetworkTransaction() {
+ return txn;
+ }
+
+ public IndexableCrossEntityReference getIndexableCrossEntityReference() {
+ return icer;
+ }
+ }
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(CrossEntityReferenceSynchronizer.class);
+
+ private static final String SERVICE_INSTANCE = "service-instance";
+
+ private Deque<SelfLinkDescriptor> selflinks;
+ private Deque<RetryCrossEntitySyncContainer> retryQueue;
+ private Map<String, Integer> retryLimitTracker;
+ private boolean isAllWorkEnumerated;
+ protected ExecutorService esPutExecutor;
+ private CrossEntityReferenceLookup crossEntityReferenceLookup;
+ private OxmEntityLookup oxmEntityLookup;
+ private SearchableEntityLookup searchableEntityLookup;
+
+
+ /**
+ * Instantiates a new cross entity reference synchronizer.
+ *
+   * @param schemaConfig the Elasticsearch schema configuration (supplies the index name)
+ * @throws Exception the exception
+ */
+ public CrossEntityReferenceSynchronizer(ElasticSearchSchemaConfig schemaConfig,
+ int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
+ NetworkStatisticsConfig esStatConfig, CrossEntityReferenceLookup crossEntityReferenceLookup,
+ OxmEntityLookup oxmEntityLookup, SearchableEntityLookup searchableEntityLookup) throws Exception {
+ super(LOG, "CERS", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(),
+ aaiStatConfig, esStatConfig);
+ this.crossEntityReferenceLookup = crossEntityReferenceLookup;
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.searchableEntityLookup = searchableEntityLookup;
+ this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+ this.retryQueue = new ConcurrentLinkedDeque<RetryCrossEntitySyncContainer>();
+ this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
+ this.synchronizerName = "Cross Reference Entity Synchronizer";
+ this.isAllWorkEnumerated = false;
+ this.esPutExecutor = NodeUtils.createNamedExecutor("CERS-ES-PUT", 5, LOG);
+ this.aaiEntityStats.intializeEntityCounters(
+ crossEntityReferenceLookup.getCrossReferenceEntityDescriptors().keySet());
+
+ this.esEntityStats.intializeEntityCounters(
+ crossEntityReferenceLookup.getCrossReferenceEntityDescriptors().keySet());
+ this.syncDurationInMs = -1;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+ */
+ @Override
+ public OperationState doSync() {
+ this.syncDurationInMs = -1;
+ String txnID = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnID, "CrossEntitySynchronizer", "", "Sync", "");
+
+ resetCounters();
+ syncStartedTimeStampInMs = System.currentTimeMillis();
+ launchSyncFlow();
+ return OperationState.OK;
+ }
+
+ @Override
+ public SynchronizerState getState() {
+ if (!isSyncDone()) {
+ return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+ }
+
+ return SynchronizerState.IDLE;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
+ */
+ @Override
+ public String getStatReport(boolean showFinalReport) {
+ syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
+ return getStatReport(syncDurationInMs, showFinalReport);
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
+ */
+ @Override
+ public void shutdown() {
+ this.shutdownExecutors();
+ }
+
+ @Override
+ protected boolean isSyncDone() {
+ int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
+
+ if (totalWorkOnHand > 0 || !isAllWorkEnumerated) {
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Launch sync flow.
+ *
+ * @return the operation state
+ */
+ private OperationState launchSyncFlow() {
+ final Map<String,String> contextMap = MDC.getCopyOfContextMap();
+ Map<String, CrossEntityReferenceDescriptor> descriptorMap =
+ crossEntityReferenceLookup.getCrossReferenceEntityDescriptors();
+
+ if (descriptorMap.isEmpty()) {
+ LOG.error(AaiUiMsgs.ERROR_LOADING_OXM);
+
+ return OperationState.ERROR;
+ }
+
+ Collection<String> syncTypes = descriptorMap.keySet();
+
+ try {
+
+      /*
+       * Launch a parallel async task to process the documents for each entity type, up to the
+       * maximum concurrency of the configured executor.
+       */
+
+ aaiWorkOnHand.set(syncTypes.size());
+
+ for (String key : syncTypes) {
+
+ supplyAsync(new Supplier<Void>() {
+
+ @Override
+ public Void get() {
+ MDC.setContextMap(contextMap);
+ OperationResult typeLinksResult = null;
+ try {
+ typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key);
+ aaiWorkOnHand.decrementAndGet();
+ processEntityTypeSelfLinks(typeLinksResult);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "An error occurred processing entity selflinks. Error: " + exc.getMessage());
+ }
+
+ return null;
+ }
+
+ }, aaiExecutor).whenComplete((result, error) -> {
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage());
+ }
+ });
+ }
+
+ while (aaiWorkOnHand.get() != 0) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+ }
+
+ Thread.sleep(1000);
+ }
+
+ aaiWorkOnHand.set(selflinks.size());
+ isAllWorkEnumerated = true;
+ performSync();
+
+ while (!isSyncDone()) {
+ performRetrySync();
+ Thread.sleep(1000);
+ }
+
+ /*
+ * Make sure we don't hang on to retries that failed which could cause issues during future
+ * syncs
+ */
+ retryLimitTracker.clear();
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "An error occurred during entity synchronization. Error: " + exc.getMessage());
+
+ }
+
+ return OperationState.OK;
+ }
+
+ /**
+ * Perform sync.
+ */
+ private void performSync() {
+ while (selflinks.peek() != null) {
+
+ SelfLinkDescriptor linkDescriptor = selflinks.poll();
+ aaiWorkOnHand.decrementAndGet();
+
+ CrossEntityReferenceDescriptor descriptor = null;
+
+ if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+ descriptor = crossEntityReferenceLookup.getCrossReferenceEntityDescriptors()
+ .get(linkDescriptor.getEntityType());
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+ // go to next element in iterator
+ continue;
+ }
+
+ if (descriptor.hasCrossEntityReferences()) {
+
+ NetworkTransaction txn = new NetworkTransaction();
+ txn.setDescriptor(descriptor);
+ txn.setLink(linkDescriptor.getSelfLink());
+ txn.setQueryParameters(linkDescriptor.getDepthModifier());
+ txn.setOperationType(HttpMethod.GET);
+ txn.setEntityType(linkDescriptor.getEntityType());
+
+ aaiWorkOnHand.incrementAndGet();
+
+ supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor)
+ .whenComplete((result, error) -> {
+
+ aaiWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_GET, error.getLocalizedMessage());
+ } else {
+ if (result == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_CROSS_REF_SYNC);
+ } else {
+ updateActiveInventoryCounters(result);
+ fetchDocumentForUpsert(result);
+ }
+ }
+ });
+ }
+ }
+ }
+ }
+
+ /**
+ * Process entity type self links.
+ *
+ * @param operationResult the operation result
+ */
+ private void processEntityTypeSelfLinks(OperationResult operationResult) {
+
+ JsonNode rootNode = null;
+
+ final String jsonResult = operationResult.getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0) {
+
+ try {
+ rootNode = mapper.readTree(jsonResult);
+ } catch (IOException exc) {
+        LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getMessage());
+        return;
+ }
+
+ JsonNode resultData = rootNode.get("result-data");
+ ArrayNode resultDataArrayNode = null;
+
+ if (resultData.isArray()) {
+ resultDataArrayNode = (ArrayNode) resultData;
+
+ Iterator<JsonNode> elementIterator = resultDataArrayNode.elements();
+ JsonNode element = null;
+
+ while (elementIterator.hasNext()) {
+ element = elementIterator.next();
+
+ final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type");
+ final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link");
+
+ CrossEntityReferenceDescriptor descriptor = null;
+
+ if (resourceType != null && resourceLink != null) {
+ descriptor = crossEntityReferenceLookup.getCrossReferenceEntityDescriptors().get(resourceType);
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType);
+ // go to next element in iterator
+ continue;
+ }
+ if (descriptor.hasCrossEntityReferences()) {
+ selflinks.add(new SelfLinkDescriptor(
+ resourceLink,SynchronizerConstants.DEPTH_ALL_MODIFIER, resourceType));
+ }
+ }
+ }
+ }
+ }
+ }
+
+
+
+ /**
+ * By providing the entity type and a json node for the entity, determine the
+ * primary key name(s) + primary key value(s) sufficient to build an entity query string
+ * of the following format:
+ *
+ * <entityType>.<primaryKeyNames>:<primaryKeyValues>
+ *
+ * @return - a composite string in the above format or null
+ */
+ private String determineEntityQueryString(String entityType, JsonNode entityJsonNode) {
+
+ OxmEntityDescriptor entityDescriptor =
+ oxmEntityLookup.getEntityDescriptors().get(entityType);
+
+ String queryString = null;
+
+ if ( entityDescriptor != null ) {
+
+ final List<String> primaryKeyNames = entityDescriptor.getPrimaryKeyAttributeNames();
+ final List<String> keyValues = new ArrayList<String>();
+ NodeUtils.extractFieldValuesFromObject(entityJsonNode, primaryKeyNames, keyValues);
+
+ queryString = entityType + "." + NodeUtils.concatArray(primaryKeyNames,"/") + ":" + NodeUtils.concatArray(keyValues);
+
+ }
+
+ return queryString;
+
+
+ }
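+
+  /*
+   * Illustrative sketch of the composite query string produced above, using hypothetical values
+   * (the entity type, key name and key value below are examples only, not taken from this change):
+   *
+   *   entityType      = "service-instance"
+   *   primaryKeyNames = ["service-instance-id"]
+   *   keyValues       = ["example-instance-id-123"]
+   *
+   *   queryString -> "service-instance.service-instance-id:example-instance-id-123"
+   */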
+
+ /**
+ * Fetch document for upsert.
+ *
+ * @param txn the txn
+ */
+ private void fetchDocumentForUpsert(NetworkTransaction txn) {
+
+ if (!txn.getOperationResult().wasSuccessful()) {
+ LOG.error(AaiUiMsgs.SELF_LINK_GET, txn.getOperationResult().getResult());
+ return;
+ }
+
+ CrossEntityReferenceDescriptor cerDescriptor = crossEntityReferenceLookup
+ .getCrossReferenceEntityDescriptors().get(txn.getDescriptor().getEntityName());
+
+ if (cerDescriptor != null && cerDescriptor.hasCrossEntityReferences()) {
+
+ final String jsonResult = txn.getOperationResult().getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0) {
+
+ /**
+ * Here's what we are going to do:
+ *
+ * <li>Extract primary key name and value from the parent type.
+ * <li>Extract the primary key and value from the nested child instance.
+ * <li>Build a generic query to discover the self-link for the nested-child-instance using
+ * parent and child.
+ * <li>Set the self-link on the child.
+ * <li>Generate the id that will allow the elastic-search upsert to work.
+ * <li>Rinse and repeat.
+ */
+
+ CrossEntityReference cerDefinition = cerDescriptor.getCrossEntityReference();
+
+ if (cerDefinition != null) {
+ JsonNode convertedNode = null;
+ try {
+ convertedNode = NodeUtils.convertJsonStrToJsonNode(txn.getOperationResult().getResult());
+
+ final String parentEntityQueryString = determineEntityQueryString(txn.getEntityType(), convertedNode);
+
+ List<String> extractedParentEntityAttributeValues = new ArrayList<String>();
+
+ NodeUtils.extractFieldValuesFromObject(convertedNode,
+ cerDefinition.getReferenceAttributes(),
+ extractedParentEntityAttributeValues);
+
+ List<JsonNode> nestedTargetEntityInstances = new ArrayList<JsonNode>();
+ NodeUtils.extractObjectsByKey(convertedNode, cerDefinition.getTargetEntityType(),
+ nestedTargetEntityInstances);
+
+ for (JsonNode targetEntityInstance : nestedTargetEntityInstances) {
+
+ if (cerDescriptor != null) {
+
+ String childEntityType = cerDefinition.getTargetEntityType();
+ OxmEntityDescriptor childDesciptor = oxmEntityLookup.getEntityDescriptors().get(childEntityType);
+
+ List<String> childPrimaryKeyNames = null;
+
+ if (childDesciptor != null) {
+ childPrimaryKeyNames = childDesciptor.getPrimaryKeyAttributeNames();
+ } else {
+ childPrimaryKeyNames = new ArrayList<String>();
+ }
+
+ List<String> childKeyValues = new ArrayList<String>();
+ NodeUtils.extractFieldValuesFromObject(targetEntityInstance, childPrimaryKeyNames, childKeyValues);
+
+ String childEntityQueryKeyString = childEntityType + "." + NodeUtils.concatArray(childPrimaryKeyNames,"/") + ":" + NodeUtils.concatArray(childKeyValues);
+
+ /**
+ * Build generic-query to query child instance self-link from AAI
+ */
+ List<String> orderedQueryKeyParams = new ArrayList<String>();
+
+ /**
+ * At present, there is an issue with resolving the self-link using the
+ * generic-query with nothing more than the service-instance identifier and the
+ * service-subscription. There is another level of detail we don't have access to
+ * unless we parse it out of the service-subscription self-link, which is a
+ * coupling I would like to avoid. Fortunately, there is a workaround, but only
+ * for service-instances, which is presently our only use-case for the
+                  * cross-entity-reference in R1707. Going forward, hopefully there will be other
+                  * ways to resolve a child self-link using parent-embedded metadata that we
+                  * don't currently have.
+                  *
+                  * The work-around with the service-instance entity-type is that it's possible to
+                  * request the self-link using only the service-instance-id, because of a
+                  * historical AAI functional query requirement that it be possible to query a
+                  * service-instance only by its service-instance-id. This entity type is the only
+                  * one in the system that can be queried this way, which makes it a very limited
+                  * workaround, but good enough for the current release.
+ */
+
+ if (SERVICE_INSTANCE.equals(childEntityType)) {
+ orderedQueryKeyParams.clear();
+ orderedQueryKeyParams.add(childEntityQueryKeyString);
+ } else {
+ orderedQueryKeyParams.add(parentEntityQueryString);
+ orderedQueryKeyParams.add(childEntityQueryKeyString);
+ }
+
+ String genericQueryStr = null;
+ try {
+ genericQueryStr = aaiAdapter.getGenericQueryForSelfLink(childEntityType, orderedQueryKeyParams);
+
+ if (genericQueryStr != null) {
+ aaiWorkOnHand.incrementAndGet();
+
+ OperationResult aaiQueryResult = aaiAdapter.queryActiveInventoryWithRetries(
+ genericQueryStr, "application/json",
+ aaiAdapter.getEndpointConfig().getNumRequestRetries());
+
+ aaiWorkOnHand.decrementAndGet();
+
+ if (aaiQueryResult!= null && aaiQueryResult.wasSuccessful()) {
+
+ Collection<JsonNode> entityLinks = new ArrayList<JsonNode>();
+ JsonNode genericQueryResult = null;
+ try {
+ genericQueryResult = NodeUtils.convertJsonStrToJsonNode(aaiQueryResult.getResult());
+
+ if ( genericQueryResult != null ) {
+
+ NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link", entityLinks);
+
+ String selfLink = null;
+
+ if (entityLinks.size() != 1) {
+ /**
+ * an ambiguity exists where we can't reliably determine the self
+ * link, this should be a permanent error
+ */
+ LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY, String.valueOf(entityLinks.size()));
+ } else {
+ selfLink = ((JsonNode) entityLinks.toArray()[0]).asText();
+
+
+ IndexableCrossEntityReference icer =
+ getPopulatedDocument(targetEntityInstance, cerDescriptor);
+
+ for (String parentCrossEntityReferenceAttributeValue : extractedParentEntityAttributeValues) {
+ icer.addCrossEntityReferenceValue(
+ parentCrossEntityReferenceAttributeValue);
+ }
+
+ icer.setLink(ActiveInventoryAdapter.extractResourcePath(selfLink));
+
+ icer.deriveFields();
+
+ String link = null;
+ try {
+ link = elasticSearchAdapter
+ .buildElasticSearchGetDocUrl(getIndexName(), icer.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY,
+ exc.getLocalizedMessage());
+ }
+
+ if (link != null) {
+ NetworkTransaction n2 = new NetworkTransaction();
+ n2.setLink(link);
+ n2.setEntityType(txn.getEntityType());
+ n2.setDescriptor(txn.getDescriptor());
+ n2.setOperationType(HttpMethod.GET);
+
+ esWorkOnHand.incrementAndGet();
+
+ supplyAsync(
+ new PerformElasticSearchRetrieval(n2, elasticSearchAdapter),
+ esExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED,
+ error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ performDocumentUpsert(result, icer);
+ }
+ });
+ }
+
+ }
+ } else {
+ LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION);
+ }
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), exc.getLocalizedMessage());
+ }
+
+ } else {
+ String message = "Entity sync failed because AAI query failed with error " + aaiQueryResult.getResult();
+ LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message);
+ }
+
+ } else {
+                      String message = "Entity sync failed because the generic query string could not be determined.";
+ LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message);
+ }
+ } catch (Exception exc) {
+ String message = "Failed to sync entity because generation of generic query failed with error = " + exc.getMessage();
+ LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message);
+ }
+
+ }
+ }
+
+ } catch (IOException ioe) {
+ LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, ioe.getMessage());
+ }
+ }
+
+ }
+
+ } else {
+ LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND, txn.getEntityType());
+ }
+ }
+
+ /**
+ * Perform document upsert.
+ *
+ * @param esGetResult the es get result
+ * @param icer the icer
+ */
+ protected void performDocumentUpsert(NetworkTransaction esGetResult,
+ IndexableCrossEntityReference icer) {
+ /**
+     * <p>
+     * As part of the response processing we need to do the following:
+     * <ul>
+     * <li>1. Extract the version (if present); it will be the ETAG when we use the
+     * Search-Abstraction-Service.
+     * <li>2. Spawn the next task, which is to do the PUT operation into elastic with or without the
+     * version tag:
+     * <li>a) if version is null or RC=404, then a standard PUT, with no _update or version tag
+     * <li>b) if version != null, do a PUT with _update?version=(versionNumber) in the URI to elastic
+     * </ul>
+     * </p>
+ */
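+
+    /*
+     * Illustrative sketch of the two paths, with assumed host, index and document-id values
+     * (hypothetical, not taken from this change):
+     *
+     *   GET http://<es-host>:<es-port>/<index-name>/default/<doc-id>
+     *     -> 404 : the document is new, so a plain PUT of the payload is sent to that same URL
+     *     -> 200 : the _version is extracted and the merged document is sent through the _bulk
+     *              API (getBulkUrl()) carrying that version number
+     */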
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), icer.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+ return;
+ }
+
+ boolean wasEntryDiscovered = false;
+ String versionNumber = null;
+ if (esGetResult.getOperationResult().getResultCode() == 404) {
+ LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, icer.getEntityPrimaryKeyValue());
+ } else if (esGetResult.getOperationResult().getResultCode() == 200) {
+ wasEntryDiscovered = true;
+ try {
+ versionNumber = NodeUtils.extractFieldValueFromObject(
+ NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()),
+ "_version");
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "version Number",
+ icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage());
+ return;
+ }
+ } else {
+ /*
+       * A result code other than 200 does not necessarily mean a failure; e.g. 201 is returned
+       * when a document is created. TODO -> should we return here?
+ */
+ LOG.info(AaiUiMsgs.ES_OPERATION_RETURN_CODE,
+ String.valueOf(esGetResult.getOperationResult().getResultCode()));
+ return;
+ }
+
+ try {
+ String jsonPayload = null;
+ if (wasEntryDiscovered) {
+ try {
+ ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>();
+ NodeUtils.extractObjectsByKey(
+ NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()),
+ "_source", sourceObject);
+
+ if (!sourceObject.isEmpty()) {
+ String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false);
+ MergableEntity me = mapper.readValue(responseSource, MergableEntity.class);
+ ObjectReader updater = mapper.readerForUpdating(me);
+ MergableEntity merged = updater.readValue(icer.getAsJson());
+ jsonPayload = mapper.writeValueAsString(merged);
+ }
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "source value",
+ icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage());
+ return;
+ }
+ } else {
+ jsonPayload = icer.getAsJson();
+ }
+
+ if (wasEntryDiscovered) {
+ if (versionNumber != null && jsonPayload != null) {
+
+ String requestPayload = elasticSearchAdapter.buildBulkImportOperationRequest(getIndexName(),
+ "default", icer.getId(), versionNumber, jsonPayload);
+
+ NetworkTransaction transactionTracker = new NetworkTransaction();
+ transactionTracker.setEntityType(esGetResult.getEntityType());
+ transactionTracker.setDescriptor(esGetResult.getDescriptor());
+ transactionTracker.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformElasticSearchUpdate(elasticSearchAdapter.getBulkUrl(),
+ requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor)
+ .whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ processStoreDocumentResult(result, esGetResult, icer);
+ }
+ });
+ }
+
+ } else {
+ if (link != null && jsonPayload != null) {
+
+ NetworkTransaction updateElasticTxn = new NetworkTransaction();
+ updateElasticTxn.setLink(link);
+ updateElasticTxn.setEntityType(esGetResult.getEntityType());
+ updateElasticTxn.setDescriptor(esGetResult.getDescriptor());
+ updateElasticTxn.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter),
+ esPutExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ processStoreDocumentResult(result, esGetResult, icer);
+ }
+ });
+ }
+ }
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, exc.getLocalizedMessage());
+ }
+ }
+
+ /**
+ * Process store document result.
+ *
+ * @param esPutResult the es put result
+ * @param esGetResult the es get result
+ * @param icer the icer
+ */
+ private void processStoreDocumentResult(NetworkTransaction esPutResult,
+ NetworkTransaction esGetResult, IndexableCrossEntityReference icer) {
+
+ OperationResult or = esPutResult.getOperationResult();
+
+ if (!or.wasSuccessful()) {
+ if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) {
+
+ if (shouldAllowRetry(icer.getId())) {
+
+ esWorkOnHand.incrementAndGet();
+
+ RetryCrossEntitySyncContainer rsc = new RetryCrossEntitySyncContainer(esGetResult, icer);
+ retryQueue.push(rsc);
+
+ LOG.warn(AaiUiMsgs.ES_CROSS_REF_SYNC_VERSION_CONFLICT);
+ }
+ } else {
+ LOG.error(AaiUiMsgs.ES_CROSS_REF_SYNC_FAILURE, String.valueOf(or.getResultCode()),
+ or.getResult());
+ }
+ }
+ }
+
+ /**
+ * Perform retry sync.
+ */
+ private void performRetrySync() {
+ while (retryQueue.peek() != null) {
+
+ RetryCrossEntitySyncContainer rsc = retryQueue.poll();
+ if (rsc != null) {
+
+ IndexableCrossEntityReference icer = rsc.getIndexableCrossEntityReference();
+ NetworkTransaction txn = rsc.getNetworkTransaction();
+
+ String link = null;
+ try {
+ // In this retry flow the icer object has already
+ // derived its fields
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), icer.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage());
+ }
+
+ if (link != null) {
+ NetworkTransaction retryTransaction = new NetworkTransaction();
+ retryTransaction.setLink(link);
+ retryTransaction.setEntityType(txn.getEntityType());
+ retryTransaction.setDescriptor(txn.getDescriptor());
+ retryTransaction.setOperationType(HttpMethod.GET);
+
+ /*
+ * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow and we did
+ * that for this request already when queuing the failed PUT!
+ */
+
+ supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter),
+ esExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ performDocumentUpsert(result, icer);
+ }
+ });
+ }
+
+ }
+ }
+ }
+
+ /**
+ * Should allow retry.
+ *
+ * @param id the id
+ * @return true, if successful
+ */
+ private boolean shouldAllowRetry(String id) {
+ boolean isRetryAllowed = true;
+ if (retryLimitTracker.get(id) != null) {
+ Integer currentCount = retryLimitTracker.get(id);
+ if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) {
+ isRetryAllowed = false;
+ LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_RESYNC_LIMIT, id);
+ } else {
+        Integer newCount = Integer.valueOf(currentCount.intValue() + 1);
+ retryLimitTracker.put(id, newCount);
+ }
+
+ } else {
+      Integer firstRetryCount = Integer.valueOf(1);
+ retryLimitTracker.put(id, firstRetryCount);
+ }
+
+ return isRetryAllowed;
+ }
+
+ /**
+ * Gets the populated document.
+ *
+ * @param entityNode the entity node
+ * @param resultDescriptor the result descriptor
+ * @return the populated document
+ * @throws JsonProcessingException the json processing exception
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ protected IndexableCrossEntityReference getPopulatedDocument(JsonNode entityNode,
+ OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException {
+
+ IndexableCrossEntityReference icer = new IndexableCrossEntityReference();
+
+ icer.setEntityType(resultDescriptor.getEntityName());
+
+ List<String> primaryKeyValues = new ArrayList<String>();
+ String pkeyValue = null;
+
+ for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) {
+ pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName);
+ if (pkeyValue != null) {
+ primaryKeyValues.add(pkeyValue);
+ } else {
+ LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName());
+ }
+ }
+
+ final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/");
+ icer.setEntityPrimaryKeyValue(primaryCompositeKeyValue);
+
+ return icer;
+
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java
new file mode 100644
index 0000000..dded79f
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java
@@ -0,0 +1,404 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.UriBuilder;
+
+import org.apache.http.client.utils.URIBuilder;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.restclient.client.RestClient;
+import org.onap.aai.restclient.enums.RestAuthenticationMode;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.OxmModelLoader;
+import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException;
+import org.onap.aai.sparky.dal.rest.RestClientConstructionException;
+import org.onap.aai.sparky.dal.rest.RestClientFactory;
+import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.util.NodeUtils;
+
+/**
+ * The Class ActiveInventoryAdapter.
+ */
+
+public class ActiveInventoryAdapter {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(ActiveInventoryAdapter.class);
+
+ private static final String HEADER_TRANS_ID = "X-TransactionId";
+ private static final String HEADER_FROM_APP_ID = "X-FromAppId";
+ private static final String HEADER_AUTHORIZATION = "Authorization";
+
+ private static final String HTTP_SCHEME = "http";
+ private static final String HTTPS_SCHEME = "https";
+
+ private static final String TRANSACTION_ID_PREFIX = "txnId-";
+ private static final String UI_APP_NAME = "AAI-UI";
+
+ private OxmModelLoader oxmModelLoader;
+ private OxmEntityLookup oxmEntityLookup;
+ private RestEndpointConfig endpointConfig;
+
+ private RestClient restClient;
+
+ /**
+ * Instantiates a new active inventory adapter.
+ * @throws RestClientConstructionException
+ *
+ */
+
+ public ActiveInventoryAdapter(OxmModelLoader oxmModelLoader, OxmEntityLookup oxmEntityLookup,
+ RestEndpointConfig endpointConfig)
+ throws ElasticSearchOperationException, IOException, RestClientConstructionException {
+
+ this.oxmModelLoader = oxmModelLoader;
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.endpointConfig = endpointConfig;
+ this.restClient = RestClientFactory.buildClient(endpointConfig);
+
+ }
+
+ protected Map<String, List<String>> getMessageHeaders() {
+
+ Map<String, List<String>> headers = new HashMap<String, List<String>>();
+
+ headers.putIfAbsent(HEADER_FROM_APP_ID, new ArrayList<String>());
+ headers.get(HEADER_FROM_APP_ID).add(UI_APP_NAME);
+
+ headers.putIfAbsent(HEADER_TRANS_ID, new ArrayList<String>());
+ headers.get(HEADER_TRANS_ID).add(TRANSACTION_ID_PREFIX + NodeUtils.getRandomTxnId());
+
+ if (endpointConfig.getRestAuthenticationMode() == RestAuthenticationMode.SSL_BASIC) {
+
+ headers.putIfAbsent(HEADER_AUTHORIZATION, new ArrayList<String>());
+ headers.get(HEADER_AUTHORIZATION).add(getBasicAuthenticationCredentials());
+
+ }
+
+ return headers;
+ }
+
+ protected String getBasicAuthenticationCredentials() {
+ String usernameAndPassword = String.join(":", endpointConfig.getBasicAuthUserName(),
+ endpointConfig.getBasicAuthPassword());
+ return "Basic " + java.util.Base64.getEncoder().encodeToString(usernameAndPassword.getBytes());
+ }
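+
+  /*
+   * Illustrative example with hypothetical credentials (not taken from this change): a configured
+   * user name of "user" and password of "pass" would produce the header value
+   * "Basic dXNlcjpwYXNz", i.e. "Basic " followed by base64("user:pass").
+   */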
+
+ public OxmEntityLookup getOxmEntityLookup() {
+ return oxmEntityLookup;
+ }
+
+ public void setOxmEntityLookup(OxmEntityLookup oxmEntityLookup) {
+ this.oxmEntityLookup = oxmEntityLookup;
+ }
+
+ protected String getResourceBasePath() {
+
+ String versionStr = null;
+ if (oxmModelLoader != null) {
+ versionStr = String.valueOf(oxmModelLoader.getLatestVersionNum());
+ }
+
+ return "/aai/v" + versionStr;
+
+ }
+
+ public static String extractResourcePath(String selflink) {
+ try {
+ return new URI(selflink).getRawPath();
+ } catch (URISyntaxException uriSyntaxException) {
+ LOG.error(AaiUiMsgs.ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK,
+ uriSyntaxException.getMessage());
+ return selflink;
+ }
+ }
+
+
+ /**
+ * Gets the full url.
+ *
+ * @param resourceUrl the resource url
+ * @return the full url
+ * @throws Exception the exception
+ */
+ private String getFullUrl(String resourceUrl) throws Exception {
+ final String basePath = getResourceBasePath();
+ return String.format("https://%s:%s%s%s", endpointConfig.getEndpointIpAddress(),
+ endpointConfig.getEndpointServerPort(), basePath, resourceUrl);
+ }
+
+ public String getGenericQueryForSelfLink(String startNodeType, List<String> queryParams)
+ throws Exception {
+
+ URIBuilder urlBuilder = new URIBuilder(getFullUrl("/search/generic-query"));
+
+ for (String queryParam : queryParams) {
+ urlBuilder.addParameter("key", queryParam);
+ }
+
+ urlBuilder.addParameter("start-node-type", startNodeType);
+ urlBuilder.addParameter("include", startNodeType);
+
+ final String constructedLink = urlBuilder.toString();
+
+ return constructedLink;
+
+ }
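+
+  /*
+   * Illustrative sketch of a constructed link, with hypothetical values (the entity type, key and
+   * OXM version below are examples only; the key may be further percent-encoded by the URI builder):
+   *
+   *   https://<aai-host>:<aai-port>/aai/v<latest>/search/generic-query
+   *       ?key=service-instance.service-instance-id:example-instance-id-123
+   *       &start-node-type=service-instance
+   *       &include=service-instance
+   */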
+
+
+ public OperationResult getSelfLinksByEntityType(String entityType) throws Exception {
+
+ /*
+     * Dynamically construct the nodes-query for self-link discovery, using the OXM model entity
+     * data (the entity's primary key attributes) to drive the query.
+ */
+
+ if (entityType == null) {
+ throw new NullPointerException(
+ "Failed to getSelfLinksByEntityType() because entityType is null");
+ }
+
+ OxmEntityDescriptor entityDescriptor = oxmEntityLookup.getEntityDescriptors().get(entityType);
+
+ if (entityDescriptor == null) {
+ throw new NoSuchElementException("Failed to getSelfLinksByEntityType() because could"
+ + " not find entity descriptor from OXM with type = " + entityType);
+ }
+
+ String link = null;
+ final String primaryKeyStr =
+ NodeUtils.concatArray(entityDescriptor.getPrimaryKeyAttributeNames(), "/");
+
+ link = getFullUrl("/search/nodes-query?search-node-type=" + entityType + "&filter="
+ + primaryKeyStr + ":EXISTS");
+
+
+ return restClient.get(link, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE);
+
+ }
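+
+  /*
+   * Illustrative sketch of the nodes-query built above, using a hypothetical entity type and
+   * primary key (examples only, not taken from this change):
+   *
+   *   https://<aai-host>:<aai-port>/aai/v<latest>/search/nodes-query
+   *       ?search-node-type=generic-vnf&filter=vnf-id:EXISTS
+   */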
+
+ public OperationResult getSelfLinkForEntity(String entityType, String primaryKeyName,
+ String primaryKeyValue) throws Exception {
+
+ if (entityType == null) {
+ throw new NullPointerException("Failed to getSelfLinkForEntity() because entityType is null");
+ }
+
+ if (primaryKeyName == null) {
+ throw new NullPointerException(
+ "Failed to getSelfLinkForEntity() because primaryKeyName is null");
+ }
+
+ if (primaryKeyValue == null) {
+ throw new NullPointerException(
+ "Failed to getSelfLinkForEntity() because primaryKeyValue is null");
+ }
+
+ /*
+ * Try to protect ourselves from illegal URI formatting exceptions caused by characters that
+ * aren't natively supported in a URI, but can be escaped to make them legal.
+ */
+
+ String encodedEntityType = URLEncoder.encode(entityType, "UTF-8");
+ String encodedPrimaryKeyName = URLEncoder.encode(primaryKeyName, "UTF-8");
+ String encodedPrimaryKeyValue = URLEncoder.encode(primaryKeyValue, "UTF-8");
+
+ String link = null;
+
+ if ("service-instance".equals(entityType)) {
+
+ link = getFullUrl("/search/generic-query?key=" + encodedEntityType + "."
+ + encodedPrimaryKeyName + ":" + encodedPrimaryKeyValue + "&start-node-type="
+ + encodedEntityType + "&include=customer&depth=2");
+
+ } else {
+
+ link =
+ getFullUrl("/search/generic-query?key=" + encodedEntityType + "." + encodedPrimaryKeyName
+ + ":" + encodedPrimaryKeyValue + "&start-node-type=" + encodedEntityType);
+
+ }
+
+ return queryActiveInventoryWithRetries(link, "application/json",
+ endpointConfig.getNumRequestRetries());
+
+ }
+
+
+ /**
+ * Our retry conditions should be very specific.
+ *
+ * @param r the r
+ * @return true, if successful
+ */
+ private boolean shouldRetryRequest(OperationResult r) {
+
+ if (r == null) {
+ return true;
+ }
+
+ int rc = r.getResultCode();
+
+ if (rc == 200) {
+ return false;
+ }
+
+ if (rc == 404) {
+ return false;
+ }
+
+ return true;
+
+ }
+
+ /**
+ * Query active inventory.
+ *
+ * @param url the url
+ * @param acceptContentType the accept content type
+ * @return the operation result
+ */
+ // package protected for test classes instead of private
+ OperationResult queryActiveInventory(String url, String acceptContentType) {
+
+ return restClient.get(url, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE);
+
+ }
+
+ public RestEndpointConfig getEndpointConfig() {
+ return endpointConfig;
+ }
+
+ public void setEndpointConfig(RestEndpointConfig endpointConfig) {
+ this.endpointConfig = endpointConfig;
+ }
+
+ public OperationResult queryActiveInventoryWithRetries(String url, String responseType,
+ int numRetries) {
+
+ OperationResult result = null;
+
+ for (int retryCount = 0; retryCount < numRetries; retryCount++) {
+
+ LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_SEQ, url, String.valueOf(retryCount + 1));
+
+ result = queryActiveInventory(url, responseType);
+
+ /**
+ * Record number of times we have attempted the request to later summarize how many times we
+ * are generally retrying over thousands of messages in a sync.
+ *
+ * If the number of retries is surprisingly high, then we need to understand why that is as
+ * the number of retries is also causing a heavier load on AAI beyond the throttling controls
+       * we already have in place in terms of the transaction rate controller and number of
+ * parallelized threads per task processor.
+ */
+
+ result.setNumRetries(retryCount);
+
+ if (!shouldRetryRequest(result)) {
+
+ result.setFromCache(false);
+ LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_DONE_SEQ, url, String.valueOf(retryCount + 1));
+
+ return result;
+ }
+
+ try {
+ /*
+ * Sleep between re-tries to be nice to the target system.
+ */
+ Thread.sleep(50);
+ } catch (InterruptedException exc) {
+ LOG.error(AaiUiMsgs.QUERY_AAI_WAIT_INTERRUPTION, exc.getLocalizedMessage());
+ break;
+ }
+ LOG.error(AaiUiMsgs.QUERY_AAI_RETRY_FAILURE_WITH_SEQ, url, String.valueOf(retryCount + 1));
+
+ }
+
+ LOG.info(AaiUiMsgs.QUERY_AAI_RETRY_MAXED_OUT, url);
+
+ return result;
+
+ }
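+
+  /*
+   * Typical usage, as seen in the synchronizers (sketch): callers pass the configured retry count
+   * and a JSON accept type, e.g.
+   *
+   *   aaiAdapter.queryActiveInventoryWithRetries(selfLink, "application/json",
+   *       aaiAdapter.getEndpointConfig().getNumRequestRetries());
+   *
+   * Each unsuccessful attempt sleeps 50 ms before the next try.
+   */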
+
+ public String repairSelfLink(String selfLink) {
+ return repairSelfLink(selfLink, null);
+ }
+
+ /**
+ * This method adds a scheme, host and port (if missing) to the passed-in URI.
+ * If these parts of the URI are already present, they will not be duplicated.
+ *
+ * @param selflink The URI to repair
+ * @param queryParams The query parameters as a single string
+ * @return The corrected URI (i.e. includes a scheme/host/port)
+ */
+ public String repairSelfLink(String selflink, String queryParams) {
+ if (selflink == null) {
+ return selflink;
+ }
+
+ UriBuilder builder = UriBuilder.fromPath(selflink).host(endpointConfig.getEndpointIpAddress())
+ .port(Integer.parseInt(endpointConfig.getEndpointServerPort()));
+
+ switch (endpointConfig.getRestAuthenticationMode()) {
+
+ case SSL_BASIC:
+ case SSL_CERT: {
+ builder.scheme(HTTPS_SCHEME);
+ break;
+ }
+
+ default: {
+ builder.scheme(HTTP_SCHEME);
+ }
+ }
+
+ boolean includeQueryParams = ( (null != queryParams) && (!"".equals(queryParams)) );
+
+ /* builder.build().toString() will encode special characters to hexadecimal pairs prefixed with a '%'
+ so we're adding the query parameters separately, in their UTF-8 representations, so that
+ characters such as '?', '&', etc. remain intact as needed by the synchronizer */
+ return (builder.build().toString() + (includeQueryParams ? queryParams : ""));
+ }
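+
+  /*
+   * Illustrative sketch with hypothetical values (the path, query string, host and port are
+   * examples only): in SSL_BASIC or SSL_CERT mode,
+   *
+   *   repairSelfLink("/aai/v13/network/generic-vnfs/generic-vnf/example-vnf-id", "?depth=0")
+   *     -> "https://<aai-host>:<aai-port>/aai/v13/network/generic-vnfs/generic-vnf/example-vnf-id?depth=0"
+   *
+   * In any other authentication mode the scheme would be "http".
+   */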
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java
new file mode 100644
index 0000000..3f5a273
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java
@@ -0,0 +1,157 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.restclient.client.RestClient;
+import org.onap.aai.sparky.dal.rest.RestClientConstructionException;
+import org.onap.aai.sparky.dal.rest.RestClientFactory;
+import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig;
+
+/**
+ * The Class ElasticSearchAdapter.
+ *
+ */
+public class ElasticSearchAdapter {
+
+ private static final String BULK_IMPORT_INDEX_TEMPLATE =
+ "{\"index\":{\"_index\":\"%s\",\"_type\":\"%s\",\"_id\":\"%s\", \"_version\":\"%s\"}}\n";
+
+ private static final String BULK_API = "_bulk";
+
+ private static final String DEFAULT_TYPE = "default";
+
+ private RestClient restClient;
+ private RestEndpointConfig endpointConfig;
+
+ /**
+ * Instantiates a new elastic search adapter.
+ * @throws RestClientConstructionException
+ */
+ public ElasticSearchAdapter(RestEndpointConfig endpointConfig) throws RestClientConstructionException {
+
+ this.restClient = RestClientFactory.buildClient(endpointConfig);
+ this.endpointConfig = endpointConfig;
+
+ }
+
+ protected Map<String, List<String>> getMessageHeaders() {
+ Map<String, List<String>> headers = new HashMap<String, List<String>>();
+ // insert mandatory headers if there are any
+ return headers;
+ }
+
+ public OperationResult doGet(String url, MediaType acceptContentType) {
+ return restClient.get(url, getMessageHeaders(), acceptContentType);
+ }
+
+ public OperationResult doDelete(String url, MediaType acceptContentType) {
+ return restClient.delete(url, getMessageHeaders(), acceptContentType);
+ }
+
+ public OperationResult doPost(String url, String jsonPayload, MediaType acceptContentType) {
+ return restClient.post(url, jsonPayload, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE,
+ acceptContentType);
+ }
+
+ public OperationResult doPut(String url, String jsonPayload, MediaType acceptContentType) {
+ return restClient.put(url, jsonPayload, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE,
+ acceptContentType);
+ }
+
+ public OperationResult doPatch(String url, String jsonPayload, MediaType acceptContentType) {
+
+ Map<String,List<String>> headers = getMessageHeaders();
+ headers.putIfAbsent("X-HTTP-Method-Override", new ArrayList<String>());
+ headers.get("X-HTTP-Method-Override").add("PATCH");
+
+ return restClient.post(url, jsonPayload, headers, MediaType.APPLICATION_JSON_TYPE, acceptContentType);
+ }
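+
+  /*
+   * Note: the PATCH is tunneled as a POST carrying an "X-HTTP-Method-Override: PATCH" header, so
+   * it can be sent through clients and intermediaries that do not support the PATCH verb natively.
+   */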
+
+ public OperationResult doHead(String url, MediaType acceptContentType) {
+ return restClient.head(url, getMessageHeaders(), acceptContentType);
+ }
+
+ public OperationResult doBulkOperation(String url, String payload) {
+ return restClient.put(url, payload, getMessageHeaders(),
+ MediaType.APPLICATION_FORM_URLENCODED_TYPE, MediaType.APPLICATION_JSON_TYPE);
+ }
+
+ public String buildBulkImportOperationRequest(String index, String type, String id,
+ String version, String payload) {
+
+ StringBuilder requestPayload = new StringBuilder(128);
+
+ requestPayload.append(String.format(BULK_IMPORT_INDEX_TEMPLATE, index, type, id, version));
+ requestPayload.append(payload).append("\n");
+
+ return requestPayload.toString();
+
+ }
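+
+  /*
+   * Illustrative sketch of the request body produced above, using hypothetical index, id and
+   * version values (the payload line is an arbitrary example document):
+   *
+   *   {"index":{"_index":"example-index","_type":"default","_id":"example-doc-id", "_version":"3"}}
+   *   {"entityType":"example","link":"/example/self-link"}
+   *
+   * Each action line and each document line is newline-terminated, as required by the _bulk API.
+   */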
+
+ public OperationResult retrieveEntityById(String host, String port, String indexName,
+ String docType, String resourceUrl) {
+ String esUrl =
+ String.format("http://%s:%s/%s/%s/%s", host, port, indexName, docType, resourceUrl);
+ return doGet(esUrl, MediaType.APPLICATION_JSON_TYPE);
+ }
+
+ public String buildElasticSearchUrlForApi(String indexName, String api) {
+ return String.format("http://%s:%s/%s/%s", endpointConfig.getEndpointIpAddress(),
+ endpointConfig.getEndpointServerPort(), indexName, api);
+ }
+
+ public String buildElasticSearchUrl(String indexName, String docType) {
+ return String.format("http://%s:%s/%s/%s", endpointConfig.getEndpointIpAddress(),
+ endpointConfig.getEndpointServerPort(), indexName, docType);
+ }
+
+ public String buildElasticSearchGetDocUrl(String indexName, String docType, String docId) {
+ return String.format("http://%s:%s/%s/%s/%s", endpointConfig.getEndpointIpAddress(),
+ endpointConfig.getEndpointServerPort(), indexName, docType, docId);
+ }
+
+ public String buildElasticSearchGetDocUrl(String indexName, String docId) {
+ return buildElasticSearchGetDocUrl(indexName, DEFAULT_TYPE, docId);
+ }
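+
+  /*
+   * Illustrative sketch with hypothetical host, port, index and document-id values:
+   *
+   *   buildElasticSearchGetDocUrl("example-index", "example-doc-id")
+   *     -> "http://<es-host>:<es-port>/example-index/default/example-doc-id"
+   */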
+
+ public String buildElasticSearchPostUrl(String indexName) {
+ return String.format("http://%s:%s/%s/%s", endpointConfig.getEndpointIpAddress(),
+ endpointConfig.getEndpointServerPort(), indexName, DEFAULT_TYPE);
+ }
+
+ public String getBulkUrl() {
+ return String.format("http://%s:%s/%s", endpointConfig.getEndpointIpAddress(),
+ endpointConfig.getEndpointServerPort(), BULK_API);
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/GizmoAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/GizmoAdapter.java
new file mode 100644
index 0000000..4ceb0d6
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/GizmoAdapter.java
@@ -0,0 +1,336 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.UriBuilder;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.restclient.client.RestClient;
+import org.onap.aai.restclient.enums.RestAuthenticationMode;
+import org.onap.aai.sparky.config.oxm.OxmModelLoader;
+import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException;
+import org.onap.aai.sparky.dal.rest.RestClientConstructionException;
+import org.onap.aai.sparky.dal.rest.RestClientFactory;
+import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.util.NodeUtils;
+
+/**
+ * The Class GizmoAdapter.
+ */
+
+public class GizmoAdapter {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(GizmoAdapter.class);
+
+ private static final String HEADER_TRANS_ID = "X-TransactionId";
+ private static final String HEADER_FROM_APP_ID = "X-FromAppId";
+ private static final String HEADER_AUTHORIZATION = "Authorization";
+
+ private static final String HTTP_SCHEME = "http";
+ private static final String HTTPS_SCHEME = "https";
+
+ private static final String TRANSACTION_ID_PREFIX = "txnId-";
+ private static final String UI_APP_NAME = "AAI-UI";
+
+ private OxmModelLoader oxmModelLoader;
+
+ private RestEndpointConfig endpointConfig;
+
+ private RestClient restClient;
+
+ private String inventoryBasePath;
+ private String relationshipsBasePath;
+
+ /**
+	 * Instantiates a new gizmo adapter.
+ *
+ * @throws RestClientConstructionException
+ *
+ */
+
+ public GizmoAdapter(OxmModelLoader oxmModelLoader, RestEndpointConfig endpointConfig)
+ throws ElasticSearchOperationException, IOException, RestClientConstructionException {
+
+ this.oxmModelLoader = oxmModelLoader;
+ this.endpointConfig = endpointConfig;
+ this.restClient = RestClientFactory.buildClient(endpointConfig);
+
+ }
+
+ public String getRelationshipsBasePath() {
+ return relationshipsBasePath;
+ }
+
+ public void setRelationshipsBasePath(String relationshipsBasePath) {
+ this.relationshipsBasePath = relationshipsBasePath;
+ }
+
+ public String getInventoryBasePath() {
+ return inventoryBasePath;
+ }
+
+ public void setInventoryBasePath(String inventoryBasePath) {
+ this.inventoryBasePath = inventoryBasePath;
+ }
+
+ public String getFullInventoryUrl(String resourceUrl) throws Exception {
+ final String host = endpointConfig.getEndpointIpAddress();
+ final String port = endpointConfig.getEndpointServerPort();
+ final String basePath = getInventoryBasePath();
+ return String.format("https://%s:%s%s%s", host, port, basePath, resourceUrl);
+ }
+
+ public String addServerDetailsToUrl(String resourceUrl) throws Exception {
+ final String host = endpointConfig.getEndpointIpAddress();
+ final String port = endpointConfig.getEndpointServerPort();
+ return String.format("https://%s:%s/%s", host, port, resourceUrl);
+ }
+
+ public String getFullRelationshipUrl(String resourceUrl) throws Exception {
+ final String host = endpointConfig.getEndpointIpAddress();
+ final String port = endpointConfig.getEndpointServerPort();
+ final String basePath = getRelationshipsBasePath();
+ return String.format("https://%s:%s%s%s", host, port, basePath, resourceUrl);
+ }
+
+ protected Map<String, List<String>> getMessageHeaders() {
+
+ Map<String, List<String>> headers = new HashMap<String, List<String>>();
+
+ headers.putIfAbsent(HEADER_FROM_APP_ID, new ArrayList<String>());
+ headers.get(HEADER_FROM_APP_ID).add(UI_APP_NAME);
+
+ headers.putIfAbsent(HEADER_TRANS_ID, new ArrayList<String>());
+ headers.get(HEADER_TRANS_ID).add(TRANSACTION_ID_PREFIX + NodeUtils.getRandomTxnId());
+
+ if (endpointConfig.getRestAuthenticationMode() == RestAuthenticationMode.SSL_BASIC) {
+
+ headers.putIfAbsent(HEADER_AUTHORIZATION, new ArrayList<String>());
+ headers.get(HEADER_AUTHORIZATION).add(getBasicAuthenticationCredentials());
+
+ }
+
+ return headers;
+ }
+
+ protected String getBasicAuthenticationCredentials() {
+ String usernameAndPassword = String.join(":", endpointConfig.getBasicAuthUserName(),
+ endpointConfig.getBasicAuthPassword());
+ return "Basic " + java.util.Base64.getEncoder().encodeToString(usernameAndPassword.getBytes());
+ }
+
+ /**
+ * Our retry conditions should be very specific.
+ *
+ * @param r
+ * the r
+ * @return true, if successful
+ */
+ private boolean shouldRetryRequest(OperationResult r) {
+
+ if (r == null) {
+ return true;
+ }
+
+ int rc = r.getResultCode();
+
+ if (rc == 200) {
+ return false;
+ }
+
+ if (rc == 404) {
+ return false;
+ }
+
+ return true;
+
+ }
+
+ /**
+	 * Query Gizmo.
+ *
+ * @param url
+ * the url
+ * @param acceptContentType
+ * the accept content type
+ * @return the operation result
+ */
+ OperationResult queryGizmo(String url, String acceptContentType) {
+
+ return restClient.get(url, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE);
+
+ }
+
+ public RestEndpointConfig getEndpointConfig() {
+ return endpointConfig;
+ }
+
+ public void setEndpointConfig(RestEndpointConfig endpointConfig) {
+ this.endpointConfig = endpointConfig;
+ }
+
+ public OperationResult queryGizmoWithRetries(String url, String responseType, int numRetries) {
+
+ OperationResult result = null;
+
+ for (int retryCount = 0; retryCount < numRetries; retryCount++) {
+
+ LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_SEQ, url, String.valueOf(retryCount + 1));
+
+ result = queryGizmo(url, responseType);
+
+ /**
+ * Record number of times we have attempted the request to later
+ * summarize how many times we are generally retrying over thousands
+ * of messages in a sync.
+ *
+ * If the number of retries is surprisingly high, then we need to
+ * understand why that is as the number of retries is also causing a
+ * heavier load on AAI beyond the throttling controls we already
+			 * have in place in terms of the transaction rate controller and
+ * number of parallelized threads per task processor.
+ */
+
+ result.setNumRetries(retryCount);
+
+ if (!shouldRetryRequest(result)) {
+
+ result.setFromCache(false);
+ LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_DONE_SEQ, url, String.valueOf(retryCount + 1));
+
+ return result;
+ }
+
+ try {
+ /*
+ * Sleep between re-tries to be nice to the target system.
+ */
+ Thread.sleep(50);
+ } catch (InterruptedException exc) {
+ LOG.error(AaiUiMsgs.QUERY_AAI_WAIT_INTERRUPTION, exc.getLocalizedMessage());
+ break;
+ }
+ LOG.error(AaiUiMsgs.QUERY_AAI_RETRY_FAILURE_WITH_SEQ, url, String.valueOf(retryCount + 1));
+
+ }
+
+ LOG.info(AaiUiMsgs.QUERY_AAI_RETRY_MAXED_OUT, url);
+
+ return result;
+
+ }
+
+ /**
+ * This method adds a scheme, host and port (if missing) to the passed-in
+ * URI. If these parts of the URI are already present, they will not be
+ * duplicated.
+ *
+ * @param selflink
+ * The URI to repair
+ * @param queryParams
+ * The query parameters as a single string
+ * @return The corrected URI (i.e. includes a scheme/host/port)
+ */
+
+ private String repairGizmoSelfLink(String baseUrlPath, String selfLink, String queryParams) {
+
+ if (selfLink == null) {
+ return selfLink;
+ }
+
+ if (selfLink.startsWith("http") || selfLink.startsWith("https")) {
+ return selfLink;
+ }
+
+ UriBuilder builder = UriBuilder.fromPath(baseUrlPath + "/" + selfLink)
+ .host(endpointConfig.getEndpointIpAddress())
+ .port(Integer.parseInt(endpointConfig.getEndpointServerPort()));
+
+ switch (endpointConfig.getRestAuthenticationMode()) {
+
+ case SSL_BASIC:
+ case SSL_CERT: {
+ builder.scheme(HTTPS_SCHEME);
+ break;
+ }
+
+ default: {
+ builder.scheme(HTTP_SCHEME);
+ }
+ }
+
+ boolean includeQueryParams = ((null != queryParams) && (!"".equals(queryParams)));
+
+ /*
+ * builder.build().toString() will encode special characters to hexadecimal pairs prefixed with
+ * a '%' so we're adding the query parameters separately, in their UTF-8 representations, so
+ * that characters such as '?', '&', etc. remain intact as needed by the synchronizer
+ */
+ return (builder.build().toString() + (includeQueryParams ? queryParams : ""));
+
+ }
+
+ public String repairRelationshipSelfLink(String selflink, String queryParams) {
+ return repairGizmoSelfLink(relationshipsBasePath, selflink, queryParams);
+ }
+
+ public String repairInventorySelfLink(String selflink, String queryParams) {
+ return repairGizmoSelfLink(inventoryBasePath, selflink, queryParams);
+ }
+
+ public OperationResult getSelfLinksByEntityType(String entityType) throws Exception {
+
+ if (entityType == null) {
+ throw new NullPointerException("Failed to getSelfLinksByEntityType() because entityType is null");
+ }
+
+ String link = getFullInventoryUrl(entityType);
+
+ return queryGizmoWithRetries(link, "application/json", endpointConfig.getNumRequestRetries());
+
+ }
+
+ public static String extractResourcePath(String selflink) {
+ try {
+ return new URI(selflink).getRawPath();
+ } catch (URISyntaxException uriSyntaxException) {
+ LOG.error(AaiUiMsgs.ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK, uriSyntaxException.getMessage());
+ return selflink;
+ }
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java
new file mode 100644
index 0000000..0fc4a4e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java
@@ -0,0 +1,159 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+
+
+/**
+ * The Class NetworkTransaction.
+ */
+public class NetworkTransaction {
+
+ private OperationResult operationResult;
+
+ private String entityType;
+
+ private String link;
+
+ private String queryParameters;
+
+ private HttpMethod operationType;
+
+ private OxmEntityDescriptor descriptor;
+
+ private long createdTimeStampInMs;
+
+ private long opTimeInMs;
+
+ private long taskAgeInMs;
+
+ /**
+ * Instantiates a new network transaction.
+ */
+ public NetworkTransaction() {
+ this.createdTimeStampInMs = System.currentTimeMillis();
+ this.opTimeInMs = 0L;
+ }
+
+ /**
+ * Instantiates a new network transaction.
+ *
+ * @param method the method
+ * @param entityType the entity type
+ * @param or the or
+ */
+ public NetworkTransaction(HttpMethod method, String entityType, OperationResult or) {
+ this();
+ this.operationType = method;
+ this.entityType = entityType;
+ this.operationResult = or;
+ this.opTimeInMs = 0L;
+ }
+
+ public HttpMethod getOperationType() {
+ return operationType;
+ }
+
+ public long getTaskAgeInMs() {
+ return taskAgeInMs;
+ }
+
+ /**
+ * Sets the task age in ms.
+ */
+ public void setTaskAgeInMs() {
+ this.taskAgeInMs = (System.currentTimeMillis() - createdTimeStampInMs);
+ }
+
+ public void setOperationType(HttpMethod operationType) {
+ this.operationType = operationType;
+ }
+
+ public OperationResult getOperationResult() {
+ return operationResult;
+ }
+
+ public void setOperationResult(OperationResult operationResult) {
+ this.operationResult = operationResult;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public String getLink() {
+ return link;
+ }
+
+ public void setLink(String link) {
+ this.link = link;
+ }
+
+ public String getQueryParameters() {
+ return queryParameters;
+ }
+
+ public void setQueryParameters(String queryParameters) {
+ this.queryParameters = queryParameters;
+ }
+
+ public long getOpTimeInMs() {
+ return opTimeInMs;
+ }
+
+ public void setOpTimeInMs(long opTimeInMs) {
+ this.opTimeInMs = opTimeInMs;
+ }
+
+ public OxmEntityDescriptor getDescriptor() {
+ return descriptor;
+ }
+
+ public void setDescriptor(OxmEntityDescriptor descriptor) {
+ this.descriptor = descriptor;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "NetworkTransaction [operationResult=" + operationResult.toString() + ", entityType="
+ + entityType + ", link=" + link + ", operationType=" + operationType + ", descriptor="
+ + descriptor.toString() + ", createdTimeStampInMs=" + createdTimeStampInMs
+ + ", taskAgeInMs=" + taskAgeInMs + "]";
+ }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java
new file mode 100644
index 0000000..5ec7318
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java
@@ -0,0 +1,285 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.aai;
+
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+
+
+/**
+ * The Class ActiveInventoryEntityStatistics.
+ */
+public class ActiveInventoryEntityStatistics {
+
+ private static final String TOTAL = "Total";
+
+ private static final String FOUND = "Found";
+
+ private static final String NO_PAYLOAD = "NoPayload";
+
+ private static final String NOT_FOUND = "NotFound";
+
+ private static final String NUM_RETRIES = "NumRetries";
+
+ private static final String ERROR = "Error";
+
+ private Map<String, HashMap<String, AtomicInteger>> activeInventoryEntityStatistics;
+
+ /**
+ * Creates the entity op stats.
+ *
+ * @return the hash map
+ */
+ private HashMap<String, AtomicInteger> createEntityOpStats() {
+
+ HashMap<String, AtomicInteger> opStats = new HashMap<String, AtomicInteger>();
+
+ opStats.put(TOTAL, new AtomicInteger());
+ opStats.put(FOUND, new AtomicInteger());
+ opStats.put(NO_PAYLOAD, new AtomicInteger());
+ opStats.put(NOT_FOUND, new AtomicInteger());
+ opStats.put(NUM_RETRIES, new AtomicInteger());
+ opStats.put(ERROR, new AtomicInteger());
+
+ return opStats;
+
+ }
+
+  /**
+   * Resets all entity operation counters to zero.
+   */
+  private void resetEntityOpCounters() {
+ Set<String> keys = activeInventoryEntityStatistics.keySet();
+
+ Set<String> opStatKeySet = null;
+ Map<String, AtomicInteger> opStats = null;
+
+ for (String k : keys) {
+
+ opStats = activeInventoryEntityStatistics.get(k);
+
+ opStatKeySet = opStats.keySet();
+
+ for (String opStatKey : opStatKeySet) {
+ opStats.get(opStatKey).set(0);
+ }
+ }
+ }
+
+  /**
+   * Instantiates a new active inventory entity statistics.
+   */
+ public ActiveInventoryEntityStatistics() {
+ activeInventoryEntityStatistics = new HashMap<String, HashMap<String, AtomicInteger>>();
+ reset();
+ }
+
+  /**
+   * Initializes a counter set for each of the supplied entity types.
+   *
+   * @param entityTypes the entity types to track
+   */
+  public void intializeEntityCounters(String... entityTypes) {
+
+ if (entityTypes != null && entityTypes.length > 0) {
+ for (String entityType : entityTypes) {
+ activeInventoryEntityStatistics.put(entityType, createEntityOpStats());
+ }
+
+ }
+
+ }
+
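+  /**
+   * Initializes a counter set for each entity type in the supplied set.
+   *
+   * @param entityTypes the entity types to track
+   */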
+  public void intializeEntityCounters(Set<String> entityTypes) {
+
+ if (entityTypes != null && entityTypes.size() > 0) {
+ for (String entityType : entityTypes) {
+ activeInventoryEntityStatistics.put(entityType, createEntityOpStats());
+ }
+ }
+
+ }
+
+
+
+ /**
+ * Reset.
+ */
+ public void reset() {
+    resetEntityOpCounters();
+ }
+
+ /**
+ * Gets the result code.
+ *
+ * @param txn the txn
+ * @return the result code
+ */
+ private int getResultCode(NetworkTransaction txn) {
+
+
+ if (txn == null) {
+ return -1;
+ }
+
+ OperationResult or = txn.getOperationResult();
+
+ if (or == null) {
+ return -1;
+ }
+
+ return or.getResultCode();
+
+ }
+
+ /**
+ * Update active inventory entity counters.
+ *
+ * @param txn the txn
+ */
+ private void updateActiveInventoryEntityCounters(NetworkTransaction txn) {
+
+ if (txn == null) {
+ return;
+ }
+
+ Map<String, AtomicInteger> opStats = activeInventoryEntityStatistics.get(txn.getEntityType());
+
+ int rc = getResultCode(txn);
+
+ switch (txn.getOperationType()) {
+
+ case GET: {
+
+ opStats.get(TOTAL).incrementAndGet();
+
+ if (200 <= rc && rc <= 299) {
+ opStats.get(FOUND).incrementAndGet();
+ } else if (rc == 404) {
+ opStats.get(NOT_FOUND).incrementAndGet();
+ } else {
+ opStats.get(ERROR).incrementAndGet();
+ }
+
+ break;
+ }
+
+ default: {
+ // nothing else for now
+ }
+
+ }
+
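+    // A successful response may still carry an empty payload or have required retries; track both.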
+ OperationResult or = txn.getOperationResult();
+
+ if (or != null && or.wasSuccessful()) {
+
+ if (or.getResult() == null || or.getResult().length() == 0) {
+ opStats.get(NO_PAYLOAD).incrementAndGet();
+ }
+
+ if (or.getNumRetries() > 0) {
+ opStats.get(NUM_RETRIES).addAndGet(or.getNumRetries());
+ }
+
+ }
+
+
+ }
+
+ /**
+ * Update counters.
+ *
+ * @param txn the txn
+ */
+ public void updateCounters(NetworkTransaction txn) {
+
+ updateActiveInventoryEntityCounters(txn);
+
+ }
+
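+  /**
+   * Builds a human-readable report of the per-entity A&AI retrieval counters, sorted
+   * case-insensitively by entity type.
+   *
+   * @return the formatted statistics report
+   */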
+ public String getStatisticsReport() {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ /*
+ * sort entities, then sort nested op codes
+ */
+
+ TreeMap<String, HashMap<String, AtomicInteger>> activeInventoryEntitySortedTreeMap =
+ new TreeMap<String, HashMap<String, AtomicInteger>>(new Comparator<String>() {
+
+ @Override
+ public int compare(String o1, String o2) {
+ return o1.toLowerCase().compareTo(o2.toLowerCase());
+ }
+ });
+
+ activeInventoryEntitySortedTreeMap.putAll(activeInventoryEntityStatistics);
+
+ for (String counterEntityKey : activeInventoryEntitySortedTreeMap.keySet()) {
+
+ HashMap<String, AtomicInteger> entityCounters =
+ activeInventoryEntitySortedTreeMap.get(counterEntityKey);
+
+ AtomicInteger total = entityCounters.get(TOTAL);
+ AtomicInteger found = entityCounters.get(FOUND);
+ AtomicInteger noPayload = entityCounters.get(NO_PAYLOAD);
+ AtomicInteger notFound = entityCounters.get(NOT_FOUND);
+ AtomicInteger numRetries = entityCounters.get(NUM_RETRIES);
+ AtomicInteger error = entityCounters.get(ERROR);
+
+ int totalValue = (total == null) ? 0 : total.get();
+ int foundValue = (found == null) ? 0 : found.get();
+ int noPayloadValue = (noPayload == null) ? 0 : noPayload.get();
+ int notFoundValue = (notFound == null) ? 0 : notFound.get();
+ int numRetriesValue = (numRetries == null) ? 0 : numRetries.get();
+ int errorValue = (error == null) ? 0 : error.get();
+
+ sb.append("\n ")
+ .append(String.format(
+ "%-30s TOTAL: %-12d FOUND: %-12d NO_PAYLOAD:"
+ + " %-12d NOT_FOUND: %-12d NUM_RETRIES: %-12d ERROR: %-12d",
+ counterEntityKey, totalValue, foundValue, noPayloadValue, notFoundValue,
+ numRetriesValue, errorValue));
+ }
+
+ return sb.toString();
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java
new file mode 100644
index 0000000..b05b12c
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java
@@ -0,0 +1,139 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.aai;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.analytics.AbstractStatistics;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+
+/**
+ * The Class ActiveInventoryProcessingExceptionStatistics.
+ */
+public class ActiveInventoryProcessingExceptionStatistics extends AbstractStatistics {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(ActiveInventoryAdapter.class);
+
+ private static final String NATIVE_SOCKET_CONNECT_EXCEPTION = "NativeSocketConnectException";
+ private static final String NATIVE_SOCKET_CONNECTION_RESET = "NativeSocketConnectionReset";
+ private static final String NATIVE_SOCKET_CONNECTION_REFUSED = "NativeSocketConnectionRefused";
+  private static final String CLIENT_TIMEOUT_EXCEPTION = "JerseyClientTimeoutException";
+ private static final String UNKNOWN_EXCEPTION = "UnknownException";
+
+ /**
+ * Creates the counters.
+ */
+ private void createCounters() {
+ addCounter(NATIVE_SOCKET_CONNECT_EXCEPTION);
+ addCounter(NATIVE_SOCKET_CONNECTION_RESET);
+ addCounter(NATIVE_SOCKET_CONNECTION_REFUSED);
+ addCounter(CLIENT_TIMEOUT_EXCEPTION);
+ addCounter(UNKNOWN_EXCEPTION);
+ }
+
+ /**
+ * Instantiates a new active inventory processing exception statistics.
+ */
+ public ActiveInventoryProcessingExceptionStatistics() {
+ createCounters();
+ reset();
+ }
+
+ /**
+ * Update counters.
+ *
+ * @param txn the txn
+ */
+ public void updateCounters(NetworkTransaction txn) {
+
+ if (txn == null) {
+ return;
+ }
+
+ OperationResult or = txn.getOperationResult();
+
+ if (or != null && !or.wasSuccessful()) {
+
+ if (or.getResultCode() != 404) {
+
+ String result = or.getResult();
+
+ if (result != null) {
+
+ /*
+ * Try to classify exceptions and peg counters
+ */
+
+ if (result.contains("java.net.SocketTimeoutException: connect timed out")) {
+ pegCounter(CLIENT_TIMEOUT_EXCEPTION);
+ } else if (result.contains("java.net.ConnectException: Connection timed out: connect")) {
+ pegCounter(NATIVE_SOCKET_CONNECT_EXCEPTION);
+ } else if (result.contains("java.net.ConnectException: Connection refused: connect")) {
+ pegCounter(NATIVE_SOCKET_CONNECTION_REFUSED);
+ } else if (result.contains("java.net.SocketException: Connection reset")) {
+ pegCounter(NATIVE_SOCKET_CONNECTION_RESET);
+ } else {
+ pegCounter(UNKNOWN_EXCEPTION);
+          LOG.error(AaiUiMsgs.PEGGING_ERROR, result);
+ }
+
+ }
+ }
+
+ }
+
+ }
+
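+  /**
+   * Builds a human-readable report of the classified A&AI processing exception counters.
+   *
+   * @return the formatted statistics report
+   */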
+ public String getStatisticsReport() {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ int nativeConnect = getCounterValue(NATIVE_SOCKET_CONNECT_EXCEPTION);
+ int nativeCxnReset = getCounterValue(NATIVE_SOCKET_CONNECTION_RESET);
+ int nativeCxnRefused = getCounterValue(NATIVE_SOCKET_CONNECTION_REFUSED);
+ int clientTimeout = getCounterValue(CLIENT_TIMEOUT_EXCEPTION);
+ int unknown = getCounterValue(UNKNOWN_EXCEPTION);
+
+ sb.append("\n ")
+ .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECT_EXCEPTION, nativeConnect));
+ sb.append("\n ")
+ .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECTION_RESET, nativeCxnReset));
+ sb.append("\n ")
+ .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECTION_REFUSED, nativeCxnRefused));
+ sb.append("\n ")
+ .append(String.format("%-40s: %-12d", CLIENT_TIMEOUT_EXCEPTION, clientTimeout));
+ sb.append("\n ").append(String.format("%-40s: %-12d", UNKNOWN_EXCEPTION, unknown));
+
+ return sb.toString();
+
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java
new file mode 100644
index 0000000..0d46c2a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java
@@ -0,0 +1,265 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.elasticsearch;
+
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+
+
+/**
+ * The Class ElasticSearchEntityStatistics.
+ */
+public class ElasticSearchEntityStatistics {
+
+ private static final String TOTAL = "Total";
+ private static final String CREATED = "Created";
+ private static final String MODIFIED = "Modified";
+ private static final String OTHERSUCCESS = "OTHERSUCCESS";
+ private static final String DELETED = "DELETED";
+ private static final String ERROR = "ERROR";
+
+ private Map<String, HashMap<String, AtomicInteger>> entityStatistics;
+
+ /**
+ * Creates the entity op stats.
+ *
+ * @return the hash map
+ */
+ private HashMap<String, AtomicInteger> createEntityOpStats() {
+
+ HashMap<String, AtomicInteger> opStats = new HashMap<String, AtomicInteger>();
+
+ opStats.put(TOTAL, new AtomicInteger());
+ opStats.put(CREATED, new AtomicInteger());
+ opStats.put(MODIFIED, new AtomicInteger());
+ opStats.put(OTHERSUCCESS, new AtomicInteger());
+ opStats.put(DELETED, new AtomicInteger());
+ opStats.put(ERROR, new AtomicInteger());
+
+ return opStats;
+
+ }
+
+  /**
+   * Resets all entity operation counters to zero.
+   */
+  private void resetEntityOpCounters() {
+ Set<String> keys = entityStatistics.keySet();
+
+ Set<String> opStatKeySet = null;
+ Map<String, AtomicInteger> opStats = null;
+
+ for (String k : keys) {
+
+ opStats = entityStatistics.get(k);
+
+ opStatKeySet = opStats.keySet();
+
+ for (String opStatKey : opStatKeySet) {
+ opStats.get(opStatKey).set(0);
+ }
+ }
+ }
+
+  /**
+   * Instantiates a new elastic search entity statistics.
+   */
+ public ElasticSearchEntityStatistics() {
+ entityStatistics = new HashMap<String, HashMap<String, AtomicInteger>>();
+ reset();
+ }
+
+  /**
+   * Initializes a counter set for each of the supplied entity types.
+   *
+   * @param entityTypes the entity types to track
+   */
+  public void intializeEntityCounters(String... entityTypes) {
+
+ if (entityTypes != null && entityTypes.length > 0) {
+ for (String entityType : entityTypes) {
+ entityStatistics.put(entityType, createEntityOpStats());
+ }
+
+ }
+
+ }
+
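+  /**
+   * Initializes a counter set for each entity type in the supplied set.
+   *
+   * @param entityTypes the entity types to track
+   */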
+  public void intializeEntityCounters(Set<String> entityTypes) {
+
+ if (entityTypes != null && entityTypes.size() > 0) {
+ for (String entityType : entityTypes) {
+ entityStatistics.put(entityType, createEntityOpStats());
+ }
+ }
+
+ }
+
+ /**
+ * Reset.
+ */
+ public void reset() {
+    resetEntityOpCounters();
+ }
+
+ /**
+ * Gets the result code.
+ *
+ * @param txn the txn
+ * @return the result code
+ */
+ private int getResultCode(NetworkTransaction txn) {
+
+
+ if (txn == null) {
+ return -1;
+ }
+
+ OperationResult or = txn.getOperationResult();
+
+ if (or == null) {
+ return -1;
+ }
+
+ return or.getResultCode();
+
+ }
+
+ /**
+ * Update elastic search entity counters.
+ *
+ * @param txn the txn
+ */
+ private void updateElasticSearchEntityCounters(NetworkTransaction txn) {
+
+ if (txn == null) {
+ return;
+ }
+
+ Map<String, AtomicInteger> entityOpStats = entityStatistics.get(txn.getEntityType());
+
+ int resultCode = getResultCode(txn);
+
+ if (txn.getOperationType() == HttpMethod.PUT) {
+
+ entityOpStats.get(TOTAL).incrementAndGet();
+
+ if (resultCode == 201) {
+ entityOpStats.get(CREATED).incrementAndGet();
+ } else if (resultCode == 200) {
+ entityOpStats.get(MODIFIED).incrementAndGet();
+ } else if (202 <= resultCode && resultCode <= 299) {
+ entityOpStats.get(OTHERSUCCESS).incrementAndGet();
+ } else {
+ entityOpStats.get(ERROR).incrementAndGet();
+ }
+
+ } else if (txn.getOperationType() == HttpMethod.DELETE) {
+
+ entityOpStats.get(TOTAL).incrementAndGet();
+
+ if (200 <= resultCode && resultCode <= 299) {
+ entityOpStats.get(DELETED).incrementAndGet();
+ } else {
+ entityOpStats.get(ERROR).incrementAndGet();
+ }
+ }
+
+ }
+
+ /**
+ * Update counters.
+ *
+ * @param txn the txn
+ */
+ public void updateCounters(NetworkTransaction txn) {
+
+ updateElasticSearchEntityCounters(txn);
+
+ }
+
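+  /**
+   * Builds a human-readable report of the per-entity index operation counters, sorted
+   * case-insensitively by entity type.
+   *
+   * @return the formatted statistics report
+   */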
+ public String getStatisticsReport() {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ /*
+ * sort entities, then sort nested op codes
+ */
+
+ TreeMap<String, HashMap<String, AtomicInteger>> elasticEntitySortedTreeMap =
+ new TreeMap<String, HashMap<String, AtomicInteger>>(new Comparator<String>() {
+
+ @Override
+ public int compare(String o1, String o2) {
+ return o1.toLowerCase().compareTo(o2.toLowerCase());
+ }
+ });
+
+ elasticEntitySortedTreeMap.putAll(entityStatistics);
+
+ for (String counterEntityKey : elasticEntitySortedTreeMap.keySet()) {
+
+ HashMap<String, AtomicInteger> entityCounters =
+ elasticEntitySortedTreeMap.get(counterEntityKey);
+
+ AtomicInteger total = entityCounters.get(TOTAL);
+ AtomicInteger created = entityCounters.get(CREATED);
+ AtomicInteger modified = entityCounters.get(MODIFIED);
+ AtomicInteger otherSuccess = entityCounters.get(OTHERSUCCESS);
+ AtomicInteger deleted = entityCounters.get(DELETED);
+ AtomicInteger error = entityCounters.get(ERROR);
+
+ int totalValue = (total == null) ? 0 : total.get();
+ int createdValue = (created == null) ? 0 : created.get();
+ int modifiedValue = (modified == null) ? 0 : modified.get();
+ int otherSuccessValue = (otherSuccess == null) ? 0 : otherSuccess.get();
+ int deletedValue = (deleted == null) ? 0 : deleted.get();
+ int errorValue = (error == null) ? 0 : error.get();
+
+ sb.append("\n ")
+ .append(String.format(
+ "%-30s TOTAL: %-12d CREATED: %-12d MODIFIED:"
+ + " %-12d OTHER_2XX: %-12d DELETED: %-12d ERROR: %-12d",
+ counterEntityKey, totalValue, createdValue, modifiedValue, otherSuccessValue,
+ deletedValue, errorValue));
+ }
+ return sb.toString();
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/exception/ElasticSearchOperationException.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/exception/ElasticSearchOperationException.java
new file mode 100644
index 0000000..5ad7fd0
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/exception/ElasticSearchOperationException.java
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.exception;
+
+/**
+ * The Class ElasticSearchOperationException.
+ */
+public class ElasticSearchOperationException extends Exception {
+
+ private static final long serialVersionUID = -7689309913743200670L;
+
+ /**
+ * Instantiates a new elastic search operation exception.
+ *
+ * @param message the message
+ * @param exc the exc
+ */
+ public ElasticSearchOperationException(String message, Exception exc) {
+ super(message, exc);
+ }
+
+ /**
+ * Instantiates a new elastic search operation exception.
+ *
+ * @param message the message
+ */
+ public ElasticSearchOperationException(String message) {
+ super(message);
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java
new file mode 100644
index 0000000..bae0784
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java
@@ -0,0 +1,207 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.proxy.processor;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.json.Json;
+import javax.json.JsonObjectBuilder;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.camel.Exchange;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.restclient.client.RestClient;
+import org.onap.aai.restclient.rest.HttpUtil;
+import org.onap.aai.sparky.dal.rest.RestClientConstructionException;
+import org.onap.aai.sparky.dal.rest.RestClientFactory;
+import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+import org.slf4j.MDC;
+
+/**
+ * The Class AaiUiProxyProcessor.
+ */
+public class AaiUiProxyProcessor {
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(AaiUiProxyProcessor.class);
+ private static Logger auditLogger =
+ LoggerFactory.getInstance().getAuditLogger(AaiUiProxyProcessor.class.getName());
+
+ private RestClient client;
+ private String synapseBaseUrl;
+
+ private OperationResult operationResult = null;
+
+ private String xTransactionId;
+ private String xFromAppId;
+
+ private static final String ROUTER_SERVICE = "routerService";
+
+
+  /**
+   * Instantiates a new AaiUiProxyProcessor.
+   *
+   * @param endpointConfig the REST endpoint configuration for the synapse service
+   * @param apiGatewayEndpoint the API gateway path segment appended to the synapse base URL
+   * @throws RestClientConstructionException if the REST client cannot be constructed
+   */
+ public AaiUiProxyProcessor(RestEndpointConfig endpointConfig, String apiGatewayEndpoint)
+ throws RestClientConstructionException {
+ client = RestClientFactory.buildClient(endpointConfig);
+ synapseBaseUrl = "https://" + endpointConfig.getEndpointIpAddress() + ":"
+ + endpointConfig.getEndpointServerPort() + "/" + apiGatewayEndpoint;
+ }
+
+
+ void setUpMdcContext(final Exchange exchange, final HttpServletRequest request) {
+
+ Object xTransactionId = exchange.getIn().getHeader("X-TransactionId");
+ if (xTransactionId == null) {
+ this.xTransactionId = NodeUtils.getRandomTxnId();
+ } else {
+ this.xTransactionId = (String) xTransactionId;
+ }
+
+ Object partnerName = exchange.getIn().getHeader("X-FromAppId");
+ if (partnerName == null) {
+ xFromAppId = "Browser";
+ } else {
+ xFromAppId = (String) partnerName;
+ }
+
+    MdcContext.initialize(this.xTransactionId, "AAI-UI", "", xFromAppId,
+ request.getRequestURI() + ":" + request.getLocalPort());
+ }
+
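+  /**
+   * Builds the standard proxy headers (X-FromAppId and X-TransactionId) from the MDC context.
+   *
+   * @return the header map for the downstream request
+   */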
+ private Map<String, List<String>> getHeaders() {
+ Map<String, List<String>> headers = new HashMap<>();
+ headers.put("X-FromAppId", Arrays.asList(SparkyConstants.APP_NAME));
+ headers.put("X-TransactionId", Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID)));
+ headers.put("X-FromAppId", Arrays.asList(MDC.get(MdcContext.MDC_PARTNER_NAME)));
+ return headers;
+ }
+
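+  /**
+   * Wraps the original request URI and body (when present) into a JSON payload suitable for
+   * forwarding to the data router.
+   *
+   * @param exchange the inbound Camel exchange
+   * @return the proxy payload as a JSON string
+   */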
+ private String getProxyPayloadAsString(final Exchange exchange) {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+ String srcUri = "";
+ try {
+ srcUri = (String) exchange.getIn().getHeader(Exchange.HTTP_URI);
+ jsonBuilder.add("origin-uri", srcUri);
+
+ String body = exchange.getIn().getBody(String.class);
+
+ if (body != null && body.length() != 0) {
+ jsonBuilder.add("origin-payload", body);
+ }
+
+ } catch (Exception e) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "Failed to extract payload for proxying.\n" + "Requestor URL: " + srcUri);
+ }
+
+ return jsonBuilder.build().toString();
+ }
+
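+  /**
+   * Maps the inbound request URI onto the synapse base URL by re-appending everything that
+   * follows the 'routerService' path segment.
+   *
+   * @param requestUri the original request URI
+   * @return the synapse URL, or an empty string if the URI does not contain 'routerService'
+   */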
+ private String getSynapseUrl(String requestUri) {
+ String url = "";
+ int pos = requestUri.indexOf(ROUTER_SERVICE);
+ if (pos != -1) {
+ url = synapseBaseUrl + requestUri.substring(pos + ROUTER_SERVICE.length());
+ } else {
+ LOG.error(AaiUiMsgs.DR_REQUEST_URI_FOR_PROXY_UNKNOWN, requestUri);
+ }
+ return url;
+ }
+
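+  /**
+   * Proxies the inbound exchange to the synapse endpoint and copies the result (or the failure
+   * cause) back onto the outbound message.
+   *
+   * @param exchange the Camel exchange being proxied
+   */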
+ public void proxyMessage(Exchange exchange) {
+ HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class);
+
+ setUpMdcContext(exchange, request);
+
+ try {
+ Map<String, List<String>> headers = getHeaders();
+ String proxyPayload = getProxyPayloadAsString(exchange);
+ String fromUrl = (String) exchange.getIn().getHeader(Exchange.HTTP_URI);
+ String toUrl = getSynapseUrl(fromUrl);
+ auditLogger.info(AaiUiMsgs.DR_PROXY_FROM_TO, fromUrl, toUrl);
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Proxying request:\n" + proxyPayload + "\n" + "Target URL:\n" + toUrl);
+
+ long startTimeInMs = System.currentTimeMillis();
+
+ operationResult = client.post(toUrl, proxyPayload, headers,
+ javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE,
+ javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE);
+
+ long drOpTime = (System.currentTimeMillis() - startTimeInMs);
+ int rc = operationResult.getResultCode();
+ String result = "";
+
+ if (HttpUtil.isHttpResponseClassSuccess(rc)) {
+ result = operationResult.getResult();
+ } else {
+ result = operationResult.getFailureCause();
+ LOG.info(AaiUiMsgs.DR_PROCESSING_FAILURE, String.valueOf(rc), proxyPayload);
+ }
+
+ auditLogger.info(AaiUiMsgs.DR_PROCESSING_TIME, String.valueOf(drOpTime));
+
+ exchange.getOut().setHeader("X-TransactionId", xTransactionId);
+ exchange.getOut().setHeader("X-FromAppId", xFromAppId);
+ exchange.getOut().setHeader("RequestUrl", request.getRequestURI());
+ exchange.getOut().setHeader("RequestPort", request.getLocalPort());
+ exchange.getOut().setBody(result);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_PROCESSING_REQUEST, exc);
+ }
+ }
+
+ public String getSynapseBaseUrl() {
+ return synapseBaseUrl;
+ }
+
+ public void setSynapseBaseUrl(String synapseBaseUrl) {
+ this.synapseBaseUrl = synapseBaseUrl;
+ }
+
+ public RestClient getClient() {
+ return client;
+ }
+
+ public void setClient(RestClient client) {
+ this.client = client;
+ }
+
+ protected OperationResult getOperationResult() {
+ return operationResult;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/HttpMethod.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/HttpMethod.java
new file mode 100644
index 0000000..a891d20
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/HttpMethod.java
@@ -0,0 +1,33 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.rest;
+
+
+/**
+ * The Enum HttpMethod.
+ */
+public enum HttpMethod {
+ GET, PUT, POST, DELETE, PATCH, HEAD
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientConstructionException.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientConstructionException.java
new file mode 100644
index 0000000..830e624
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientConstructionException.java
@@ -0,0 +1,38 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.rest;
+
+public class RestClientConstructionException extends Exception {
+
+  private static final long serialVersionUID = 1L;
+
+ public RestClientConstructionException(String message) {
+ super(message);
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientFactory.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientFactory.java
new file mode 100644
index 0000000..30e48b7
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientFactory.java
@@ -0,0 +1,97 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.rest;
+
+import org.onap.aai.restclient.client.RestClient;
+import org.onap.aai.sparky.config.SparkyResourceLoader;
+import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig;
+import org.onap.aai.sparky.util.Encryptor;
+
+public class RestClientFactory {
+
+ public static RestClient buildClient(RestEndpointConfig restEndpointConfig)
+ throws RestClientConstructionException {
+
+ if (restEndpointConfig == null) {
+ throw new RestClientConstructionException(
+ "Failed to build RestClient because RestEndpointConfig is null.");
+ }
+
+ if (restEndpointConfig.getRestAuthenticationMode() == null) {
+ throw new RestClientConstructionException(
+ "Failed to build RestClient because RestAuthenticationMode is null.");
+ }
+
+ SparkyResourceLoader resourceLoader = restEndpointConfig.getResourceLoader();
+
+ switch (restEndpointConfig.getRestAuthenticationMode()) {
+
+ case SSL_CERT: {
+
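+        // The configured certificate password is stored encrypted; decrypt it before handing it
+        // to the client.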
+ Encryptor enc = new Encryptor();
+ String certFileNameFullPath = resourceLoader.getAbsolutePath(restEndpointConfig.getCertFileName());
+ String decryptedCertPassword = enc.decryptValue(restEndpointConfig.getCertPassword());
+ String truststoreFileNameFullPath =
+ resourceLoader.getAbsolutePath(restEndpointConfig.getTruststoreFileName());
+
+ return new RestClient() //
+ .authenticationMode(restEndpointConfig.getRestAuthenticationMode()) //
+ .validateServerCertChain(restEndpointConfig.isValidateServerCertChain()) //
+ .validateServerHostname(restEndpointConfig.isValidateServerHostname()) //
+ .clientCertFile(certFileNameFullPath) //
+ .clientCertPassword(decryptedCertPassword) //
+ .trustStore(truststoreFileNameFullPath) //
+ .connectTimeoutMs(restEndpointConfig.getConnectTimeoutInMs()) //
+ .readTimeoutMs(restEndpointConfig.getReadTimeoutInMs());
+ }
+
+ case SSL_BASIC: {
+
+ return new RestClient() //
+ .authenticationMode(restEndpointConfig.getRestAuthenticationMode()) //
+ .basicAuthUsername(restEndpointConfig.getBasicAuthUserName()) //
+ .basicAuthPassword(restEndpointConfig.getBasicAuthPassword()) //
+ .connectTimeoutMs(restEndpointConfig.getConnectTimeoutInMs()) //
+ .readTimeoutMs(restEndpointConfig.getReadTimeoutInMs());
+
+ }
+
+ case HTTP_NOAUTH:
+ case UNKNOWN_MODE:
+ default: {
+
+ return new RestClient() //
+ .authenticationMode(restEndpointConfig.getRestAuthenticationMode()) //
+ .connectTimeoutMs(restEndpointConfig.getConnectTimeoutInMs()) //
+ .readTimeoutMs(restEndpointConfig.getReadTimeoutInMs());
+
+ }
+
+
+ }
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestOperationalStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestOperationalStatistics.java
new file mode 100644
index 0000000..dde68b8
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestOperationalStatistics.java
@@ -0,0 +1,255 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.rest;
+
+import org.onap.aai.sparky.analytics.AbstractStatistics;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+
+/**
+ * The Class RestOperationalStatistics.
+ */
+public class RestOperationalStatistics extends AbstractStatistics {
+
+ private static final String GET_1XX = "GET_1XX";
+ private static final String GET_2XX = "GET_2XX";
+ private static final String GET_3XX = "GET_3XX";
+ private static final String GET_4XX = "GET_4XX";
+ private static final String GET_5XX = "GET_5XX";
+ private static final String GET_6XX = "GET_6XX";
+
+ private static final String PUT_1XX = "PUT_1XX";
+ private static final String PUT_2XX = "PUT_2XX";
+ private static final String PUT_3XX = "PUT_3XX";
+ private static final String PUT_4XX = "PUT_4XX";
+ private static final String PUT_5XX = "PUT_5XX";
+ private static final String PUT_6XX = "PUT_6XX";
+
+ private static final String POST_1XX = "POST_1XX";
+ private static final String POST_2XX = "POST_2XX";
+ private static final String POST_3XX = "POST_3XX";
+ private static final String POST_4XX = "POST_4XX";
+ private static final String POST_5XX = "POST_5XX";
+ private static final String POST_6XX = "POST_6XX";
+
+ private static final String DELETE_1XX = "DELETE_1XX";
+ private static final String DELETE_2XX = "DELETE_2XX";
+ private static final String DELETE_3XX = "DELETE_3XX";
+ private static final String DELETE_4XX = "DELETE_4XX";
+ private static final String DELETE_5XX = "DELETE_5XX";
+ private static final String DELETE_6XX = "DELETE_6XX";
+
+ /**
+ * Creates the counters.
+ */
+ private void createCounters() {
+
+ addCounter(GET_1XX);
+ addCounter(GET_2XX);
+ addCounter(GET_3XX);
+ addCounter(GET_4XX);
+ addCounter(GET_5XX);
+ addCounter(GET_6XX);
+
+ addCounter(PUT_1XX);
+ addCounter(PUT_2XX);
+ addCounter(PUT_3XX);
+ addCounter(PUT_4XX);
+ addCounter(PUT_5XX);
+ addCounter(PUT_6XX);
+
+ addCounter(POST_1XX);
+ addCounter(POST_2XX);
+ addCounter(POST_3XX);
+ addCounter(POST_4XX);
+ addCounter(POST_5XX);
+ addCounter(POST_6XX);
+
+ addCounter(DELETE_1XX);
+ addCounter(DELETE_2XX);
+ addCounter(DELETE_3XX);
+ addCounter(DELETE_4XX);
+ addCounter(DELETE_5XX);
+ addCounter(DELETE_6XX);
+
+
+ }
+
+ /**
+ * Gets the result code.
+ *
+ * @param txn the txn
+ * @return the result code
+ */
+ private int getResultCode(NetworkTransaction txn) {
+
+ if (txn == null) {
+ return -1;
+ }
+
+ if (txn.getOperationResult() == null) {
+ return -1;
+ }
+
+ return txn.getOperationResult().getResultCode();
+
+ }
+
+ /**
+ * Update counters.
+ *
+ * @param txn the txn
+ */
+ public void updateCounters(NetworkTransaction txn) {
+
+ if (txn == null) {
+ return;
+ }
+
+ int rc = getResultCode(txn);
+
+ switch (txn.getOperationType()) {
+
+ case GET: {
+
+ if (100 <= rc && rc <= 199) {
+ pegCounter(GET_1XX);
+ } else if (200 <= rc && rc <= 299) {
+ pegCounter(GET_2XX);
+ } else if (300 <= rc && rc <= 399) {
+ pegCounter(GET_3XX);
+ } else if (400 <= rc && rc <= 499) {
+ pegCounter(GET_4XX);
+ } else if (500 <= rc && rc <= 599) {
+ pegCounter(GET_5XX);
+ } else if (600 <= rc && rc <= 699) {
+ pegCounter(GET_6XX);
+ }
+
+ break;
+ }
+
+ case PUT: {
+
+ if (100 <= rc && rc <= 199) {
+ pegCounter(PUT_1XX);
+ } else if (200 <= rc && rc <= 299) {
+ pegCounter(PUT_2XX);
+ } else if (300 <= rc && rc <= 399) {
+ pegCounter(PUT_3XX);
+ } else if (400 <= rc && rc <= 499) {
+ pegCounter(PUT_4XX);
+ } else if (500 <= rc && rc <= 599) {
+ pegCounter(PUT_5XX);
+ } else if (600 <= rc && rc <= 699) {
+ pegCounter(PUT_6XX);
+ }
+
+ break;
+ }
+
+ case POST: {
+
+ if (100 <= rc && rc <= 199) {
+ pegCounter(POST_1XX);
+ } else if (200 <= rc && rc <= 299) {
+ pegCounter(POST_2XX);
+ } else if (300 <= rc && rc <= 399) {
+ pegCounter(POST_3XX);
+ } else if (400 <= rc && rc <= 499) {
+ pegCounter(POST_4XX);
+ } else if (500 <= rc && rc <= 599) {
+ pegCounter(POST_5XX);
+ } else if (600 <= rc && rc <= 699) {
+ pegCounter(POST_6XX);
+ }
+
+ break;
+ }
+
+ case DELETE: {
+
+ if (100 <= rc && rc <= 199) {
+ pegCounter(DELETE_1XX);
+ } else if (200 <= rc && rc <= 299) {
+ pegCounter(DELETE_2XX);
+ } else if (300 <= rc && rc <= 399) {
+ pegCounter(DELETE_3XX);
+ } else if (400 <= rc && rc <= 499) {
+ pegCounter(DELETE_4XX);
+ } else if (500 <= rc && rc <= 599) {
+ pegCounter(DELETE_5XX);
+ } else if (600 <= rc && rc <= 699) {
+ pegCounter(DELETE_6XX);
+ }
+
+ break;
+ }
+
+ default: {
+ // not expecting anything else yet
+ }
+
+ }
+
+ }
+
+ /**
+ * Instantiates a new rest operational statistics.
+ */
+ public RestOperationalStatistics() {
+ createCounters();
+ }
+
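+  /**
+   * Builds a human-readable report of the per-HTTP-method response code counters.
+   *
+   * @return the formatted statistics report
+   */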
+ public String getStatisticsReport() {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ sb.append("\n ")
+ .append(String.format(
+ "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ",
+ HttpMethod.DELETE, getCounterValue(DELETE_1XX), getCounterValue(DELETE_2XX),
+ getCounterValue(DELETE_3XX), getCounterValue(DELETE_4XX), getCounterValue(DELETE_5XX),
+ getCounterValue(DELETE_6XX)));
+
+ sb.append("\n ").append(String.format(
+ "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.PUT,
+ getCounterValue(PUT_1XX), getCounterValue(PUT_2XX), getCounterValue(PUT_3XX),
+ getCounterValue(PUT_4XX), getCounterValue(PUT_5XX), getCounterValue(PUT_6XX)));
+
+ sb.append("\n ").append(String.format(
+ "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.POST,
+ getCounterValue(POST_1XX), getCounterValue(POST_2XX), getCounterValue(POST_3XX),
+ getCounterValue(POST_4XX), getCounterValue(POST_5XX), getCounterValue(POST_6XX)));
+
+ sb.append("\n ").append(String.format(
+ "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.GET,
+ getCounterValue(GET_1XX), getCounterValue(GET_2XX), getCounterValue(GET_3XX),
+ getCounterValue(GET_4XX), getCounterValue(GET_5XX), getCounterValue(GET_6XX)));
+
+ return sb.toString();
+ }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfig.java
new file mode 100644
index 0000000..8859f02
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfig.java
@@ -0,0 +1,179 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.dal.rest.config;
+
+import org.onap.aai.restclient.enums.RestAuthenticationMode;
+import org.onap.aai.sparky.config.SparkyResourceLoader;
+
+public class RestEndpointConfig {
+
+ private String endpointIpAddress;
+ private String endpointServerPort;
+ private int numRequestRetries;
+ private String basicAuthUserName;
+ private String basicAuthPassword;
+ private RestAuthenticationMode restAuthenticationMode;
+ private int connectTimeoutInMs;
+ private int readTimeoutInMs;
+ private String certFileName;
+ private String certPassword;
+ private String truststoreFileName;
+ private boolean validateServerCertChain;
+ private boolean validateServerHostname;
+ private SparkyResourceLoader resourceLoader;
+
+ public boolean isValidateServerCertChain() {
+ return validateServerCertChain;
+ }
+
+ public void setValidateServerCertChain(boolean validateServerCertChain) {
+ this.validateServerCertChain = validateServerCertChain;
+ }
+
+ public boolean isValidateServerHostname() {
+ return validateServerHostname;
+ }
+
+ public void setValidateServerHostname(boolean validateServerHostname) {
+ this.validateServerHostname = validateServerHostname;
+ }
+
+ public String getEndpointIpAddress() {
+ return endpointIpAddress;
+ }
+
+ public void setEndpointIpAddress(String endpointIpAddress) {
+ this.endpointIpAddress = endpointIpAddress;
+ }
+
+ public String getEndpointServerPort() {
+ return endpointServerPort;
+ }
+
+ public void setEndpointServerPort(String endpointServerPort) {
+ this.endpointServerPort = endpointServerPort;
+ }
+
+ public int getNumRequestRetries() {
+ return numRequestRetries;
+ }
+
+ public void setNumRequestRetries(int numRequestRetries) {
+ this.numRequestRetries = numRequestRetries;
+ }
+
+ public String getBasicAuthUserName() {
+ return basicAuthUserName;
+ }
+
+ public void setBasicAuthUserName(String basicAuthUserName) {
+ this.basicAuthUserName = basicAuthUserName;
+ }
+
+ public String getBasicAuthPassword() {
+ return basicAuthPassword;
+ }
+
+ public void setBasicAuthPassword(String basicAuthPassword) {
+ this.basicAuthPassword = basicAuthPassword;
+ }
+
+ public RestAuthenticationMode getRestAuthenticationMode() {
+ return restAuthenticationMode;
+ }
+
+ public void setRestAuthenticationMode(RestAuthenticationMode restAuthenticationMode) {
+ this.restAuthenticationMode = restAuthenticationMode;
+ }
+
+ public int getConnectTimeoutInMs() {
+ return connectTimeoutInMs;
+ }
+
+ public void setConnectTimeoutInMs(int connectTimeoutInMs) {
+ this.connectTimeoutInMs = connectTimeoutInMs;
+ }
+
+ public int getReadTimeoutInMs() {
+ return readTimeoutInMs;
+ }
+
+ public void setReadTimeoutInMs(int readTimeoutInMs) {
+ this.readTimeoutInMs = readTimeoutInMs;
+ }
+
+ public String getCertFileName() {
+ return certFileName;
+ }
+
+ public void setCertFileName(String certFileName) {
+ this.certFileName = certFileName;
+ }
+
+ public String getCertPassword() {
+ return certPassword;
+ }
+
+ public void setCertPassword(String certPassword) {
+ this.certPassword = certPassword;
+ }
+
+ public String getTruststoreFileName() {
+ return truststoreFileName;
+ }
+
+ public void setTruststoreFileName(String truststoreFileName) {
+ this.truststoreFileName = truststoreFileName;
+ }
+
+ public SparkyResourceLoader getResourceLoader() {
+ return resourceLoader;
+ }
+
+ public void setResourceLoader(SparkyResourceLoader resourceLoader) {
+ this.resourceLoader = resourceLoader;
+ }
+
+ @Override
+ public String toString() {
+ return "RestEndpointConfig ["
+ + (endpointIpAddress != null ? "endpointIpAddress=" + endpointIpAddress + ", " : "")
+ + (endpointServerPort != null ? "endpointServerPort=" + endpointServerPort + ", " : "")
+ + "numRequestRetries=" + numRequestRetries + ", "
+ + (basicAuthUserName != null ? "basicAuthUserName=" + basicAuthUserName + ", " : "")
+ + (basicAuthPassword != null ? "basicAuthPassword=" + basicAuthPassword + ", " : "")
+ + (restAuthenticationMode != null
+ ? "restAuthenticationMode=" + restAuthenticationMode + ", " : "")
+ + "connectTimeoutInMs=" + connectTimeoutInMs + ", readTimeoutInMs=" + readTimeoutInMs + ", "
+ + (certFileName != null ? "certFileName=" + certFileName + ", " : "")
+ + (certPassword != null ? "certPassword=" + certPassword + ", " : "")
+ + (truststoreFileName != null ? "truststoreFileName=" + truststoreFileName + ", " : "")
+ + "validateServerCertChain=" + validateServerCertChain + ", validateServerHostname="
+ + validateServerHostname + "]";
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java
new file mode 100644
index 0000000..8b35d7c
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java
@@ -0,0 +1,182 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.editattributes;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Map;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.component.restlet.RestletConstants;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.editattributes.entity.EditRequest;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.restlet.Request;
+import org.restlet.Response;
+import org.restlet.data.ClientInfo;
+import org.restlet.data.Cookie;
+import org.restlet.data.MediaType;
+import org.restlet.data.Status;
+import org.restlet.util.Series;
+
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * The Class AttributeEditProcessor.
+ */
+public class AttributeEditProcessor {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(AttributeEditProcessor.class);
+
+ private ObjectMapper mapper;
+ private AttributeUpdater attrUpdater;
+
+ public AttributeEditProcessor(AttributeUpdater attributeUpdater) {
+ this.attrUpdater = attributeUpdater;
+
+ this.mapper = new ObjectMapper();
+ mapper.setSerializationInclusion(Include.NON_EMPTY);
+ }
+
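+  /**
+   * Applies the attribute edits carried in the exchange payload and writes the update result back
+   * to the Restlet response.
+   *
+   * @param exchange the Camel exchange carrying the edit request
+   */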
+ public void editAttribute(Exchange exchange) {
+
+ Object xTransactionId = exchange.getIn().getHeader("X-TransactionId");
+
+ if (xTransactionId == null) {
+ xTransactionId = NodeUtils.getRandomTxnId();
+ }
+
+ Object partnerName = exchange.getIn().getHeader("X-FromAppId");
+ if (partnerName == null) {
+ partnerName = "Browser";
+ }
+
+ Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class);
+
+    /*
+     * Disable the automatic Apache Camel Restlet component logging, which would otherwise print
+     * an undesirable log entry containing client (e.g. browser) information.
+     */
+ request.setLoggable(false);
+
+ ClientInfo clientInfo = request.getClientInfo();
+ MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName,
+ clientInfo.getAddress() + ":" + clientInfo.getPort());
+
+ String payload = exchange.getIn().getBody(String.class);
+ EditRequest editRequest = null;
+ OperationResult operationResult = new OperationResult();
+
+ Response response = exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class);
+ response.setStatus(Status.SUCCESS_OK); // 200 is assumed unless an actual exception occurs (a failure is still a valid response)
+
+ boolean wasErrorDuringProcessing = false;
+ String errorMessage = null;
+
+
+ try {
+
+ if (payload != null && !payload.isEmpty()) {
+ editRequest = mapper.readValue(payload, EditRequest.class);
+
+ if (editRequest != null) {
+
+ String attUid = getAttUid(request.getCookies());
+ String objectUri = editRequest.getEntityUri();
+ Map<String, Object> attributeValues = editRequest.getAttributes();
+
+ if (attUid != null && !attUid.isEmpty() && objectUri != null && !objectUri.isEmpty()
+ && attributeValues != null && !attributeValues.isEmpty()) {
+
+ LOG.info(AaiUiMsgs.ATTRIBUTES_HANDLING_EDIT, objectUri, editRequest.toString());
+
+ operationResult = attrUpdater.updateObjectAttribute(objectUri, attributeValues, attUid);
+
+ boolean wasSuccess = (operationResult.getResultCode() == 200);
+            String message = String.format("Edit Attributes completed with Result Code: %s (%s).",
+                operationResult.getResultCode(), wasSuccess ? "success" : "failure");
+
+ LOG.info(AaiUiMsgs.INFO_GENERIC, message);
+ }
+ }
+ } else {
+ wasErrorDuringProcessing = true;
+ errorMessage = "Empty payload provided, need details to complete request";
+ }
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage());
+ operationResult.setResult(500, "Error encountered while trying to update attributes.");
+ response.setStatus(Status.SERVER_ERROR_INTERNAL);
+ }
+
+ if(wasErrorDuringProcessing) {
+ LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_MESSAGE, errorMessage);
+ }
+
+ response.setEntity(operationResult.getResult(), MediaType.APPLICATION_JSON);
+ exchange.getOut().setBody(response);
+ }
+
+  /**
+   * Extracts the user's attuid from the attESHr cookie.
+   *
+   * @param cookies the request cookies
+   * @return the att uid, or an empty string if it cannot be determined
+   * @throws UnsupportedEncodingException the unsupported encoding exception
+   */
+ public String getAttUid(Series<Cookie> cookies) throws UnsupportedEncodingException {
+ String attId = "";
+ if (cookies == null) {
+ LOG.error(AaiUiMsgs.COOKIE_NOT_FOUND);
+ return attId;
+ }
+ for (Cookie cookie : cookies) {
+ if (cookie.getName().equals("attESHr")) {
+ // This cookie is of the form :
+ // "FIRSTNAME|LASTNAME|emailname@domain.com|||ab1234||fl6789,RBFMSKQ,"
+ // + "Z9V2298,9762186|YNNNNNNNNNNNNNYNNYYNNNNN|FIRSTNAME|EY6SC9000|"
+ // we are to extract fl6789 from this which would be the attuid for the user.
+ String value = cookie.getValue();
+ value = java.net.URLDecoder.decode(value, "UTF-8");
+ LOG.info(AaiUiMsgs.COOKIE_FOUND, value);
+ String[] values = value.split("\\|");
+ if (values.length > 7) {
+ attId = (values[7].split(","))[0];
+
+ String initials = (values[0].substring(0, 1) + values[1].substring(0, 1)).toLowerCase();
+ if (attId.startsWith(initials)) {
+ return attId;
+ }
+ }
+ }
+ }
+ return attId;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java
new file mode 100644
index 0000000..5d71135
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java
@@ -0,0 +1,362 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.editattributes;
+
+import java.net.URI;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.ws.rs.core.UriBuilder;
+
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.OxmModelLoader;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.editattributes.exception.AttributeUpdateException;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.PropertyNamingStrategy;
+
+/**
+ * Class to process attribute updates on AAI objects.
+ */
+public class AttributeUpdater {
+
+ /**
+ * The Class AaiEditObject.
+ */
+ public class AaiEditObject {
+ String objectType;
+ String rootElement;
+ String keyName;
+ String keyValue;
+ String schemaVersion;
+
+ /**
+ * Instantiates a new aai edit object.
+ */
+ public AaiEditObject() {
+
+ }
+
+ /**
+ * Instantiates a new aai edit object.
+ *
+ * @param objectType the object type
+ * @param idName the id name
+ * @param schemaVersion the schema version
+ */
+ public AaiEditObject(String objectType, String idName, String schemaVersion) {
+ super();
+ this.objectType = objectType;
+ this.keyName = idName;
+ this.schemaVersion = schemaVersion;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ public void setObjectType(String objectType) {
+ this.objectType = objectType;
+ }
+
+ public String getKeyName() {
+ return keyName;
+ }
+
+ public void setKeyName(String idName) {
+ this.keyName = idName;
+ }
+
+ public String getSchemaVersion() {
+ return schemaVersion;
+ }
+
+ public void setSchemaVersion(String schemaVersion) {
+ this.schemaVersion = schemaVersion;
+ }
+
+ public void setKeyValue(String keyValue) {
+ this.keyValue = keyValue;
+ }
+
+ public String getKeyValue() {
+ return keyValue;
+ }
+
+ public String getRootElement() {
+ return rootElement;
+ }
+
+ public void setRootElement(String rootElement) {
+ this.rootElement = rootElement;
+ }
+
+ }
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(AttributeUpdater.class);
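+ // Matches and captures the API version segment of an AAI object URI (e.g. "/v11" -> "v11")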
+ private static final String MESSAGE_VERSION_EXTRACTION_REGEX = "\\/(v[0-9]+)";
+ private static final String ATTRIBUTES_UPDATED_SUCCESSFULLY = "Attributes updated successfully";
+ private static final String ATTRIBUTES_NOT_UPDATED = "Attributes not updated. ";
+
+ private ActiveInventoryAdapter aaiAdapter;
+ private UserValidator validator;
+ private OxmModelLoader oxmModelLoader;
+ private OxmEntityLookup oxmEntityLookup;
+
+ /**
+ * Instantiates a new attribute updater.
+ * @throws AttributeUpdateException
+ */
+ public AttributeUpdater(OxmModelLoader oxmModelLoader, OxmEntityLookup oxmEntityLookup, ActiveInventoryAdapter activeInventoryAdapter) throws AttributeUpdateException {
+ super();
+ this.oxmModelLoader = oxmModelLoader;
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.aaiAdapter = activeInventoryAdapter;
+
+ try {
+ this.validator = new UserValidator();
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ATTRIBUTES_ERROR_GETTING_AAI_CONFIG_OR_ADAPTER, exc.getLocalizedMessage());
+ throw new AttributeUpdateException(exc);
+ }
+ }
+
+ protected String getResourceBasePath() {
+
+ String versionStr = null;
+ if (oxmModelLoader != null) {
+ versionStr = String.valueOf(oxmModelLoader.getLatestVersionNum());
+ }
+
+ return "/aai/v" + versionStr;
+
+ }
+
+ protected URI getBaseUri() {
+ return UriBuilder
+ .fromUri("https://" + aaiAdapter.getEndpointConfig().getEndpointIpAddress() + ":"
+ + aaiAdapter.getEndpointConfig().getEndpointServerPort() + getResourceBasePath())
+ .build();
+ }
+
+ /**
+ * Update object attribute.
+ *
+ * @param objectUri - Valid URI of the object as per OXM model.
+ * @param attributeValues - Map of attribute names to the new values to be applied.
+ * @param attUid - ATTUID of the user requesting the update.
+ * @return - OperationResult with success or failure reason.
+ */
+ public OperationResult updateObjectAttribute(String objectUri, Map<String, Object> attributeValues, String attUid) {
+ OperationResult result = new OperationResult();
+ LOG.info(AaiUiMsgs.ATTRIBUTES_UPDATE_METHOD_CALLED, objectUri, attUid, String.valueOf(attributeValues));
+ if (!validator.isAuthorizedUser(attUid)) {
+ result.setResultCode(403);
+ result.setResult(String.format("User %s is not authorized for Attributes update ", attUid));
+ LOG.error(AaiUiMsgs.ATTRIBUTES_USER_NOT_AUTHORIZED_TO_UPDATE, attUid);
+ return result;
+ }
+
+ AaiEditObject object = null;
+
+ try {
+ object = getEditObjectFromUri(objectUri);
+ } catch (AttributeUpdateException exc) {
+ result.setResultCode(400);
+ result.setResult(ATTRIBUTES_NOT_UPDATED);
+ LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage());
+ return result;
+ }
+ try {
+ String jsonPayload = convertEditRequestToJson(object, attributeValues);
+ String patchUri = getBaseUri().toString() + getRelativeUri(objectUri);
+
+
+ /*
+ * FIX ME: Dave Adams, 8-Nov-2017
+ */
+
+ //result = aaiAdapter.doPatch(patchUri, jsonPayload, MediaType.APPLICATION_JSON);
+
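+      // The PATCH call is currently disabled (see FIX ME above); a 404 result is returned
+      // until the adapter call is re-enabled.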
+ result = new OperationResult();
+ result.setResultCode(404);
+
+ if (result.getResultCode() == 200) {
+ result.setResult(ATTRIBUTES_UPDATED_SUCCESSFULLY);
+ String message = result.getResult() + " for " + objectUri;
+ LOG.info(AaiUiMsgs.INFO_GENERIC, message);
+ } else {
+ String message = ATTRIBUTES_NOT_UPDATED + " For: " + objectUri + ". AAI PATCH Status Code : "
+ + result.getResultCode() + ". Error : " + result.getResult();
+ LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_MESSAGE, message);
+ }
+ } catch (AttributeUpdateException exc) {
+ result.setResultCode(500);
+ result.setResult(ATTRIBUTES_NOT_UPDATED + exc.getLocalizedMessage());
+ LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage());
+ }
+ return result;
+
+ }
+
+ /**
+ * Gets the relative uri.
+ *
+ * @param objectUri the object uri
+ * @return the relative uri
+ */
+ public String getRelativeUri(String objectUri) {
+ String tempUri = objectUri;
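+    // Keep only the portion of the URI after the last "/v<number>" segment so the path is
+    // relative to the versioned base URI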
+ final Pattern pattern = Pattern.compile(MESSAGE_VERSION_EXTRACTION_REGEX, Pattern.DOTALL);
+ Matcher matcher = pattern.matcher(objectUri);
+ while (matcher.find()) {
+ tempUri = objectUri.substring(matcher.end());
+ }
+ if (!tempUri.startsWith("/")) {
+ tempUri = "/" + tempUri;
+ }
+ return tempUri;
+ }
+
+ /**
+ * Builds the AaiEditObject described by the given object URI.
+ *
+ * @param objectUri the object uri
+ * @return the AaiEditObject derived from the URI
+ * @throws AttributeUpdateException the attribute update exception
+ */
+ public AaiEditObject getEditObjectFromUri(String objectUri) throws AttributeUpdateException {
+
+ AaiEditObject object = new AaiEditObject();
+ String version = getVersionFromUri(objectUri);
+
+ if ( null == version ) {
+ version = "v" + String.valueOf(oxmModelLoader.getLatestVersionNum());
+ }
+ object.setSchemaVersion(version);
+
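+    // The last URI segment is the entity's key value; the segment before it is the entity's
+    // root element name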
+ String[] values = objectUri.split("/");
+ if (values.length < 2) {
+ throw new AttributeUpdateException("Invalid or malformed object URI : " + objectUri);
+ }
+ String keyValue = values[values.length - 1];
+ String rootElement = values[values.length - 2];
+
+ object.setKeyValue(keyValue);
+ object.setRootElement(rootElement);
+
+ String objectJavaType = null;
+ Map<String, DynamicType> entityTypeLookup = oxmEntityLookup.getEntityTypeLookup();
+ DynamicType entity = entityTypeLookup.get(rootElement);
+ if ( null != entity ) {
+ objectJavaType = entity.getName();
+ String message = "Descriptor: Alias: " + objectJavaType + " : DefaultRootElement: "
+ + rootElement;
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, message);
+ }
+
+
+ if (objectJavaType == null) {
+ throw new AttributeUpdateException(
+ "Object type could not be determined from the URI : " + objectUri);
+ }
+ object.setObjectType(objectJavaType);
+
+ // Set key attribute name
+ final List<String> primaryKeys = entity.getDescriptor().getPrimaryKeyFieldNames();
+
+ if (primaryKeys.isEmpty()) {
+ throw new AttributeUpdateException("Object primary key not found in OXM version " + version);
+ }
+
+ for (int i = 0; i < primaryKeys.size(); i++) {
+ final String primaryKey = primaryKeys.get(i);
+ if (primaryKey.indexOf("/text()") != -1) {
+ primaryKeys.set(i, primaryKey.replace("/text()", ""));
+ }
+ }
+ object.setKeyName(primaryKeys.iterator().next());
+
+ return object;
+ }
+
+ /**
+ * Gets the version from uri.
+ *
+ * @param objectUri the object uri
+ * @return the version from uri
+ * @throws AttributeUpdateException the attribute update exception
+ */
+ private String getVersionFromUri(String objectUri) throws AttributeUpdateException {
+ final Pattern pattern = Pattern.compile(MESSAGE_VERSION_EXTRACTION_REGEX, Pattern.DOTALL);
+ Matcher matcher = pattern.matcher(objectUri);
+ String messageSchemaVersion = null;
+ while (matcher.find()) {
+ messageSchemaVersion = matcher.group(1);
+ break;
+ }
+ return messageSchemaVersion;
+ }
+
+ /**
+ * Convert edit request to json.
+ *
+ * @param object the object
+ * @param attributeValues the attribute values
+ * @return the string
+ * @throws AttributeUpdateException the attribute update exception
+ */
+ private static String convertEditRequestToJson(AaiEditObject object,
+ Map<String, Object> attributeValues) throws AttributeUpdateException {
+
+ ObjectMapper mapper = new ObjectMapper();
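+    // Serialize camelCase field names as kebab-case to match AAI payload conventions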
+ mapper.setPropertyNamingStrategy(new PropertyNamingStrategy.KebabCaseStrategy());
+ ObjectWriter ow = mapper.writer();
+
+ Map<String, Object> patchAttributes = new HashMap<>();
+ patchAttributes.put(object.getKeyName(), object.getKeyValue());
+ patchAttributes.putAll(attributeValues);
+
+ try {
+ return ow.writeValueAsString(patchAttributes);
+ } catch (JsonProcessingException exc) {
+ throw new AttributeUpdateException("Caught a JPE while creating PATCH request body = ", exc);
+ }
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java
new file mode 100644
index 0000000..a5c251e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java
@@ -0,0 +1,79 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.editattributes;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Reads user IDs from a file. Each line in the user authorization file should contain a single user
+ * ID. For example,
+ *
+ * <pre>
+ * user1
+ * user2
+ * </pre>
+ */
+public class UserAuthorizationReader {
+
+ private File userAuthorizationFile;
+
+ /**
+ * Set the user authorization file.
+ *
+ * @param file a user authorization file
+ */
+ public UserAuthorizationReader(File file) {
+ this.userAuthorizationFile = file;
+ }
+
+ /**
+ * Gets user IDs from a file.
+ *
+ * @return a list of user IDs
+ * @throws IOException if there is a problem reading the user configuration file
+ */
+ public List<String> getUsers() throws IOException {
+ List<String> userList = new ArrayList<>();
+ try (Stream<String> stream = Files.lines(getUserAuthorizationFile().toPath())) {
+ userList.addAll(stream.map(String::trim).collect(Collectors.toList()));
+ }
+ return userList;
+ }
+
+ // Getters and setters
+ public File getUserAuthorizationFile() {
+ return userAuthorizationFile;
+ }
+
+ public void setUserAuthorizationFile(File file) {
+ this.userAuthorizationFile = file;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java
new file mode 100644
index 0000000..8999105
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java
@@ -0,0 +1,67 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.editattributes;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+/**
+ * Validates users against a user authorization file.
+ */
+public class UserValidator {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(UserValidator.class);
+ private static final String USER_AUTH_FILE =
+ SparkyConstants.AUTHORIZED_USERS_FILE_LOCATION;
+
+ private UserAuthorizationReader userAuthorizationReader =
+ new UserAuthorizationReader(new File(USER_AUTH_FILE));
+
+ /**
+ * Returns true if the user is authorized.
+ *
+ * @param userId a user identifier
+ * @return true if the user ID is present in the user authorization file
+ */
+ public boolean isAuthorizedUser(String userId) {
+ if (userId != null && !userId.isEmpty()) {
+ try {
+ List<String> users = userAuthorizationReader.getUsers();
+ return users.contains(userId);
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.USER_AUTHORIZATION_FILE_UNAVAILABLE, userId);
+ return false;
+ }
+ } else {
+ return false;
+ }
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java
new file mode 100644
index 0000000..0e8ce17
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java
@@ -0,0 +1,69 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.editattributes.entity;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * The Class EditRequest.
+ */
+public class EditRequest {
+
+ @JsonProperty("entity-uri")
+ private String entityUri;
+
+ @JsonProperty("entity-type")
+ private String entityType;
+
+ @JsonProperty("attributes")
+ private Map<String, Object> attributes = new HashMap<>();
+
+ public String getEntityUri() {
+ return entityUri;
+ }
+
+ public void setEntityUri(String entityUri) {
+ this.entityUri = entityUri;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public Map<String, Object> getAttributes() {
+ return attributes;
+ }
+
+ public void setAttributes(Map<String, Object> attributes) {
+ this.attributes = attributes;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java
new file mode 100644
index 0000000..119d680
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java
@@ -0,0 +1,62 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.editattributes.exception;
+
+/**
+ * The Class AttributeUpdateException.
+ */
+public class AttributeUpdateException extends Exception {
+
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * Instantiates a new attribute update exception.
+ *
+ * @param exc the underlying exception
+ */
+ public AttributeUpdateException(Exception exc) {
+ super(exc);
+ }
+
+ /**
+ * Instantiates a new attribute update exception.
+ *
+ * @param message the message
+ */
+ public AttributeUpdateException(String message) {
+ super(message);
+ }
+
+ /**
+ * Instantiates a new attribute update exception.
+ *
+ * @param message the message
+ * @param exc the exc
+ */
+ public AttributeUpdateException(String message, Exception exc) {
+ super(message, exc);
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java
new file mode 100644
index 0000000..a2039b4
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java
@@ -0,0 +1,143 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.inventory;
+
+import javax.json.Json;
+import javax.json.JsonArray;
+import javax.json.JsonArrayBuilder;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+
+/**
+ * The Class EntityHistoryQueryBuilder.
+ */
+public class EntityHistoryQueryBuilder {
+
+ private static final String TABLE = "table";
+ private static final String GRAPH = "graph";
+
+ /**
+ * Gets the query.
+ *
+ * @param type the type
+ * @return the query
+ */
+ public static JsonObject getQuery(String type) {
+ if (type.equalsIgnoreCase(TABLE)) {
+ return createTableQuery();
+ } else if (type.equalsIgnoreCase(GRAPH)) {
+ return createGraphQuery();
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Creates the graph query.
+ *
+ * @return the json object
+ */
+ public static JsonObject createGraphQuery() {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
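+    // Group hits by entityType, bucket each type by day, and keep only the most recent
+    // document per bucket; size 0 suppresses raw hits so only aggregations are returned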
+
+ jsonBuilder.add("aggs",
+ Json.createObjectBuilder().add("group_by_entityType",
+ Json.createObjectBuilder()
+ .add("terms", Json.createObjectBuilder().add("field", "entityType").add("size", 0))
+ .add("aggs", Json.createObjectBuilder().add("group_by_date",
+ Json.createObjectBuilder().add("date_histogram", createDateHistogram())
+ .add("aggs", Json.createObjectBuilder().add("sort_by_date",
+ Json.createObjectBuilder().add("top_hits", createTopHitsBlob())))))));
+ jsonBuilder.add("size", 0);
+
+ return jsonBuilder.build();
+ }
+
+ /**
+ * Creates the table query.
+ *
+ * @return the json object
+ */
+ public static JsonObject createTableQuery() {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+
+ jsonBuilder.add("aggs",
+ Json.createObjectBuilder().add("group_by_entityType",
+ Json.createObjectBuilder()
+ .add("terms", Json.createObjectBuilder().add("field", "entityType").add("size", 0))
+ .add("aggs", Json.createObjectBuilder().add("sort_by_date",
+ Json.createObjectBuilder().add("top_hits", createTopHitsBlob())))));
+ jsonBuilder.add("size", 0);
+
+ return jsonBuilder.build();
+ }
+
+ /**
+ * Creates the date histogram.
+ *
+ * @return the json object
+ */
+ private static JsonObject createDateHistogram() {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+
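+    // Bucket documents by day on the "timestamp" field, omitting empty days (min_doc_count = 1)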
+ jsonBuilder.add("field", "timestamp");
+ jsonBuilder.add("min_doc_count", 1);
+ jsonBuilder.add("interval", "day");
+ jsonBuilder.add("format", "epoch_millis");
+
+ return jsonBuilder.build();
+ }
+
+ /**
+ * Creates the top hits blob.
+ *
+ * @return the json object
+ */
+ private static JsonObject createTopHitsBlob() {
+ JsonObjectBuilder builder = Json.createObjectBuilder();
+ builder.add("size", 1);
+ builder.add("sort", getSortCriteria());
+ return builder.build();
+ }
+
+ public static JsonArray getSortCriteria() {
+ JsonArrayBuilder jsonBuilder = Json.createArrayBuilder();
+ jsonBuilder.add(Json.createObjectBuilder().add("timestamp",
+ Json.createObjectBuilder().add("order", "desc")));
+
+ return jsonBuilder.build();
+ }
+
+ /**
+ * The main method.
+ *
+ * @param args the arguments
+ */
+ public static void main(String[] args) {
+ System.out.println("TABLE-QUERY: " + createTableQuery().toString());
+ System.out.println("GRAPH_QUERY: " + createGraphQuery().toString());
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java
new file mode 100644
index 0000000..a0e0630
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java
@@ -0,0 +1,180 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.inventory;
+
+import java.io.IOException;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.component.restlet.RestletConstants;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.restlet.Request;
+import org.restlet.Response;
+import org.restlet.data.ClientInfo;
+import org.restlet.data.Form;
+import org.restlet.data.MediaType;
+import org.restlet.data.Parameter;
+import org.restlet.data.Status;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * The Class GeoVisualizationServlet.
+ */
+public class GeoVisualizationProcessor {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(GeoVisualizationProcessor.class);
+
+ private ObjectMapper mapper;
+ private ElasticSearchAdapter elasticSearchAdapter = null;
+ private String topographicalSearchIndexName;
+
+ private static final String SEARCH_STRING = "_search";
+ private static final String SEARCH_PARAMETER = "?filter_path=hits.hits._source&_source=location&size=5000&q=entityType:";
+ private static final String PARAMETER_KEY = "entity";
+
+ /**
+ * Instantiates a new geo visualization processor
+ */
+ public GeoVisualizationProcessor(ElasticSearchAdapter elasticSearchAdapter, String topographicalSearchIndexName) {
+ this.mapper = new ObjectMapper();
+ this.elasticSearchAdapter = elasticSearchAdapter;
+ this.topographicalSearchIndexName = topographicalSearchIndexName;
+ }
+
+ /**
+ * Gets the geo visualization results.
+ *
+ * @param exchange the Camel exchange carrying the request
+ * @return the geo visualization results
+ * @throws Exception the exception
+ */
+ protected OperationResult getGeoVisualizationResults(Exchange exchange) throws Exception {
+ OperationResult operationResult = new OperationResult();
+
+
+ Object xTransactionId = exchange.getIn().getHeader("X-TransactionId");
+ if (xTransactionId == null) {
+ xTransactionId = NodeUtils.getRandomTxnId();
+ }
+
+ Object partnerName = exchange.getIn().getHeader("X-FromAppId");
+ if (partnerName == null) {
+ partnerName = "Browser";
+ }
+
+ Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class);
+
+    /* Disable automatic Apache Camel Restlet component logging, which would otherwise print an
+       undesirable log entry that includes client (e.g. browser) information */
+ request.setLoggable(false);
+
+ ClientInfo clientInfo = request.getClientInfo();
+ MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, clientInfo.getAddress() + ":" + clientInfo.getPort());
+
+ String entityType = "";
+
+ Form form = request.getResourceRef().getQueryAsForm();
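+    // Read the entity type to plot from the "entity" query parameter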
+ for (Parameter parameter : form) {
+ if(PARAMETER_KEY.equals(parameter.getName())) {
+        entityType = parameter.getValue();
+ }
+ }
+
+ String api = SEARCH_STRING + SEARCH_PARAMETER + entityType;
+
+ final String requestUrl = elasticSearchAdapter.buildElasticSearchUrlForApi(topographicalSearchIndexName, api);
+
+ try {
+
+ OperationResult opResult =
+ elasticSearchAdapter.doGet(requestUrl, javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE);
+
+ JSONObject finalOutputJson = formatOutput(opResult.getResult());
+
+ Response response = exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class);
+ response.setStatus(Status.SUCCESS_OK);
+ response.setEntity(String.valueOf(finalOutputJson), MediaType.APPLICATION_JSON);
+ exchange.getOut().setBody(response);
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, "Error processing Geo Visualization request");
+ }
+
+ return operationResult;
+ }
+
+ /**
+ * Format output.
+ *
+ * @param results the results
+ * @return the JSON object
+ */
+ private JSONObject formatOutput(String results) {
+ JsonNode resultNode = null;
+ JSONObject finalResult = new JSONObject();
+ JSONArray entitiesArr = new JSONArray();
+
+ try {
+ resultNode = mapper.readTree(results);
+
+ final JsonNode hitsNode = resultNode.get("hits").get("hits");
+ if (hitsNode.isArray()) {
+
+ for (final JsonNode arrayNode : hitsNode) {
+ JsonNode sourceNode = arrayNode.get("_source");
+ if (sourceNode.get("location") != null) {
+ JsonNode locationNode = sourceNode.get("location");
+ if (NodeUtils.isNumeric(locationNode.get("lon").asText())
+ && NodeUtils.isNumeric(locationNode.get("lat").asText())) {
+ JSONObject location = new JSONObject();
+ location.put("longitude", locationNode.get("lon").asText());
+ location.put("latitude", locationNode.get("lat").asText());
+
+ entitiesArr.put(location);
+ }
+
+ }
+ }
+ }
+ finalResult.put("plotPoints", entitiesArr);
+
+ } catch (IOException exc) {
+ LOG.warn(AaiUiMsgs.ERROR_BUILDING_SEARCH_RESPONSE, exc.getLocalizedMessage());
+ }
+
+ return finalResult;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java
new file mode 100644
index 0000000..86918ad
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java
@@ -0,0 +1,289 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.inventory.entity;
+
+import java.io.Serializable;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.sync.entity.IndexDocument;
+import org.onap.aai.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * The Class GeoIndexDocument.
+ */
+public class GeoIndexDocument implements Serializable, IndexDocument {
+
+ @JsonIgnore
+ private static final long serialVersionUID = -5188479658230319058L;
+
+ protected String entityType;
+ protected String entityPrimaryKeyValue;
+ protected String entityPrimaryKeyName;
+ protected String latitude;
+ protected String longitude;
+ protected String selfLink;
+
+ @JsonIgnore
+ protected OxmEntityLookup oxmEntityLookup;
+
+ @JsonIgnore
+ protected ObjectMapper mapper = new ObjectMapper();
+ // generated, SHA-256 digest
+ @JsonIgnore
+ protected String id;
+
+ /**
+ * Convert bytes to hex string.
+ *
+ * @param bytesToConvert the bytes to convert
+ * @return the string
+ */
+ private static String convertBytesToHexString(byte[] bytesToConvert) {
+ StringBuffer hexString = new StringBuffer();
+ for (int i = 0; i < bytesToConvert.length; i++) {
+ hexString.append(Integer.toHexString(0xFF & bytesToConvert[i]));
+ }
+ return hexString.toString();
+ }
+
+
+ @JsonIgnore
+ public boolean isValidGeoDocument() {
+
+ boolean isValid = true;
+
+ isValid &= (this.getEntityType() != null);
+ isValid &= (this.getLatitude() != null);
+ isValid &= (this.getLongitude() != null);
+ isValid &= (this.getId() != null);
+ isValid &= (this.getSelfLink() != null);
+
+ isValid &= NodeUtils.isNumeric(this.getLatitude());
+ isValid &= NodeUtils.isNumeric(this.getLongitude());
+
+ return isValid;
+ }
+
+ /**
+ * Concat array.
+ *
+ * @param list the list
+ * @param delimiter the delimiter
+ * @return the string
+ */
+ private static String concatArray(List<String> list, char delimiter) {
+
+ if (list == null || list.size() == 0) {
+ return "";
+ }
+
+ StringBuilder result = new StringBuilder(64);
+
+ int listSize = list.size();
+ boolean firstValue = true;
+
+ for (String item : list) {
+
+ if (firstValue) {
+ result.append(item);
+ firstValue = false;
+ } else {
+ result.append(delimiter).append(item);
+ }
+
+ }
+
+ return result.toString();
+
+ }
+
+ /*
+ * We'll try and create a unique identity key that we can use for differencing the previously
+ * imported record sets as we won't have granular control of what is created/removed and when. The
+ * best we can hope for is identification of resources by generated Id until the Identity-Service
+ * UUID is tagged against all resources, then we can use that instead.
+ */
+
+ /**
+ * Generate unique sha digest.
+ *
+ * @param entityType the entity type
+ * @param fieldName the field name
+ * @param fieldValue the field value
+ * @return the string
+ * @throws NoSuchAlgorithmException the no such algorithm exception
+ */
+ public static String generateUniqueShaDigest(String entityType, String fieldName,
+ String fieldValue) throws NoSuchAlgorithmException {
+
+    /*
+     * SHA-256 yields an effectively collision-free identity, which a plain Java hashcode
+     * cannot provide.
+     */
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+ digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes());
+ return convertBytesToHexString(digest.digest());
+ }
+
+ /**
+ * Instantiates a new geo index document.
+ */
+ public GeoIndexDocument() {}
+
+ /*
+ * (non-Javadoc)
+ *
+ */
+
+ @Override
+ @JsonIgnore
+ public String getAsJson() throws JsonProcessingException {
+
+ if (latitude != null && longitude != null) {
+
+ /**
+ * A valid entry from this class is one that has both lat and long. If one or both is missing
+ * we shouldn't be indexing anything.
+ */
+
+ return NodeUtils.convertObjectToJson(this, true);
+
+ }
+
+ return null;
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields()
+ */
+ @Override
+ public void deriveFields() {
+
+ /*
+ * We'll try and create a unique identity key that we can use for differencing the previously
+ * imported record sets as we won't have granular control of what is created/removed and when.
+ * The best we can hope for is identification of resources by generated Id until the
+ * Identity-Service UUID is tagged against all resources, then we can use that instead.
+ */
+
+ OxmEntityDescriptor descriptor = oxmEntityLookup.getEntityDescriptors().get(entityType);
+ String entityPrimaryKeyName = NodeUtils.concatArray(
+ descriptor.getPrimaryKeyAttributeNames(), "/");
+
+ this.id =
+ NodeUtils.generateUniqueShaDigest(entityType, entityPrimaryKeyName, entityPrimaryKeyValue);
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "TopographicalEntity [" + ("entityType=" + entityType + ", ")
+ + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ")
+ + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ") + ("ID=" + id + ", ")
+ + ("selfLink=" + selfLink) + "]";
+ }
+
+ @Override
+ @JsonIgnore
+ public String getId() {
+ return this.id;
+ }
+
+ @JsonProperty("entityType")
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ @JsonProperty("entityPrimaryKeyValue")
+ public String getEntityPrimaryKeyValue() {
+ return entityPrimaryKeyValue;
+ }
+
+ public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) {
+ this.entityPrimaryKeyValue = entityPrimaryKeyValue;
+ }
+
+ @JsonProperty("entityPrimaryKeyName")
+ public String getEntityPrimaryKeyName() {
+ return entityPrimaryKeyName;
+ }
+
+ public void setEntityPrimaryKeyName(String entityPrimaryKeyName) {
+ this.entityPrimaryKeyName = entityPrimaryKeyName;
+ }
+
+ @JsonProperty("lat")
+ public String getLatitude() {
+ return latitude;
+ }
+
+ public void setLatitude(String latitude) {
+ this.latitude = latitude;
+ }
+
+ @JsonProperty("long")
+ public String getLongitude() {
+ return longitude;
+ }
+
+ public void setLongitude(String longitude) {
+ this.longitude = longitude;
+ }
+
+ @JsonProperty("link")
+ public String getSelfLink() {
+ return selfLink;
+ }
+
+ public void setSelfLink(String selfLink) {
+ this.selfLink = selfLink;
+ }
+
+ @JsonIgnore
+ public static long getSerialversionuid() {
+ return serialVersionUID;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java
new file mode 100644
index 0000000..ac89c6b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java
@@ -0,0 +1,219 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.inventory.entity;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+
+import javax.json.Json;
+import javax.json.JsonObject;
+
+/**
+ * The Class TopographicalEntity.
+ */
+public class TopographicalEntity implements Serializable {
+
+ private static final long serialVersionUID = -5188479658230319058L;
+
+ protected String entityType;
+ protected String entityPrimaryKeyValue;
+ protected String entityPrimaryKeyName;
+ protected String latitude;
+ protected String longitude;
+ protected String selfLink;
+
+ // generated, SHA-256 digest
+ protected String id;
+
+ /**
+ * Convert bytes to hex string.
+ *
+ * @param bytesToConvert the bytes to convert
+ * @return the string
+ */
+ private static String convertBytesToHexString(byte[] bytesToConvert) {
+ StringBuffer hexString = new StringBuffer();
+ for (int i = 0; i < bytesToConvert.length; i++) {
+ hexString.append(Integer.toHexString(0xFF & bytesToConvert[i]));
+ }
+ return hexString.toString();
+ }
+
+ /**
+ * Concat array.
+ *
+ * @param list the list
+ * @param delimiter the delimiter
+ * @return the string
+ */
+ private static String concatArray(List<String> list, char delimiter) {
+
+ if (list == null || list.size() == 0) {
+ return "";
+ }
+
+ StringBuilder result = new StringBuilder(64);
+
+ int listSize = list.size();
+ boolean firstValue = true;
+
+ for (String item : list) {
+
+ if (firstValue) {
+ result.append(item);
+ firstValue = false;
+ } else {
+ result.append(delimiter).append(item);
+ }
+
+ }
+
+ return result.toString();
+
+ }
+
+ /*
+ * We'll try and create a unique identity key that we can use for differencing the previously
+ * imported record sets as we won't have granular control of what is created/removed and when. The
+ * best we can hope for is identification of resources by generated Id until the Identity-Service
+ * UUID is tagged against all resources, then we can use that instead.
+ */
+
+ /**
+ * Generate unique sha digest.
+ *
+ * @param entityType the entity type
+ * @param fieldName the field name
+ * @param fieldValue the field value
+ * @return the string
+ * @throws NoSuchAlgorithmException the no such algorithm exception
+ */
+ public static String generateUniqueShaDigest(String entityType, String fieldName,
+ String fieldValue) throws NoSuchAlgorithmException {
+
+    /*
+     * SHA-256 yields an effectively collision-free identity, which a plain Java hashcode
+     * cannot provide.
+     */
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+ digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes());
+ return convertBytesToHexString(digest.digest());
+ }
+
+ /**
+ * Instantiates a new topographical entity.
+ */
+ public TopographicalEntity() {}
+
+ /*
+ * (non-Javadoc)
+ *
+ */
+ public String getAsJson() throws IOException {
+
+ JsonObject obj =
+ Json.createObjectBuilder().add("entityType", entityType).add("pkey", entityPrimaryKeyValue)
+ .add("location", Json.createObjectBuilder().add("lat", latitude).add("lon", longitude))
+ .add("selfLink", selfLink).build();
+
+ return obj.toString();
+ }
+
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "TopographicalEntity [" + ("entityType=" + entityType + ", ")
+ + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ")
+ + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ") + ("ID=" + id + ", ")
+ + ("selfLink=" + selfLink) + "]";
+ }
+
+ public String getId() {
+ return this.id;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public String getEntityPrimaryKeyValue() {
+ return entityPrimaryKeyValue;
+ }
+
+ public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) {
+ this.entityPrimaryKeyValue = entityPrimaryKeyValue;
+ }
+
+ public String getEntityPrimaryKeyName() {
+ return entityPrimaryKeyName;
+ }
+
+ public void setEntityPrimaryKeyName(String entityPrimaryKeyName) {
+ this.entityPrimaryKeyName = entityPrimaryKeyName;
+ }
+
+ public String getLatitude() {
+ return latitude;
+ }
+
+ public void setLatitude(String latitude) {
+ this.latitude = latitude;
+ }
+
+ public String getLongitude() {
+ return longitude;
+ }
+
+ public void setLongitude(String longitude) {
+ this.longitude = longitude;
+ }
+
+ public String getSelfLink() {
+ return selfLink;
+ }
+
+ public void setSelfLink(String selfLink) {
+ this.selfLink = selfLink;
+ }
+
+ public static long getSerialversionuid() {
+ return serialVersionUID;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java
new file mode 100644
index 0000000..7ae73a1
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java
@@ -0,0 +1,472 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.logging;
+
+import org.onap.aai.cl.eelf.LogMessageEnum;
+
+import com.att.eelf.i18n.EELFResourceManager;
+
+/**
+ * The Enum AaiUiMsgs.
+ */
+public enum AaiUiMsgs implements LogMessageEnum {
+ /** Arguments: {0} = Exception/error. */
+ FAILURE_TO_PROCESS_REQUEST,
+ /** Arguments: {0} = Message and or error body. */
+ FAILED_TO_DETERMINE,
+ /** Arguments: {0} = Exception/error. */
+ UNKNOWN_SERVER_ERROR,
+ /** Arguments: {0} = Message and or error body. */
+ FAILED_TO_ANALYZE,
+ /** Arguments: {0} = Exception/error. */
+ FAILED_TO_GET_NODES_QUERY_RESULT,
+ /** Arguments: {0} = Expected link count, {1} = Actual link count. */
+ UNEXPECTED_NUMBER_OF_LINKS,
+ /** Arguments: {0} = Reason. */
+ DANGLING_NODE_WARNING,
+ /** Arguments: {0} = Node count, {1} = Link count. */
+ VISUALIZATION_GRAPH_OUTPUT,
+ /** Arguments: {0} = JsonNode. */
+ ITEM_TYPE_NULL,
+ /** Arguments: {0} = Filter property. */
+ UNEXPECTED_TOKEN_COUNT,
+ /** Arguments: {0} = Error/exception message. */
+ ADD_SEARCH_TARGET_ATTRIBUTES_FAILED,
+ /** Arguments: {0} = Error/exception message. */
+ NODE_INTEGRITY_OVERLAY_ERROR,
+ /** Arguments: {0} = Node ID. */
+ NODE_INTEGRITY_ALREADY_PROCESSED,
+ /** Arguments: {0} = Node ID. */
+ SKIPPING_PROCESS_NODE_INTEGRITY,
+ /** Arguments: {0} = Error/exception message. */
+ FAILED_TO_PROCESS_NODE_INTEGRITY,
+ /** No argument */
+ MAX_EVALUATION_ATTEMPTS_EXCEEDED,
+ /** Arguments: {0} = Error/exception message. */
+ VISUALIZATION_OUTPUT_ERROR,
+ /** Arguments: {0} = Total resolve time, {1} = Total links retrieved, {2} = Op time. */
+ ALL_TRANSACTIONS_RESOLVED,
+ /** Arguments: {0} = Error/exception message. */
+ PROCESSING_LOOP_INTERUPTED,
+ /** Arguments: {0} = Node ID. */
+ IGNORING_SKELETON_NODE,
+ /** Arguments: {0} = Node count. */
+ OUTSTANDING_WORK_PENDING_NODES,
+ /** Arguments: {0} = Reason. */
+ FAILED_TO_ADD_SKELETON_NODE,
+ /** Arguments: {0} = Reason. */
+ FAILED_TO_PROCESS_SKELETON_NODE,
+ INVALID_RESOLVE_STATE_DURING_INIT,
+ /** Arguments: {0} = Reason. */
+ FAILED_TO_PROCESS_INITIAL_STATE,
+ /** Arguments: {0} = Relationship. */
+ SKIPPING_RELATIONSHIP,
+ /** Arguments: {0} = Failure reason. */
+ FAILED_TO_DETERMINE_NODE_ID,
+ /** Arguments: {0} = Error/exception message. */
+ EXTRACTION_ERROR,
+ /** Arguments: {0} = Error/exception message. */
+ SELF_LINK_NODE_PARSE_ERROR,
+ /** Arguments: {0} = Node ID. */
+ ROOT_NODE_DISCOVERED,
+ /** Arguments: {0} = Error/exception message. */
+ SELF_LINK_PROCESS_NEIGHBORS_ERROR,
+ /** Arguments: {0} = Error/exception message. */
+ SELF_LINK_JSON_PARSE_ERROR,
+ /** Arguments: {0} = Error/exception message. */
+ SELF_LINK_PROCESSING_ERROR,
+ /** Arguments: {0} = Entity type. */
+ UNHANDLED_OBJ_TYPE_FOR_ENTITY_TYPE,
+ /** Arguments: {0} = Attribute group. */
+ ATTRIBUTE_GROUP_FAILURE,
+ /** Arguments: {0} = Situational description, {1} = Exception message. */
+ EXCEPTION_CAUGHT,
+ /** Arguments: {0} = Operation name, {1} = Operation time. */
+ OPERATION_TIME,
+ /** Arguments: {0} = Error message. */
+ SEARCH_SERVLET_ERROR,
+ /** Arguments: {0} = Exception message. */
+ SEARCH_RESPONSE_BUILDING_EXCEPTION,
+ /** Arguments: {0} = Error message, {1} = Error message. */
+ SEARCH_TAG_ANNOTATION_ERROR,
+ /** Arguments: {0} = App type. */
+ QUERY_FAILED_UNHANDLED_APP_TYPE,
+ /** Arguments: {0} = Entity type. */
+ ENTITY_NOT_FOUND_IN_OXM,
+ /** Arguments: {0} = JSON conversion type, {1} = Error thrown. */
+ JSON_CONVERSION_ERROR,
+ /** Arguments: {0} = Node ID */
+ NO_RELATIONSHIP_DISCOVERED,
+ /** No argument */
+ SELF_LINK_NULL_EMPTY_RESPONSE,
+ /** Arguments: {0} = Error message. */
+ SELF_LINK_RELATIONSHIP_LIST_ERROR,
+ /** Arguments: {0} = AIN id, {1} = old depth, {2} = new depth. */
+ ACTIVE_INV_NODE_CHANGE_DEPTH,
+ /** Arguments: {0} = Node ID, {1} = Current state, {2} = New state {3} = Triggering action */
+ ACTIVE_INV_NODE_CHANGE_STATE,
+ /** Arguments: {0} = Current state, {1} = New state {2} = Triggering action */
+ ACTIVE_INV_NODE_CHANGE_STATE_NO_NODE_ID,
+ /** Arguments: {0} = Count Key {1} = Aggregation Key. */
+ AGGREGATION_KEY_ERROR,
+ /** Arguments: {0} Configuration */
+ CONFIGURATION_ERROR,
+ /** Arguments: {0} = Source. */
+ ERROR_PARSING_JSON_PAYLOAD_NONVERBOSE,
+ /** Arguments: {0} = Payload. */
+ ERROR_PARSING_JSON_PAYLOAD_VERBOSE,
+ /** Arguments: {0} = Key {1} = JSON Blob. */
+ ERROR_FETCHING_JSON_VALUE,
+ /** Arguments: {0} = Error. */
+ ERROR_PARSING_PARAMS,
+ /** No argument */
+ INVALID_REQUEST_PARAMS,
+ /** Arguments: {0} = Key. */
+ ERROR_SORTING_VIOLATION_DATA,
+ /** Arguments: {0} = exception */
+ ERROR_SERVLET_PROCESSSING,
+ /** Arguments: {0} = exception */
+ ERROR_BUILDING_RESPONSE_FOR_TABLE_QUERY,
+ /** Arguments: {0} = exception */
+ ERROR_BUILDING_SEARCH_RESPONSE,
+ /** No argument */
+ ERROR_CSP_CONFIG_FILE,
+ /** Arguments: {0} = exception */
+ ERROR_SHUTDOWN_EXECUTORS,
+ /** No argument */
+ ERROR_LOADING_OXM,
+ /** Arguments: {0} = exception */
+ ERROR_GETTING_DATA_FROM_AAI,
+ /** No argument */
+ WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED,
+ /** Arguments: {0} = Entity Type */
+ MISSING_ENTITY_DESCRIPTOR,
+ /** Arguments: {0} = Error */
+ SELF_LINK_GET,
+ /** Arguments: {0} = Error */
+ ES_FAILED_TO_CONSTRUCT_QUERY,
+ /** Arguments: {0} = Error */
+ ES_RETRIEVAL_FAILED,
+ /** Arguments: {0} = Error */
+ ES_LINK_UPSERT,
+ /** Arguments: {0} = Element */
+ ES_SIMPLE_PUT,
+ /** Arguments: {0} = Value {1} = Element {2} = Error */
+ ES_ABORT_CROSS_ENTITY_REF_SYNC,
+ /** Arguments: {0} Return Code */
+ ES_OPERATION_RETURN_CODE,
+ /** Arguments: {0} = Error */
+ ES_CROSS_ENTITY_REF_PUT,
+ /** No argument */
+ ES_CROSS_REF_SYNC_VERSION_CONFLICT,
+ /** Arguments: {0} Result Code {1} = Error */
+ ES_CROSS_REF_SYNC_FAILURE,
+ /** Arguments: {0} = Error */
+ ES_FAILED_TO_CONSTRUCT_URI,
+ /** No argument */
+ ES_RETRIEVAL_FAILED_RESYNC,
+ /** Arguments: {0} = Entity */
+ ES_CROSS_ENTITY_RESYNC_LIMIT,
+ /** Arguments: {0} Entity Name */
+ ES_PKEYVALUE_NULL,
+ /** Arguments: {0} = Error */
+ ES_STORE_FAILURE,
+ /** Arguments: {0} Index Name {1} = Error */
+ ES_PRE_SYNC_FAILURE,
+ /** Arguments: {0} Index Name */
+ ES_SYNC_CLEAN_UP,
+ /** Arguments: {0} Index Name {1} Size before clean up {2} = Size after clean up */
+ ES_SYNC_CLEAN_UP_SIZE,
+ /** Arguments: {0} Index Name {1} Index Type {2} = Size before delete */
+ ES_SYNC_SELECTIVE_DELETE,
+ /** Arguments: {0} Index Name {1} Number of records */
+ ES_BULK_DELETE,
+ /** Arguments: {0} Index name {1} = Error */
+ ES_BULK_DELETE_ERROR,
+ /** Arguments: {0} Type of retrieval {1} Completion Time */
+ COLLECT_TIME_WITH_ERROR,
+ /** Arguments: {0} Type of retrieval {1} Completion Time */
+ COLLECT_TIME_WITH_SUCCESS,
+ /** Arguments: {0} Type of retrieval {1} Number of records */
+ COLLECT_TOTAL,
+ /** Arguments: {0} Number of required fetches */
+ SYNC_NUMBER_REQ_FETCHES,
+ /** Arguments: {0} Number of total fetches {1} Number of available records*/
+ SYNC_NUMBER_TOTAL_FETCHES,
+ /** Arguments: {0} Completion Time */
+ COLLECT_TOTAL_TIME,
+ /** Arguments: {0} = Error */
+ ES_SCROLL_CONTEXT_ERROR,
+ /** No argument */
+ ES_BULK_DELETE_SKIP,
+ /** Arguments: {0} = Number of docs */
+ ES_BULK_DELETE_START,
+ /** No argument */
+ SELF_LINK_CROSS_REF_SYNC,
+ /** Arguments: {0} = message */
+ ERROR_GENERIC,
+ /** Arguments: {0} = error */
+ JSON_PROCESSING_ERROR,
+ /** Arguments: {0} = exception */
+ ERROR_PROCESSING_REQUEST,
+ /** Arguments: {0} = Self Link */
+ SELF_LINK_GET_NO_RESPONSE,
+ /** Arguments: {0} = error */
+ HISTORICAL_COLLECT_ERROR,
+ /** Arguments: {0} = Time */
+ HISTORICAL_ENTITY_COUNT_SUMMARIZER_STARTING,
+ /** No argument */
+ HISTORICAL_ENTITY_COUNT_SUMMARIZER_NOT_STARTED,
+ /** Arguments: {0} = Controller {1} = Time */
+ HISTORICAL_SYNC_DURATION,
+ /** No argument */
+ HISTORICAL_SYNC_PENDING,
+ /** Arguments: {0} = Time */
+ HISTORICAL_SYNC_TO_BEGIN,
+ /** Arguments: {0} = message */
+ DEBUG_GENERIC,
+ /** Arguments: {0} = message */
+ INFO_GENERIC,
+ /** Arguments: {0} = message */
+ WARN_GENERIC,
+ /** Arguments: {0} = context {1} = Exception*/
+ INTERRUPTED,
+ /** Arguments: {0} = Entity Type {1} Entity */
+ GEO_SYNC_IGNORING_ENTITY,
+ /** Arguments: {0} = reason */
+ OXM_LOADING_ERROR,
+ /** Arguments: {0} = type */
+ OXM_FAILED_RETRIEVAL,
+ OXM_FILE_NOT_FOUND,
+ /** No argument */
+ OXM_READ_ERROR_NONVERBOSE,
+ /** Arguments: {0} = OXM File name */
+ OXM_READ_ERROR_VERBOSE,
+ /** No argument */
+ OXM_PARSE_ERROR_NONVERBOSE,
+ /** Arguments: {0} = OXM File name {1} = Exception*/
+ OXM_PARSE_ERROR_VERBOSE,
+ /** Arguments: {0} = Numerical value for loaded OXM version */
+ OXM_LOAD_SUCCESS,
+ /** Arguments: {0} = Entity {1} = Found property-value*/
+ OXM_PROP_DEF_ERR_CROSS_ENTITY_REF,
+ /** Arguments: {0} = Sequence Number */
+ ETAG_RETRY_SEQ,
+ /** Arguments: {0} = Reason */
+ ETAG_WAIT_INTERRUPTION,
+ /** Arguments: {0} = URL {1} = Sequence Number */
+ QUERY_AAI_RETRY_SEQ,
+ /** Arguments: {0} = URL {1} = Sequence Number */
+ QUERY_AAI_RETRY_DONE_SEQ,
+ /** Arguments: {0} = Reason */
+ QUERY_AAI_WAIT_INTERRUPTION,
+ /** Arguments: {0} = URL {1} = Sequence Number */
+ QUERY_AAI_RETRY_FAILURE_WITH_SEQ,
+ /** Arguments: {0} = URL */
+ QUERY_AAI_RETRY_MAXED_OUT,
+ /** Arguments: {0} = Reason */
+ PEGGING_ERROR,
+ /** Arguments: {0} = Key */
+ DATA_CACHE_SUCCESS,
+ /** Arguments: {0} = URL {1} = Sequence Number */
+ EXECUTOR_SERV_EXCEPTION,
+ /** Arguments: {0} = Exception */
+ DISK_CACHE_READ_IO_ERROR,
+ /** Arguments: {0} = Exception */
+ DISK_CREATE_DIR_IO_ERROR,
+ /** Arguments: {0} = Exception */
+ DISK_DATA_WRITE_IO_ERROR,
+ /** Arguments: {0} = Data Item {1} = Exception */
+ DISK_NAMED_DATA_WRITE_IO_ERROR,
+ /** Arguments: {0} = Data Item {1} = Exception */
+ DISK_NAMED_DATA_READ_IO_ERROR,
+ /** No argument */
+ OFFLINE_STORAGE_PATH_ERROR,
+ /** Arguments: {0} = URL {1} = Error */
+ RESTFULL_OP_ERROR_VERBOSE,
+ /** Arguments: {0} = Method {1} = Time {2} = URL {3} = Result Code */
+ RESTFULL_OP_COMPLETE,
+ /** No argument */
+ INITIALIZE_OXM_MODEL_LOADER,
+ /** Arguments: {0} = Exception */
+ AAI_RETRIEVAL_FAILED_GENERIC,
+ /** Arguments: {0} = Self Link */
+ AAI_RETRIEVAL_FAILED_FOR_SELF_LINK,
+ /** Arguments: {0} = Exception */
+ ATTRIBUTES_NOT_UPDATED_EXCEPTION,
+ /** Arguments: {0} = Message */
+ ATTRIBUTES_NOT_UPDATED_MESSAGE,
+ /** Arguments: {0} = Exception */
+ ATTRIBUTES_ERROR_GETTING_AAI_CONFIG_OR_ADAPTER,
+ /** Arguments: {0} = Schema File URI */
+ ATTRIBUTES_ERROR_LOADING_MODEL_VERSION,
+ /** Arguments: {0} = Request URI {1} = Edit Request Body */
+ ATTRIBUTES_HANDLING_EDIT,
+ /** Arguments: {0} = Object URI {1} = Attribute ID {2} Attribute Values */
+ ATTRIBUTES_UPDATE_METHOD_CALLED,
+ /** Arguments: {0} = Attribute ID */
+ ATTRIBUTES_USER_NOT_AUTHORIZED_TO_UPDATE,
+ /** Arguments: {0} = Cookie */
+ COOKIE_FOUND,
+ /** No argument */
+ COOKIE_NOT_FOUND,
+ /** Arguments: {0} = Message */
+ INVALID_REQUEST,
+ /** Arguments: {0} = User ID */
+ USER_AUTHORIZATION_FILE_UNAVAILABLE,
+ /** Arguments: {0} = URL {1} = Cause */
+ INVALID_URL_VERBOSE,
+ /** Arguments: {0} = Row ID */
+ DI_DATA_NOT_FOUND_NONVERBOSE,
+ /** Arguments: {0} = Row ID {1} Attempt count */
+ DI_DATA_NOT_FOUND_VERBOSE,
+ /** Arguments: {0} = Time in ms {1} Status */
+ DI_MS_TIME_FOR_DATA_FETCH,
+ /** Arguments: {0} = Number of Entity Links */
+ ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY,
+ /** Arguments: {0} = Message */
+ ERROR_EXTRACTING_FROM_RESPONSE,
+ /** No argument */
+ ERROR_LOADING_OXM_SEARCHABLE_ENTITIES,
+ /** Arguments: {0} = Message */
+ ES_SEARCHABLE_ENTITY_SYNC_ERROR,
+ /** Arguments: {0} = Message */
+ FAILED_TO_REGISTER_DUE_TO_NULL,
+ /** Arguments: {0} = File Path */
+ FAILED_TO_RESTORE_TXN_FILE_MISSING,
+ /** Arguments: {0} = Index Name */
+ INDEX_ALREADY_EXISTS,
+ /** Arguments: {0} = Index Name */
+ INDEX_EXISTS,
+ /** Arguments: {0} = Index Name {1} = Operation Result */
+ INDEX_INTEGRITY_CHECK_FAILED,
+ /** Arguments: {0} = Index Name */
+ INDEX_NOT_EXIST,
+ /** Arguments: {0} = Index Name */
+ INDEX_RECREATED,
+ /** Arguments: {0} = Time */
+ SEARCH_ENGINE_SYNC_STARTED,
+ /** Arguments: {0} = Time */
+ SKIP_PERIODIC_SYNC_AS_SYNC_DIDNT_FINISH,
+ /** Arguments: {0} = Message */
+ SYNC_DURATION,
+ /** Arguments: {0} = Entity Type */
+ ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND,
+ /** Arguments: {0} = AAI Query Result */
+ ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION,
+ /** Arguments: {0} = Message */
+ ENTITY_SYNC_FAILED_QUERY_ERROR,
+ /** Arguments: {0} = Self Link Query */
+ SELF_LINK_DETERMINATION_FAILED_GENERIC,
+ /** Arguments: {0} = Number of Entity Links */
+ SELF_LINK_DETERMINATION_FAILED_UNEXPECTED_LINKS,
+ /** Arguments: {1} = Query {2} = Operation Result Code {3} = Operation Result */
+ SELF_LINK_RETRIEVAL_FAILED,
+ /** Arguments: {0} = Controller {1} = Synchronizer Current Internal State {2} = New State {3} = Caused By Action */
+ SYNC_INTERNAL_STATE_CHANGED,
+ /** Arguments: {0} = Message */
+ SYNC_INVALID_CONFIG_PARAM,
+ /** Arguments: {0} = Synchronizer Current Internal State */
+ SYNC_NOT_VALID_STATE_DURING_REQUEST,
+ /** No argument */
+ SYNC_SKIPPED_SYNCCONTROLLER_NOT_INITIALIZED,
+ /** No argument */
+ SYNC_START_TIME,
+ /** Arguments: {0} = Controller {1} = Time */
+ SYNC_TO_BEGIN,
+ /** Arguments: {0} = File Path */
+ WILL_RETRIEVE_TXN,
+ /** Arguments: {0} = Configuration file name {1} = Exception */
+ CONFIG_NOT_FOUND_VERBOSE,
+ /** Arguments: {0} = File name */
+ FILE_NOT_FOUND,
+ /** Arguments: {0} = File name */
+ FILE_READ_IN_PROGRESS,
+ ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES,
+ /** Arguments: {0} = Error message */
+ ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR,
+ /** Arguments: {0} = Error message */
+ ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR,
+ /** Arguments: {0} = Error message. */
+ ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED,
+ /** Arguments: {0} = Error message */
+ SEARCH_ADAPTER_ERROR,
+ /** Arguments: {0} = Decoding exception message */
+ UNSUPPORTED_URL_ENCODING,
+ /** Arguments: {0} = Invalid URL */
+ INVALID_REDIRECT_URL,
+ /** Arguments: {0} = Valid login URL */
+ VALID_REDIRECT_URL,
+ /** Arguments: {0} = Query Parameter Self-Link Extraction Error */
+ QUERY_PARAM_EXTRACTION_ERROR,
+ /** Arguments: {0} = Info message */
+ LOGIN_FILTER_INFO,
+ /** Arguments: {0} = Debug message */
+ LOGIN_FILTER_DEBUG,
+ /** Arguments: {0} = URL to extract parameter from */
+ ERROR_REMOVING_URL_PARAM,
+ /** Arguments: {0} = Hash value */
+ ERROR_INVALID_HASH,
+ ERROR_HASH_NOT_FOUND,
+ ERROR_FILTERS_NOT_FOUND,
+ ERROR_READING_HTTP_REQ_PARAMS,
+ /** Arguments: {0} = Exception */
+ ERROR_D3_GRAPH_VISUALIZATION,
+ /** Arguments: {0} = Exception */
+ ERROR_AAI_QUERY_WITH_RETRY,
+ /** Arguments: Error extracting resource path from self-link. Error = {0} */
+ ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK,
+ /** Arguments: {0} = Schema file location */
+ ERROR_READING_JSON_SCHEMA,
+ /** Arguments: {0} = UI view name */
+ VIEW_NAME_NOT_SUPPORTED,
+ /** Arguments: {0} = response code, {1} = filter name */
+ ERROR_FETCHING_FILTER_VALUES,
+ /** Arguments: {0} = query type, {1} = view name */
+ ERROR_PROCESSING_WIDGET_REQUEST,
+ /** Arguments: {0} = Time in ms */
+ DR_PROCESSING_TIME,
+ /** Arguments: {0} = Response code {1} = payload */
+ DR_PROCESSING_FAILURE,
+ /** Arguments: {0} = request uri */
+ DR_REQUEST_URI_FOR_PROXY_UNKNOWN,
+ /** Arguments: {0} = origin-url {1} = dr-url */
+ DR_PROXY_FROM_TO,
+ /** Arguments: {0} = Exception */
+ URI_DECODING_EXCEPTION,
+ /** Arguments: {0} = Value {1} = Error */
+ ENCRYPTION_ERROR,
+ /** Arguments: {0} = Encrypted value {1} = Error */
+ DECRYPTION_ERROR,
+ /** Arguments: {0} = URI */
+ RESOURCE_NOT_FOUND;
+
+ /**
+ * Static initializer to ensure the resource bundles for this class are loaded...
+ */
+ static {
+ EELFResourceManager.loadMessageBundle("logging/AAIUIMsgs");
+ }
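+
+  /*
+   * Illustrative usage (a sketch, not part of this enum's contract): a message key is
+   * passed to the common-logging Logger along with its positional arguments, e.g.
+   *
+   *   logger.error(AaiUiMsgs.ERROR_GENERIC, "failed to parse payload");
+   *   LOG.error(AaiUiMsgs.RESOURCE_NOT_FOUND, request.getOriginalRef().toString());
+   *
+   * The number of arguments supplied should match the placeholders documented on each
+   * constant.
+   */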
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/LoggingUtils.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/LoggingUtils.java
new file mode 100644
index 0000000..04ad83a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/LoggingUtils.java
@@ -0,0 +1,43 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.logging.util;
+
+/**
+ * The Class LoggingUtils.
+ */
+public class LoggingUtils {
+
+ /**
+ * Sets the duration.
+ *
+ * @param startTime the start time
+ * @param stopTime the stop time
+ * @return the string
+ */
+ public static String setDuration(long startTime, long stopTime) {
+ return String.valueOf(stopTime - startTime);
+ }
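+
+  /*
+   * Example (hypothetical timestamps): setDuration(1000L, 1250L) returns "250", i.e. the
+   * elapsed time in whatever unit the caller's timestamps use (typically milliseconds
+   * from System.currentTimeMillis()).
+   */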
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java
new file mode 100644
index 0000000..44068a1
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java
@@ -0,0 +1,204 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.logging.util;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.camel.Exchange;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.search.SearchServiceAdapter;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+import org.slf4j.MDC;
+
+/**
+ * The Class ServletUtils.
+ */
+public class ServletUtils {
+
+ /**
+ * Execute get query.
+ *
+ * @param logger the logger
+ * @param search the search
+ * @param response the response
+ * @param requestUrl the request url
+ * @return the operation result
+ * @throws Exception the exception
+ */
+ public static OperationResult executeGetQuery(Logger logger, SearchServiceAdapter search,
+ HttpServletResponse response, String requestUrl) throws Exception {
+
+ OperationResult opResult = search.doGet(requestUrl, "application/json");
+
+ if (opResult.getResultCode() > 300) {
+ setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult());
+ } else {
+ response.setStatus(opResult.getResultCode());
+ }
+
+ return opResult;
+
+ }
+
+ /**
+ * Execute post query.
+ *
+ * @param logger the logger
+ * @param search the search
+ * @param response the response
+ * @param requestUrl the request url
+ * @param requestJsonPayload the request json payload
+ * @return the operation result
+ * @throws Exception the exception
+ */
+ public static OperationResult executePostQuery(Logger logger, SearchServiceAdapter search,
+ HttpServletResponse response, String requestUrl, String requestJsonPayload) throws Exception {
+
+ OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json");
+
+ if (opResult.getResultCode() > 300) {
+ setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult());
+
+ } else {
+ response.setStatus(opResult.getResultCode());
+ }
+
+ return opResult;
+ }
+
+ /**
+ * Handle search servlet errors.
+ *
+ * @param logger the logger
+ * @param errorMsg the error msg
+ * @param exc the exc
+ * @param response the response
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public static void handleSearchServletErrors(Logger logger, String errorMsg, Exception exc,
+ HttpServletResponse response) throws IOException {
+    String errorLogMsg = (exc == null ? errorMsg : errorMsg + ". Error: "
+ + exc.getLocalizedMessage());
+ logger.error(AaiUiMsgs.ERROR_GENERIC, errorLogMsg);
+ response.setContentType("application/json");
+ PrintWriter out = response.getWriter();
+ out.println(generateJsonErrorResponse(errorMsg));
+ out.close();
+ }
+
+ /**
+ * Generate json error response.
+ *
+ * @param message the message
+ * @return the string
+ */
+  public static String generateJsonErrorResponse(String message) {
+    // Quote the message so that the generated payload is valid JSON
+    return String.format("{ \"errorMessage\" : \"%s\" }", message);
+  }
+
+ /**
+ * Sets the servlet response.
+ *
+ * @param logger the logger
+ * @param isError the is error
+ * @param responseCode the response code
+ * @param response the response
+ * @param postPayload the post payload
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public static void setServletResponse(Logger logger, boolean isError, int responseCode,
+ HttpServletResponse response, String postPayload) throws IOException {
+
+ if (isError) {
+ logger.error(AaiUiMsgs.ERROR_GENERIC, postPayload);
+ }
+
+ response.setStatus(responseCode);
+
+ if (postPayload != null) {
+ response.setContentType("application/json");
+ PrintWriter out = response.getWriter();
+ out.println(postPayload);
+ out.close();
+ }
+ }
+
+  /**
+   * Gets the full url.
+   *
+   * @param eHost the target host
+   * @param ePort the target port
+   * @param resourceUrl the resource url
+   * @return the full url
+   */
+  public static String getFullUrl(String eHost, String ePort, String resourceUrl) {
+    return String.format("http://%s:%s%s", eHost, ePort, resourceUrl);
+  }
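+
+  /*
+   * Example with illustrative values: getFullUrl("localhost", "9200", "/entity-count-history/_search")
+   * returns "http://localhost:9200/entity-count-history/_search"; the host, port and resource
+   * path are hypothetical and supplied by the caller's configuration.
+   */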
+
+ public static void setUpMdcContext(final Exchange exchange, final HttpServletRequest request) {
+
+ String txnId;
+
+ Object xTransactionId = exchange.getIn().getHeader("X-TransactionId");
+ if (xTransactionId == null) {
+ txnId = NodeUtils.getRandomTxnId();
+ } else {
+ txnId = (String) xTransactionId;
+ }
+
+ String fromAppId;
+
+ Object partnerName = exchange.getIn().getHeader("X-FromAppId");
+ if (partnerName == null) {
+ fromAppId = SparkyConstants.APP_NAME;
+ } else {
+ fromAppId = (String) partnerName;
+ }
+
+ MdcContext.initialize(txnId, "AAI-UI", "", fromAppId,
+ request.getRequestURI() + ":" + request.getLocalPort());
+ }
+
+ public static Map<String, List<String>> getTxnHeaders() {
+ Map<String, List<String>> headers = new HashMap<String, List<String>>();
+ headers.put("X-TransactionId", Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID)));
+ headers.put("X-FromAppId", Arrays.asList(MDC.get(MdcContext.MDC_PARTNER_NAME)));
+ return headers;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java
new file mode 100644
index 0000000..4c393e1
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java
@@ -0,0 +1,407 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TreeMap;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
+import org.apache.camel.component.restlet.RestletConstants;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.inventory.EntityHistoryQueryBuilder;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.util.RestletUtils;
+import org.restlet.Request;
+import org.restlet.Response;
+import org.restlet.data.ClientInfo;
+import org.restlet.data.MediaType;
+import org.restlet.data.Status;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+
+/**
+ * Receives and processes Entity Count History requests
+ */
+public class EntityCountHistoryProcessor implements Processor {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(EntityCountHistoryProcessor.class);
+
+ private static final long serialVersionUID = 1L;
+
+ private ElasticSearchAdapter elasticSearchAdapter = null;
+ private ObjectMapper mapper;
+
+ private static final String SEARCH_PRETTY_STRING = "_search?pretty";
+ private static final String TYPE = "type";
+ private static final String TABLE = "table";
+ private static final String GRAPH = "graph";
+
+ private List<String> entityTypesToSummarize;
+ private List<String> vnfEntityTypes;
+
+ private String entityCountHistoryIndexName;
+
+ private boolean summarizeVnfs = false;
+
+ private RestletUtils restletUtils = new RestletUtils();
+
+  /**
+   * Instantiates a new entity count history processor.
+   */
+ public EntityCountHistoryProcessor(ElasticSearchAdapter elasticSearchAdapter,
+ String entityTypesToSummarizeDelimitedList, String vnfEntityTypesDelimitedList, String entityCountHistoryIndexName) {
+
+ this.elasticSearchAdapter = elasticSearchAdapter;
+ this.entityCountHistoryIndexName = entityCountHistoryIndexName;
+
+ entityTypesToSummarize =
+ Arrays.asList(entityTypesToSummarizeDelimitedList.toLowerCase().split("[\\s,]+"));
+
+ vnfEntityTypes =
+ Arrays.asList(vnfEntityTypesDelimitedList.toLowerCase().split("[\\s,]+"));
+
+ summarizeVnfs = vnfEntityTypesDelimitedList.toLowerCase().contains("vnf");
+
+ this.mapper = new ObjectMapper();
+ this.mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
+ }
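+
+  /*
+   * For illustration (hypothetical configuration values): an entityTypesToSummarizeDelimitedList
+   * of "pserver, vserver,vnf" is lower-cased and split on commas/whitespace into
+   * [pserver, vserver, vnf], and summarizeVnfs is enabled whenever the
+   * vnfEntityTypesDelimitedList mentions "vnf".
+   */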
+
+ /**
+   * Processes an entity count history search request.
+ *
+ * @param exchange The Exchange object generated by Apache Camel for the incoming request
+ */
+
+ @Override
+ public void process(Exchange exchange) throws Exception {
+
+ Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class);
+ Response restletResponse =
+ exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class);
+
+ Object xTransactionId = exchange.getIn().getHeader("X-TransactionId");
+ if (xTransactionId == null) {
+ xTransactionId = NodeUtils.getRandomTxnId();
+ }
+
+ Object partnerName = exchange.getIn().getHeader("X-FromAppId");
+ if (partnerName == null) {
+ partnerName = "Browser";
+ }
+
+ /*
+     * Disables automatic Apache Camel Restlet component logging, which would otherwise print an
+     * undesirable log entry that includes client (e.g. browser) information
+ */
+ request.setLoggable(false);
+
+ ClientInfo clientInfo = request.getClientInfo();
+ MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName,
+ clientInfo.getAddress() + ":" + clientInfo.getPort());
+
+ String typeParameter = getTypeParameter(exchange);
+
+ if (null != typeParameter && !typeParameter.isEmpty()) {
+ OperationResult operationResult = null;
+
+ try {
+ operationResult = getResults(restletResponse, typeParameter);
+ restletResponse.setEntity(operationResult.getResult(), MediaType.APPLICATION_JSON);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, exc.getLocalizedMessage());
+ }
+ } else {
+ LOG.error(AaiUiMsgs.RESOURCE_NOT_FOUND, request.getOriginalRef().toString());
+ String errorMessage =
+ restletUtils.generateJsonErrorResponse("Unsupported request. Resource not found.");
+ restletResponse.setEntity(errorMessage, MediaType.APPLICATION_JSON);
+ restletResponse.setStatus(Status.CLIENT_ERROR_NOT_FOUND);
+ }
+
+ exchange.getOut().setBody(restletResponse);
+ }
+
+
+ /**
+ * Format line graph output
+ *
+ * @param results The results
+ * @return The JSON object
+ * @throws JsonProcessingException The JSON processing exception
+ */
+ public JSONObject formatLineGraphOutput(String results) throws JsonProcessingException {
+ Map<Long, Long> countByDateMap = new HashMap<Long, Long>();
+
+ JsonNode resultNode = null;
+
+ JSONObject finalResult = new JSONObject();
+ JSONArray finalResultArr = new JSONArray();
+
+ try {
+ resultNode = mapper.readTree(results);
+
+ final JsonNode bucketsNode = getBucketsNode(resultNode);
+
+ if (bucketsNode.isArray()) {
+
+ for (final JsonNode entityNode : bucketsNode) {
+ final JsonNode dateBucketNode = entityNode.get("group_by_date").get("buckets");
+ if (dateBucketNode.isArray()) {
+ for (final JsonNode dateBucket : dateBucketNode) {
+ Long date = dateBucket.get("key").asLong();
+ final JsonNode countBucketNode =
+ dateBucket.get("sort_by_date").get("hits").get("hits");
+
+ if (countBucketNode.isArray()) {
+ final JsonNode latestEntityNode = countBucketNode.get(0);
+
+ long currentCount = latestEntityNode.get("_source").get("count").asLong();
+ if (countByDateMap.containsKey(date)) {
+ // add to the value if map already contains this date
+ currentCount += countByDateMap.get(date);
+ }
+
+ countByDateMap.put(date, currentCount);
+ }
+ }
+
+ }
+ }
+ }
+
+ /*
+ * Sort the map by epoch timestamp
+ */
+ Map<Long, Long> sortedMap = new TreeMap<Long, Long>(countByDateMap);
+ for (Entry<Long, Long> entry : sortedMap.entrySet()) {
+ JSONObject dateEntry = new JSONObject();
+ dateEntry.put("date", entry.getKey());
+ dateEntry.put("count", entry.getValue());
+ finalResultArr.put(dateEntry);
+ }
+
+ } catch (Exception exc) {
+ LOG.warn(AaiUiMsgs.ERROR_BUILDING_SEARCH_RESPONSE, exc.getLocalizedMessage());
+ }
+
+ return finalResult.put("result", finalResultArr);
+ }
+
+ /**
+ * Format table output
+ *
+ * @param results The results
+ * @return The JSON object
+ * @throws JsonProcessingException The JSON processing exception
+ */
+ public JSONObject formatTableOutput(String results) throws JsonProcessingException {
+ JsonNode resultNode = null;
+
+ JSONObject finalResult = new JSONObject();
+ JSONArray entitiesArr = new JSONArray();
+
+ Map<String, Long> entityCountInTable = initializeEntityMap();
+
+ long vnfCount = 0;
+
+ try {
+ resultNode = mapper.readTree(results);
+
+ final JsonNode bucketsNode = getBucketsNode(resultNode);
+ if (bucketsNode.isArray()) {
+
+ for (final JsonNode entityNode : bucketsNode) {
+ String entityType = entityNode.get("key").asText();
+ boolean isAVnf = vnfEntityTypes.contains(entityType);
+ long countValue = 0;
+
+ if (isAVnf || entityCountInTable.get(entityType) != null) {
+ final JsonNode hitsBucketNode = entityNode.get("sort_by_date").get("hits").get("hits");
+ if (hitsBucketNode.isArray()) {
+ // the first bucket will be the latest
+ final JsonNode hitNode = hitsBucketNode.get(0);
+
+ countValue = hitNode.get("_source").get("count").asLong();
+
+ /*
+ * Special case: Add all the VNF types together to get aggregate count
+ */
+ if (summarizeVnfs && isAVnf) {
+ vnfCount += countValue;
+ countValue = vnfCount;
+ entityType = "vnf";
+ }
+
+ entityCountInTable.replace(entityType, countValue);
+ }
+ }
+
+ }
+ }
+ for (Entry<String, Long> entry : entityCountInTable.entrySet()) {
+ JSONObject entityType = new JSONObject();
+ entityType.put("key", entry.getKey());
+ entityType.put("doc_count", entry.getValue());
+ entitiesArr.put(entityType);
+ }
+
+ finalResult.put("result", entitiesArr);
+
+ } catch (Exception exc) {
+ LOG.warn(AaiUiMsgs.ERROR_BUILDING_RESPONSE_FOR_TABLE_QUERY, exc.getLocalizedMessage());
+ }
+
+ return finalResult;
+ }
+
+ /**
+ * Gets the results
+ *
+ * @param response The response
+ * @param type The type
+ * @return The results
+ */
+ public OperationResult getResults(Response response, String type) {
+ OperationResult operationResult = new OperationResult();
+
+ String reqPayload = EntityHistoryQueryBuilder.getQuery(type).toString();
+
+ try {
+ final String fullUrlStr = elasticSearchAdapter
+ .buildElasticSearchUrlForApi(entityCountHistoryIndexName, SEARCH_PRETTY_STRING);
+
+ OperationResult opResult = elasticSearchAdapter.doPost(fullUrlStr, reqPayload,
+ javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE);
+
+ JSONObject finalOutput = null;
+ if (type.equalsIgnoreCase(TABLE)) {
+ finalOutput = formatTableOutput(opResult.getResult());
+ } else if (type.equalsIgnoreCase(GRAPH)) {
+ finalOutput = formatLineGraphOutput(opResult.getResult());
+ }
+
+ if (finalOutput != null) {
+ response.setEntity(finalOutput.toString(), MediaType.APPLICATION_JSON);
+ operationResult.setResult(finalOutput.toString());
+ }
+ } catch (JsonProcessingException exc) {
+      restletUtils.handleRestletErrors(LOG, "Unable to map JSON payload", exc, response);
+ }
+
+ return operationResult;
+ }
+
+ /**
+ * Gets the buckets node
+ *
+ * @param node The node
+ * @return The buckets node
+ * @throws Exception The exception
+ */
+ public JsonNode getBucketsNode(JsonNode node) throws Exception {
+ if (node.get("aggregations").get("group_by_entityType").get("buckets") != null) {
+ return node.get("aggregations").get("group_by_entityType").get("buckets");
+ } else {
+ throw new Exception("Failed to map JSON response");
+ }
+ }
+
+ /**
+ * Initialize entity map
+ *
+ * @return the map
+ */
+ private Map<String, Long> initializeEntityMap() {
+ Map<String, Long> entityMap = new HashMap<String, Long>();
+ for (String entity : entityTypesToSummarize) {
+ entityMap.put(entity, (long) 0);
+ }
+
+ return entityMap;
+ }
+
+ /**
+ * Extracts the "type" query parameter from the request URI
+ *
+ * @param exchange
+ * @return String containing the value of the "type" query parameter of the request. Returns null
+ * if no "type" parameter found
+ */
+ public String getTypeParameter(Exchange exchange) {
+ String typeParameter = null;
+
+ String requestUriParameterString = exchange.getIn().getHeader("CamelHttpQuery", String.class);
+
+ if (null != requestUriParameterString) {
+ String[] requestParameterParts = requestUriParameterString.split("&");
+
+ String[] parameter = requestParameterParts[0].split("=");
+ String currentParameterKey = parameter[0];
+
+ if (null != currentParameterKey && !currentParameterKey.isEmpty()) {
+ // Check if we're looking at the "type" parameter key
+ if (currentParameterKey.equals(TYPE)) {
+ boolean uriIncludesTypeParameterValue =
+ (parameter.length >= 2) && !parameter[1].isEmpty();
+
+ if (uriIncludesTypeParameterValue) {
+ String typeParameterValue = parameter[1];
+
+ // Is the parameter value one that we return data for?
+ if (typeParameterValue.equalsIgnoreCase(TABLE)
+ || typeParameterValue.equalsIgnoreCase(GRAPH)) {
+ typeParameter = typeParameterValue;
+ }
+ }
+ }
+ }
+ }
+
+ return typeParameter;
+ }
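+
+  /*
+   * Illustrative behaviour: a request query string of "type=table&other=x" yields "table",
+   * "type=graph" yields "graph", while an unsupported value such as "type=chart"
+   * (hypothetical), or a query string where "type" is not the first parameter, yields null.
+   */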
+
+
+ public void setRestletUtils(RestletUtils restletUtils) {
+ this.restletUtils = restletUtils;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummary.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummary.java
new file mode 100644
index 0000000..b36753e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummary.java
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class EntityTypeSummary {
+ private int totalChartHits;
+ private List<EntityTypeSummaryBucket> buckets = new ArrayList<>();
+
+ public int getTotalChartHits() {
+ return totalChartHits;
+ }
+
+ public List<EntityTypeSummaryBucket> getBuckets() {
+ return buckets;
+ }
+
+ public void setTotalChartHits(int totalChartHits) {
+ this.totalChartHits = totalChartHits;
+ }
+
+ public void setBuckets(List<EntityTypeSummaryBucket> buckets) {
+ this.buckets = buckets;
+ }
+
+ public void addBucket(EntityTypeSummaryBucket bucket) {
+ this.buckets.add(bucket);
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummaryBucket.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummaryBucket.java
new file mode 100644
index 0000000..9568232
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummaryBucket.java
@@ -0,0 +1,46 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search;
+
+public class EntityTypeSummaryBucket {
+ private int count;
+ private String key;
+
+ public int getCount() {
+ return count;
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public void setCount(int count) {
+ this.count = count;
+ }
+
+ public void setKey(String key) {
+ this.key = key;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchEntityProperties.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchEntityProperties.java
new file mode 100644
index 0000000..c790bb1
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchEntityProperties.java
@@ -0,0 +1,49 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class SearchEntityProperties {
+ private String type;
+ private Map<String, String> fields = new HashMap<>();
+
+ public String getType() {
+ return type;
+ }
+
+ public Map<String, String> getFields() {
+ return fields;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public void setFields(Map<String, String> field) {
+ this.fields = field;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchResponse.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchResponse.java
new file mode 100644
index 0000000..201c154
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchResponse.java
@@ -0,0 +1,102 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.aai.sparky.search.entity.SearchSuggestion;
+
+/**
+ * The Class SearchResponse.
+ */
+public class SearchResponse {
+
+ private long processingTimeInMs;
+ private int totalFound;
+
+ private List<SearchSuggestion> suggestions;
+
+ /**
+ * Instantiates a new search response.
+ */
+ public SearchResponse() {
+ this.suggestions = new ArrayList<SearchSuggestion>();
+ this.processingTimeInMs = 0;
+ this.totalFound = 0;
+ }
+
+ public long getProcessingTimeInMs() {
+ return processingTimeInMs;
+ }
+
+ public void setProcessingTimeInMs(long processingTimeInMs) {
+ this.processingTimeInMs = processingTimeInMs;
+ }
+
+ public int getTotalFound() {
+ return totalFound;
+ }
+
+ public void setTotalFound(int totalFound) {
+ this.totalFound = totalFound;
+ }
+
+ public List<SearchSuggestion> getSuggestions() {
+ return suggestions;
+ }
+
+ public void setSuggestions(List<SearchSuggestion> suggestions) {
+ this.suggestions = suggestions;
+ }
+
+  /**
+   * Adds a suggestion entry to the response.
+   *
+   * @param suggestionEntity the suggestion that will be converted to JSON
+   */
+  public void addSuggestion(SearchSuggestion suggestionEntity) {
+ suggestions.add(suggestionEntity);
+ }
+
+ /**
+ * Increments the total number of hits for this SearchResponse by
+ * the value passed in.
+ *
+ * @param additionalCount - Count to increment the total found
+ */
+ public void addToTotalFound(int additionalCount) {
+ totalFound += additionalCount;
+ }
+
+ @Override
+ public String toString() {
+ return "SearchResponse [processingTimeInMs=" + processingTimeInMs + ", totalFound=" + totalFound
+ + ", " + (suggestions != null ? "suggestions=" + suggestions : "") + "]";
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchServiceAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchServiceAdapter.java
new file mode 100644
index 0000000..d37997a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchServiceAdapter.java
@@ -0,0 +1,139 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.Headers;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.restclient.client.RestClient;
+import org.onap.aai.sparky.dal.rest.RestClientFactory;
+import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig;
+import org.slf4j.MDC;
+
+
+/**
+ * The Class SearchServiceAdapter.
+ */
+public class SearchServiceAdapter {
+
+ private static final String VALUE_QUERY = "query";
+
+ private RestClient client;
+ private RestEndpointConfig endpointConfig;
+ private String serviceApiVersion;
+
+ private Map<String, List<String>> commonHeaders;
+
+  /**
+   * Instantiates a new search service adapter.
+   *
+   * @param endpointConfig the search service endpoint configuration
+   * @param serviceApiVersion the search service API version
+   * @throws Exception if the underlying REST client cannot be built
+   */
+ public SearchServiceAdapter(RestEndpointConfig endpointConfig, String serviceApiVersion) throws Exception {
+
+ client = RestClientFactory.buildClient(endpointConfig);
+
+ commonHeaders = new HashMap<String, List<String>>();
+ commonHeaders.put("Accept", Arrays.asList("application/json"));
+ commonHeaders.put(Headers.FROM_APP_ID, Arrays.asList("AAI-UI"));
+
+ this.serviceApiVersion = serviceApiVersion;
+ this.endpointConfig = endpointConfig;
+ }
+
+ public String getServiceApiVersion() {
+ return serviceApiVersion;
+ }
+
+ public void setServiceApiVersion(String serviceApiVersion) {
+ this.serviceApiVersion = serviceApiVersion;
+ }
+
+ public RestEndpointConfig getEndpointConfig() {
+ return endpointConfig;
+ }
+
+ public void setEndpointConfig(RestEndpointConfig endpointConfig) {
+ this.endpointConfig = endpointConfig;
+ }
+
+ public OperationResult doPost(String url, String jsonPayload, String acceptContentType) {
+ OperationResult or = client.post(url, jsonPayload, getTxnHeader(),
+ MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE);
+ return new OperationResult(or.getResultCode(), or.getResult());
+ }
+
+ public OperationResult doGet(String url, String acceptContentType) {
+ OperationResult or =
+ client.get(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE);
+ return new OperationResult(or.getResultCode(), or.getResult());
+ }
+
+ public OperationResult doPut(String url, String payload, String acceptContentType) {
+ OperationResult or = client.put(url, payload, getTxnHeader(),
+ MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE);
+ return new OperationResult(or.getResultCode(), or.getResult());
+ }
+
+ public OperationResult doDelete(String url, String acceptContentType) {
+
+ OperationResult or =
+ client.delete(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE);
+ return new OperationResult(or.getResultCode(), or.getResult());
+ }
+
+ public Map<String, List<String>> getTxnHeader() {
+ HashMap<String, List<String>> headers = new HashMap<String, List<String>>();
+ headers.putAll(this.commonHeaders);
+ headers.put("X-TransactionId", Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID)));
+ headers.put("X-FromAppId", Arrays.asList(MDC.get(MdcContext.MDC_PARTNER_NAME)));
+ return headers;
+ }
+
+  /**
+   * Builds the full search-service query URL for the given index.
+   *
+   * @param indexName the index to query
+   * @return the full url
+   */
+ public String buildSearchServiceQueryUrl(String indexName) {
+ return buildSearchServiceUrlForApi(indexName, VALUE_QUERY);
+ }
+
+ public String buildSearchServiceUrlForApi(String indexName, String api) {
+ return String.format("https://%s:%s/services/search-data-service/%s/search/indexes/%s/%s",
+ endpointConfig.getEndpointIpAddress(), endpointConfig.getEndpointServerPort(),
+ serviceApiVersion, indexName, api);
+ }
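+
+  /*
+   * Example with illustrative configuration (host "search-svc", port "9509", API version "v1"):
+   * buildSearchServiceQueryUrl("entity-search-index") yields
+   * "https://search-svc:9509/services/search-data-service/v1/search/indexes/entity-search-index/query".
+   * The concrete values shown are hypothetical; only the URL shape is fixed by this class.
+   */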
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java
new file mode 100644
index 0000000..dfe9016
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java
@@ -0,0 +1,188 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TreeMap;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.camel.Exchange;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.logging.util.ServletUtils;
+import org.onap.aai.sparky.search.api.SearchProvider;
+import org.onap.aai.sparky.search.entity.QuerySearchEntity;
+import org.onap.aai.sparky.search.entity.SearchSuggestion;
+import org.onap.aai.sparky.search.registry.SearchProviderRegistry;
+import org.onap.aai.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class UnifiedSearchProcessor {
+
+ protected static final String HASH_ID_KEY = "hashId";
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(UnifiedSearchProcessor.class);
+
+ protected SearchProviderRegistry searchProviderRegistry;
+ protected ObjectMapper mapper;
+ protected boolean useOrderedSearchProviderKeys;
+
+ public UnifiedSearchProcessor() {
+ mapper = new ObjectMapper();
+ this.useOrderedSearchProviderKeys = false;
+ }
+
+ public boolean isUseOrderedSearchProviderKeys() {
+ return useOrderedSearchProviderKeys;
+ }
+
+ public void setUseOrderedSearchProviderKeys(boolean useOrderedSearchProviderKeys) {
+ this.useOrderedSearchProviderKeys = useOrderedSearchProviderKeys;
+ }
+
+ public void search(Exchange exchange) {
+ HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class);
+ ServletUtils.setUpMdcContext(exchange, request);
+
+ SearchResponse searchResponse = new SearchResponse();
+ long processTime = System.currentTimeMillis();
+ int totalAdded = 0;
+
+ try {
+
+ String payload = exchange.getIn().getBody(String.class);
+
+ if (payload == null || payload.isEmpty()) {
+
+ LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, "Request Payload is empty");
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 404);
+ return;
+
+ } else {
+
+ QuerySearchEntity searchRequest = mapper.readValue(payload, QuerySearchEntity.class);
+ int maxResultsPerSearch = Integer.valueOf(searchRequest.getMaxResults());
+
+ Map<String, List<SearchSuggestion>> searchProviderSuggestions =
+ new HashMap<String, List<SearchSuggestion>>();
+
+ int totalSuggestionsFromProviders = 0;
+ List<SearchSuggestion> suggestions = null;
+ for (SearchProvider searchProvider : searchProviderRegistry.getSearchProviders()) {
+ suggestions = searchProvider.search(searchRequest);
+ totalSuggestionsFromProviders += suggestions.size();
+ searchProviderSuggestions.put(searchProvider.getClass().getCanonicalName(), suggestions);
+ }
+
+ /*
+ * Using ordered search provider keys allows us to deterministically calculate how many
+ * results from each provider should be returned. At the moment, this behavior is primarily
+ * only beneficial to test classes. As there is a cost to sorted-collections in the call
+ * processing path, this behavior has been made optional.
+ */
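+
+      /*
+       * Sketch of the interleaving below (hypothetical providers A and B): with
+       * maxResults = 3, A = [a1, a2] and B = [b1, b2], the loop takes one suggestion per
+       * provider per pass, producing [a1, b1, a2]. Ordered keys make the pass order, and
+       * therefore which suggestion is dropped at the cap, deterministic.
+       */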
+
+ if (useOrderedSearchProviderKeys) {
+ searchProviderSuggestions =
+ new TreeMap<String, List<SearchSuggestion>>(searchProviderSuggestions);
+ }
+
+ if (totalSuggestionsFromProviders > 0) {
+
+ int suggestionIndex = 0;
+
+ Set<Entry<String, List<SearchSuggestion>>> searchProviderResults =
+ searchProviderSuggestions.entrySet();
+
+ while (totalAdded < maxResultsPerSearch && (totalAdded < totalSuggestionsFromProviders)) {
+
+ for (Entry<String, List<SearchSuggestion>> searchProviderResultList : searchProviderResults) {
+
+ if ((suggestionIndex <= (searchProviderResultList.getValue().size() - 1))) {
+
+ if (totalAdded < maxResultsPerSearch) {
+ searchResponse
+ .addSuggestion(searchProviderResultList.getValue().get(suggestionIndex));
+ totalAdded++;
+ }
+ }
+
+ }
+
+ suggestionIndex++;
+
+ }
+
+ }
+
+ }
+
+ searchResponse.addToTotalFound(totalAdded);
+
+ processTime = System.currentTimeMillis() - processTime;
+ searchResponse.setProcessingTimeInMs(processTime);
+ String searchResponseJson = NodeUtils.convertObjectToJson(searchResponse, true);
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 200);
+ exchange.getOut().setBody(searchResponseJson);
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_PROCESSING_REQUEST, exc);
+
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 500);
+ exchange.getOut().setBody(
+ ServletUtils.generateJsonErrorResponse("Processing error = " + exc.getMessage()),
+ String.class);
+
+ } finally {
+ /*
+ * Restore the txnId + appId from the current thread local via the MdcContext
+ */
+
+ ServletUtils.getTxnHeaders().forEach((key, value) -> {
+ exchange.getOut().setHeader(key, value);
+ });
+
+ exchange.getOut().setHeader("RequestUrl", request.getRequestURI());
+ exchange.getOut().setHeader("RequestPort", request.getLocalPort());
+
+ }
+ }
+
+ public SearchProviderRegistry getSearchProviderRegistry() {
+ return searchProviderRegistry;
+ }
+
+ public void setSearchProviderRegistry(SearchProviderRegistry searchProviderRegistry) {
+ this.searchProviderRegistry = searchProviderRegistry;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java
new file mode 100644
index 0000000..923c4d6
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java
@@ -0,0 +1,36 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.api;
+
+import java.util.List;
+
+import org.onap.aai.sparky.search.entity.QuerySearchEntity;
+import org.onap.aai.sparky.search.entity.SearchSuggestion;
+
+public interface SearchProvider {
+
+ List<SearchSuggestion> search(QuerySearchEntity queryRequest);
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java
new file mode 100644
index 0000000..070f305
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java
@@ -0,0 +1,76 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.config;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+public class SuggestionConfig {
+
+
+ private Map<String, String> pairingList;
+ private Collection<String> stopWords;
+ private String defaultPairingValue;
+  private String viSuggestionRoute;
+
+
+ public SuggestionConfig() {}
+
+
+ public Collection<String> getStopWords() {
+ return stopWords;
+ }
+
+ public void setStopWords(Collection<String> stopWords) {
+ this.stopWords = stopWords;
+ }
+
+ public Map<String, String> getPairingList() {
+ return pairingList;
+ }
+
+ public void setPairingList(HashMap<String, String> pairingList) {
+ this.pairingList = pairingList;
+ }
+
+ public String getDefaultPairingValue() {
+ return defaultPairingValue;
+ }
+
+ public void setDefaultPairingValue(String defaultPairingValue) {
+ this.defaultPairingValue = defaultPairingValue;
+ }
+
+  public String getViSuggestionRoute() {
+    return viSuggestionRoute;
+  }
+
+  public void setViSuggestionRoute(String viSuggestionRoute) {
+    this.viSuggestionRoute = viSuggestionRoute;
+  }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java
new file mode 100644
index 0000000..37059fb
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java
@@ -0,0 +1,73 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.entity;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+/**
+ * The Class QuerySearchEntity.
+ */
+public class QuerySearchEntity {
+
+ private static final String DEFAULT_MAX_RESULTS = "10";
+ public String maxResults;
+ public String queryStr;
+
+  /**
+   * Instantiates a new query search entity.
+   */
+ public QuerySearchEntity() {
+ maxResults = DEFAULT_MAX_RESULTS;
+ queryStr = null;
+ }
+
+ public String getMaxResults() {
+ return maxResults;
+ }
+
+ public void setMaxResults(String maxResults) {
+ this.maxResults = maxResults;
+ }
+
+ public String getQueryStr() {
+ return queryStr;
+ }
+
+ public void setQueryStr(String queryStr) {
+ this.queryStr = queryStr;
+ }
+
+ @JsonIgnore
+ public String[] getSearchTerms() {
+
+ if (queryStr == null) {
+ return null;
+ }
+
+ return queryStr.split(" ");
+
+ }
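+
+  /*
+   * Example (hypothetical query): with queryStr = "vserver demo", getSearchTerms() returns
+   * ["vserver", "demo"]; a null queryStr yields null rather than an empty array.
+   */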
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java
new file mode 100644
index 0000000..4529af5
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java
@@ -0,0 +1,39 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.entity;
+
+public interface SearchSuggestion {
+ public String getHashId();
+
+ public void setHashId(String hashId);
+
+ public String getRoute();
+
+ public void setRoute(String route);
+
+ public String getText();
+
+ public void setText(String searchText);
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java
new file mode 100644
index 0000000..3a01e2b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java
@@ -0,0 +1,119 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.json.JsonObject;
+import javax.ws.rs.core.MediaType;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.search.filters.config.UiFilterDataSourceConfig;
+import org.onap.aai.sparky.search.filters.entity.UiFilterEntity;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+
+/**
+ * Performs all Elasticsearch-related queries for filters used by the Sparky-FE.
+ *
+ * @author RICHARV
+ */
+public class FilterElasticSearchAdapter {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(FilterElasticSearchAdapter.class);
+ private static final String AGGS = "aggregations";
+ private static final String CONTAINER = "default";
+ private static final String BUCKETS = "buckets";
+ private static final String FILTER_VALUE_KEY = "key";
+ private ElasticSearchAdapter elasticSearchAdapter;
+
+ public FilterElasticSearchAdapter(ElasticSearchAdapter elasticSearchAdapter) {
+ this.elasticSearchAdapter = elasticSearchAdapter;
+ }
+
+ /**
+   * For a given UiFilterEntity, attempts to contact an Elasticsearch instance
+   * and fetch all possible values for the filter's field name.
+   *
+   * @param filter - Filter object against which the search will take place.
+   * @param dataSourceConfig - If present, contains the index name and field name to search against.
+   * @return - A list of strings if results were found, else an empty list.
+ */
+ public List<String> fetchValuesForFilter(UiFilterEntity filter, UiFilterDataSourceConfig dataSourceConfig) {
+ ArrayList<String> filterValues = new ArrayList<String>();
+
+ if(dataSourceConfig != null) {
+ JsonObject filterValueQuery = null;
+ if(dataSourceConfig.getPathToField() != null) {
+ filterValueQuery = FilterQueryBuilder.createNestedFilterValueQueryObject(dataSourceConfig.getFieldName(), dataSourceConfig.getPathToField());
+ } else {
+ filterValueQuery = FilterQueryBuilder.createFilterValueQueryObject(dataSourceConfig.getFieldName());
+ }
+
+ OperationResult opResult = elasticSearchAdapter.doPost(
+ elasticSearchAdapter.buildElasticSearchUrlForApi(dataSourceConfig.getIndexName(),
+ SparkyConstants.ES_SEARCH_API),
+ filterValueQuery.toString(), MediaType.APPLICATION_JSON_TYPE);
+
+ String result = opResult.getResult();
+ if(opResult.wasSuccessful() && result != null) {
+ JSONObject responseJson = new JSONObject(result);
+ JSONObject aggJson = responseJson.getJSONObject(AGGS);
+
+ JSONObject containerJson = null;
+ if(dataSourceConfig.getPathToField() != null) {
+ JSONObject nestedContainer = aggJson.getJSONObject(dataSourceConfig.getPathToField());
+ containerJson = nestedContainer.getJSONObject(dataSourceConfig.getFieldName());
+ } else {
+ containerJson = aggJson.getJSONObject(CONTAINER);
+ }
+
+ JSONArray buckets = containerJson.getJSONArray(BUCKETS);
+
+ int bucketLength = buckets.length();
+ for(int i = 0; i < bucketLength; i++) {
+ JSONObject filterBucket = buckets.getJSONObject(i);
+
+ String filterValue = filterBucket.getString(FILTER_VALUE_KEY);
+ if(filterValue != null && !filterValue.isEmpty()) {
+ filterValues.add(filterValue);
+ }
+ }
+ } else {
+ LOG.error(AaiUiMsgs.ERROR_FETCHING_FILTER_VALUES, String.valueOf(opResult.getResultCode()), filter.getFilterName());
+ }
+ }
+ filterValues.sort(String::compareToIgnoreCase);
+ return filterValues;
+ }
+}
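A sketch of how fetchValuesForFilter is typically driven. The index, doc type, and field name are placeholders, and the ElasticSearchAdapter and UiFilterEntity are assumed to be supplied elsewhere (Spring wiring and the filter config files); imports match those of the class above.

    List<String> fetchOrchestrationStatusValues(
        ElasticSearchAdapter elasticSearchAdapter, UiFilterEntity filterEntity) {
      FilterElasticSearchAdapter filterAdapter = new FilterElasticSearchAdapter(elasticSearchAdapter);
      // index name, doc type, and field name are placeholders for values normally read from filter config
      UiFilterDataSourceConfig dataSource =
          new UiFilterDataSourceConfig("entity-search-index", "default", "orchestration-status", null);
      return filterAdapter.fetchValuesForFilter(filterEntity, dataSource);
    }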
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java
new file mode 100644
index 0000000..9d404a9
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java
@@ -0,0 +1,144 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.json.JsonObject;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.camel.Exchange;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.logging.util.ServletUtils;
+import org.onap.aai.sparky.search.filters.entity.UiFilterEntity;
+import org.onap.aai.sparky.search.filters.entity.UiFiltersEntity;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class FilterProcessor {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(FilterProcessor.class);
+
+ private ObjectMapper mapper;
+ private FilteredSearchHelper filteredSearchHelper;
+
+ public FilterProcessor() {
+ this.mapper = new ObjectMapper();
+ }
+
+ public ObjectMapper getMapper() {
+ return mapper;
+ }
+
+ public FilteredSearchHelper getFilteredSearchHelper() {
+ return filteredSearchHelper;
+ }
+
+ public void setFilteredSearchHelper(FilteredSearchHelper filteredSearchHelper) {
+ this.filteredSearchHelper = filteredSearchHelper;
+ }
+
+ public void getFiltersWithValues(Exchange exchange) {
+
+ HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class);
+ ServletUtils.setUpMdcContext(exchange, request);
+
+ UiFiltersEntity viewFiltersList = null;
+ boolean wasErrorDuringFilterDiscovery = false;
+
+ try {
+ String payload = exchange.getIn().getBody(String.class);
+
+ if (payload == null || payload.isEmpty()) {
+ /* Don't throw back an error, just return an empty set */
+ LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, "Request Payload is empty");
+ wasErrorDuringFilterDiscovery = true;
+ } else {
+ String viewName = mapper.readValue(payload, JsonNode.class).get(SparkyConstants.UI_FILTER_VIEW_NAME_PARAMETER).asText();
+
+ if (viewName == null || viewName.isEmpty()) {
+ wasErrorDuringFilterDiscovery = true;
+ } else {
+ viewFiltersList = filteredSearchHelper.doFilterDiscovery(viewName);
+ }
+ }
+ } catch(Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, "FilterProcessor failed to get filter list due to error = " + exc.getMessage());
+
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 500);
+
+ exchange.getOut().setBody(
+ ServletUtils.generateJsonErrorResponse("FilterProcessor failed to get filter list due to error = " + exc.getMessage()),
+ String.class);
+
+ return;
+ }
+
+ boolean wasErrorDuringValueSearch = false;
+ if(!wasErrorDuringFilterDiscovery) {
+ try {
+ if(!viewFiltersList.getFilters().isEmpty()) {
+ List<String> filterIds = new ArrayList<String>();
+
+ for(UiFilterEntity filterEntity : viewFiltersList.getFilters()) {
+ filterIds.add(filterEntity.getFilterId());
+ }
+
+ UiFiltersEntity responseFiltersList = filteredSearchHelper.doFilterEnumeration(filterIds);
+
+ JsonObject finalResponse = UiFiltersEntityConverter.convertUiFiltersEntityToUnifiedFilterResponse(responseFiltersList);
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 200);
+ exchange.getOut().setBody(finalResponse.toString());
+ } else {
+ wasErrorDuringValueSearch = true;
+ }
+ } catch(Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, "FilterProcessor failed to generate valid unifiedFilterRequest response due to error, " + exc.getMessage());
+
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 500);
+
+ exchange.getOut().setBody(
+ ServletUtils.generateJsonErrorResponse("FilterProcessor failed to generate valid unifiedFilterRequest response due to error = " + exc.getMessage()),
+ String.class);
+
+ return;
+ }
+ }
+
+    // In the case of an error, respond with a well-formed but empty filter set
+    if(wasErrorDuringFilterDiscovery || wasErrorDuringValueSearch) {
+      exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 404);
+      exchange.getOut().setBody(UiFiltersEntityConverter.generateEmptyResponse().toString());
+ }
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterQueryBuilder.java
new file mode 100644
index 0000000..41a7b91
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterQueryBuilder.java
@@ -0,0 +1,218 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.json.Json;
+import javax.json.JsonArrayBuilder;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.search.filters.config.UiFilterConfig;
+import org.onap.aai.sparky.search.filters.entity.AggregationEntity;
+import org.onap.aai.sparky.search.filters.entity.BoolQueryBuilder;
+import org.onap.aai.sparky.search.filters.entity.FilteredAggregationQueryBuilder;
+import org.onap.aai.sparky.search.filters.entity.MatchFilterCriteriaEntity;
+import org.onap.aai.sparky.search.filters.entity.SearchFilter;
+
+/**
+ * Builds the Elasticsearch queries used for filter-related requests.
+ */
+public class FilterQueryBuilder {
+
+ private static final int EXISTING_FILTERS_LIMIT = 0;
+ private static final int SHOULD_BRANCH_LIMIT = 2;
+
+ public static JsonObject createFilteredBoolQueryObject(FiltersConfig filtersConfig, List<SearchFilter> searchFilters, int minShouldMatch, List<String> fields) {
+
+ if (searchFilters == null || searchFilters.size() == 0) {
+ return null;
+ }
+
+ int searchFilterValueSize = 0;
+
+ BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
+
+ for (SearchFilter searchFilter : searchFilters) {
+
+ searchFilterValueSize = searchFilter.getValues().size();
+
+ /*
+ * translate the filter-id into the filter-name from the oxm data model/config file
+ */
+ UiFilterConfig filter = filtersConfig.getFilterById(searchFilter.getFilterId());
+
+ if (filter == null || filter.getFilterName() == null) {
+ // log error and continue
+ } else {
+
+ String fieldName = filter.getDataSource().getFieldName();
+ if(!fields.contains(fieldName)) {
+ fields.add(fieldName);
+ }
+
+ if (searchFilterValueSize >= SHOULD_BRANCH_LIMIT) {
+ // Add should branches
+ for (String filterValue : searchFilter.getValues()) {
+ boolQueryBuilder.addShouldFilter(new MatchFilterCriteriaEntity(fieldName, filterValue));
+ }
+
+ } else if (searchFilterValueSize > EXISTING_FILTERS_LIMIT) {
+ // Add must branch
+ for (String filterValue : searchFilter.getValues()) {
+ boolQueryBuilder.addMustFilter(new MatchFilterCriteriaEntity(fieldName, filterValue));
+ }
+ }
+ }
+ }
+
+ boolQueryBuilder.setMinShouldMatch(minShouldMatch);
+
+ return boolQueryBuilder.getJsonObject();
+ }
+
+ public static JsonObject createAggregationQueryArray(FiltersConfig filtersConfig, List<SearchFilter> searchFilters) {
+
+ if (searchFilters == null || searchFilters.size() == 0) {
+ // log error
+ return null;
+ }
+
+ FilteredAggregationQueryBuilder aggQueryBuilder = new FilteredAggregationQueryBuilder();
+
+ for (SearchFilter searchFilter : searchFilters) {
+
+ /*
+ * translate the filter-id into the filter-name from the oxm data model/config file
+ */
+ UiFilterConfig filter = filtersConfig.getFilterById(searchFilter.getFilterId());
+
+ if (filter == null || filter.getFilterName() == null) {
+ // log error and continue
+ } else {
+ String fieldName = filter.getDataSource().getFieldName();
+ aggQueryBuilder.addAggregationEntity(new AggregationEntity(fieldName, fieldName, 0));
+ }
+
+ }
+
+ return aggQueryBuilder.getJsonObject();
+ }
+
+ public static JsonObject createCombinedBoolAndAggQuery(FiltersConfig filtersConfig, List<SearchFilter> searchFilters, int minShouldMatch) {
+ JsonObjectBuilder wrappedQueryBuilder = Json.createObjectBuilder();
+ if(searchFilters != null) {
+ List<String> fields = new ArrayList<String>();
+ JsonObject boolQuery = createFilteredBoolQueryObject(filtersConfig,searchFilters, minShouldMatch, fields);
+ JsonObject aggQuery = createAggregationQueryArray(filtersConfig, searchFilters);
+
+ if (boolQuery != null) {
+ wrappedQueryBuilder.add("size", 0);
+
+        JsonArrayBuilder fieldsArrayBuilder = Json.createBuilderFactory(null).createArrayBuilder(); // TODO -> Should we use a class instance factory?
+        for(String field : fields) {
+          fieldsArrayBuilder.add(field);
+        }
+        wrappedQueryBuilder.add("fields", fieldsArrayBuilder.build());
+
+ wrappedQueryBuilder.add("query", boolQuery);
+ }
+
+ if (aggQuery != null) {
+ wrappedQueryBuilder.add("aggs", aggQuery);
+ }
+ }
+ return wrappedQueryBuilder.build();
+ }
+
+ public static JsonObject createFilterValueQueryObject(String fieldValue) {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+ jsonBuilder.add("size", "0"); // avoid source data
+ buildZeroTermSummaryQuery(jsonBuilder, fieldValue);
+
+ return jsonBuilder.build();
+ }
+
+ public static JsonObject createNestedFilterValueQueryObject(String fieldValue,
+ String pathToField) {
+ JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+ jsonBuilder.add("size", "0"); // avoid source data
+ generateNestedAggregations(jsonBuilder, fieldValue, pathToField);
+
+ return jsonBuilder.build();
+ }
+
+ public static void buildZeroTermSummaryQuery(JsonObjectBuilder jsonBuilder, String fieldValue) {
+ JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder();
+ getSummaryAggsBlob(aggsBlobBuilder, fieldValue, 0);
+ jsonBuilder.add("aggs", aggsBlobBuilder.build());
+ }
+
+ public static void getSummaryAggsBlob(JsonObjectBuilder aggsBlobBuilder, String fieldValue,
+ int resultSize) {
+ JsonObjectBuilder fieldBuilder =
+ Json.createObjectBuilder().add("field", fieldValue).add("size", resultSize);
+ JsonObject aggsFieldBlob = fieldBuilder.build();
+ JsonObjectBuilder defaultBlobBuilder = Json.createObjectBuilder().add("terms", aggsFieldBlob);
+ JsonObject defaultBlob = defaultBlobBuilder.build();
+ aggsBlobBuilder.add("default", defaultBlob);
+ }
+
+ public static void addNestedSummaryAggsBlob(JsonObjectBuilder nestedAggsBuilder,
+ String containerValue, String fieldValue, int resultSize) {
+ JsonObjectBuilder fieldBuilder = Json.createObjectBuilder()
+ .add("field", containerValue + "." + fieldValue).add("size", resultSize);
+ JsonObject aggsFieldObject = fieldBuilder.build();
+
+ JsonObjectBuilder termBuilder = Json.createObjectBuilder().add("terms", aggsFieldObject);
+ JsonObject termObject = termBuilder.build();
+
+ JsonObjectBuilder namedAggsBuilder = Json.createObjectBuilder().add(fieldValue, termObject);
+ JsonObject namedAggsObject = namedAggsBuilder.build();
+
+ nestedAggsBuilder.add("aggs", namedAggsObject);
+ }
+
+ public static void generateNestedAggregations(JsonObjectBuilder jsonBuilder, String fieldValue,
+ String pathToField) {
+ JsonObjectBuilder nestedAggsBuilder = Json.createObjectBuilder();
+
+ JsonObjectBuilder pathObjectBuilder = Json.createObjectBuilder().add("path", pathToField);
+ JsonObject nestedPathObject = pathObjectBuilder.build();
+
+ JsonObjectBuilder nestedObjectBuilder =
+ Json.createObjectBuilder().add("nested", nestedPathObject);
+
+ addNestedSummaryAggsBlob(nestedObjectBuilder, pathToField, fieldValue, 0);
+
+ JsonObject nestedObject = nestedObjectBuilder.build();
+ nestedAggsBuilder.add(pathToField, nestedObject);
+
+ jsonBuilder.add("aggs", nestedAggsBuilder.build());
+ }
+}
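For reference, the aggregation query produced by createFilterValueQueryObject for a simple (non-nested) field has the shape below; the field name is a placeholder.

    JsonObject query = FilterQueryBuilder.createFilterValueQueryObject("orchestration-status");
    // query.toString() ->
    // {"size":"0","aggs":{"default":{"terms":{"field":"orchestration-status","size":0}}}}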
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilteredSearchHelper.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilteredSearchHelper.java
new file mode 100644
index 0000000..0e981b4
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilteredSearchHelper.java
@@ -0,0 +1,158 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.search.filters.config.FiltersDetailsConfig;
+import org.onap.aai.sparky.search.filters.config.UiFilterConfig;
+import org.onap.aai.sparky.search.filters.config.UiFilterDataSourceConfig;
+import org.onap.aai.sparky.search.filters.config.UiFilterListItemConfig;
+import org.onap.aai.sparky.search.filters.config.UiViewListItemConfig;
+import org.onap.aai.sparky.search.filters.entity.UiFilterEntity;
+import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity;
+import org.onap.aai.sparky.search.filters.entity.UiFiltersEntity;
+
+public class FilteredSearchHelper {
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(FilteredSearchHelper.class);
+
+ private FiltersConfig filtersConfig;
+ private Map<String, UiFilterConfig> filtersMap = null;
+ private FilterElasticSearchAdapter filterSearchAdapter = null;
+
+  public FilteredSearchHelper(FiltersConfig filterConfig, FilterElasticSearchAdapter filterElasticSearchAdapter) {
+ this.filtersConfig = filterConfig;
+ this.filterSearchAdapter = filterElasticSearchAdapter;
+
+ if (filtersMap == null) {
+ filtersMap = new HashMap<>();
+
+ final FiltersDetailsConfig uiFiltersConfig = filterConfig.getFiltersConfig();
+
+ if (uiFiltersConfig != null) {
+ for (UiFilterConfig filter : uiFiltersConfig.getFilters()) {
+ filtersMap.put(filter.getFilterId(), filter);
+ }
+ }
+ }
+
+ }
+
+ public FiltersConfig getFiltersConfig() {
+ return filtersConfig;
+ }
+
+ public void setFiltersConfig(FiltersConfig filterConfig) {
+ this.filtersConfig = filterConfig;
+ }
+
+ public UiFiltersEntity doFilterDiscovery(String viewName) {
+ List<UiViewListItemConfig> views = filtersConfig.getViewsConfig().getViews();
+ List<UiFilterListItemConfig> filters = null;
+ UiFiltersEntity viewFiltersList = new UiFiltersEntity();
+
+ if(viewName != null) {
+ for (UiViewListItemConfig view: views) {
+ if (viewName.equalsIgnoreCase(view.getViewName())) {
+ filters = view.getFilters();
+ break;
+ }
+ }
+
+ if (filters == null) {
+ LOG.error(AaiUiMsgs.VIEW_NAME_NOT_SUPPORTED, viewName);
+ } else {
+ for (UiFilterListItemConfig filter : filters) {
+ FiltersDetailsConfig filtersDetailsConfig = filtersConfig.getFiltersConfig();
+
+ for (UiFilterConfig filterConfig: filtersDetailsConfig.getFilters()) {
+ if (filterConfig.getFilterId().equals(filter.getFilterId())) {
+ UiFilterEntity filterEntity = new UiFilterEntity(filterConfig);
+ if(filter.getDefaultValue() != null) {
+ filterEntity.setDefaultValue(filter.getDefaultValue());
+ }
+ viewFiltersList.addFilter(filterEntity);
+ }
+ }
+ }
+ }
+ }
+ return viewFiltersList;
+ }
+
+ public UiFiltersEntity doFilterEnumeration(List<String> requestedFilterIds) {
+ UiFiltersEntity viewFiltersList = new UiFiltersEntity();
+
+ for (String requestedFilterId : requestedFilterIds) {
+ if (null == filtersMap.get(requestedFilterId)) {
+ String errorMessage = "Requested filter ID '" + requestedFilterId + "' does not exist.";
+ LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, errorMessage);
+ } else {
+ UiFilterConfig sourceData = filtersMap.get(requestedFilterId);
+ UiFilterEntity filterEntity = new UiFilterEntity(sourceData);
+ this.getFilterEnumeration(filterEntity, sourceData);
+ viewFiltersList.addFilter(filterEntity);
+ }
+ }
+
+ return viewFiltersList;
+ }
+
+ public void getFilterEnumeration(UiFilterEntity filter, UiFilterConfig sourceData) {
+ List<String> filterValues = filterSearchAdapter.fetchValuesForFilter(filter, sourceData.getDataSource());
+
+ for(String value : filterValues) {
+ UiFilterValueEntity valueEntity = new UiFilterValueEntity();
+ valueEntity.setDisplayName(value);
+ valueEntity.setFilterValue(value);
+ filter.addFilterValue(valueEntity);
+ }
+ }
+
+ public Map<String, UiFilterConfig> getFiltersMap() {
+ return filtersMap;
+ }
+
+ public void setFiltersMap(Map<String, UiFilterConfig> filtersMap) {
+ this.filtersMap = filtersMap;
+ }
+
+ public UiFilterDataSourceConfig getFilterDataSource(String filterId) {
+ UiFilterConfig filterConfig = filtersMap.get(filterId);
+ UiFilterDataSourceConfig returnValue = null;
+
+ if(filterConfig != null) {
+ returnValue = filterConfig.getDataSource();
+ }
+
+ return returnValue;
+ }
+}
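A sketch of the discovery-then-enumeration flow, mirroring what FilterProcessor.getFiltersWithValues() does; the view name is a placeholder.

    UiFiltersEntity discoverAndEnumerate(FilteredSearchHelper filteredSearchHelper) {
      UiFiltersEntity discovered = filteredSearchHelper.doFilterDiscovery("vnfSearch"); // placeholder view name
      List<String> filterIds = new ArrayList<>();
      for (UiFilterEntity filterEntity : discovered.getFilters()) {
        filterIds.add(filterEntity.getFilterId());
      }
      return filteredSearchHelper.doFilterEnumeration(filterIds);
    }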
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/UiFiltersEntityConverter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/UiFiltersEntityConverter.java
new file mode 100644
index 0000000..46f62ac
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/UiFiltersEntityConverter.java
@@ -0,0 +1,180 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters;
+
+import java.util.List;
+
+import javax.json.Json;
+import javax.json.JsonArray;
+import javax.json.JsonArrayBuilder;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+
+import org.onap.aai.sparky.search.filters.config.UiFilterOptionsValuesConfig;
+import org.onap.aai.sparky.search.filters.entity.UiFilterEntity;
+import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity;
+import org.onap.aai.sparky.search.filters.entity.UiFiltersEntity;
+
+public class UiFiltersEntityConverter {
+
+ private static final String KEY_TYPE = "type";
+ private static final String KEY_MULTISELECT = "multiSelect";
+ private static final String KEY_WATERMARK = "watermark";
+ private static final String KEY_CONTROLS = "controls";
+ private static final String KEY_LABEL = "label";
+ private static final String KEY_FILTERS = "filters";
+ private static final String KEY_DECODE = "decode";
+ private static final String KEY_CODE = "code";
+ private static final String KEY_DEFAULT_VALUE = "defaultValue";
+
+ /**
+ * Converts a UiFiltersEntity into a JSON object to satisfy a new (as of 23 Oct 2017)
+ * filter library being used in the FE.
+ *
+ * @param entityToConvert - The UiFiltersEntity to be converted into a JSON response.
+ * @return A JsonObject representing the passed in UiFiltersEntity.
+ */
+ public static JsonObject convertUiFiltersEntityToUnifiedFilterResponse(UiFiltersEntity entityToConvert) {
+ JsonObjectBuilder filterBuilder = Json.createObjectBuilder();
+
+ if(entityToConvert != null) {
+ List<UiFilterEntity> filterEntities = entityToConvert.getFilters();
+ if(filterEntities != null) {
+ for(UiFilterEntity entity : filterEntities) {
+ filterBuilder.add(entity.getFilterId(), generateFilterObject(entity));
+ }
+ }
+ }
+
+ JsonObjectBuilder finalObject = Json.createObjectBuilder();
+ finalObject.add(KEY_FILTERS, filterBuilder.build());
+ return finalObject.build();
+ }
+
+ /**
+   * Generates the core body of a single filter within the JSON body.
+ *
+ * @param entity - The filter entity (loaded from config and populated from data store).
+ * @return A JsonObject representing the core data of a filter.
+ */
+ private static JsonObject generateFilterObject(UiFilterEntity entity) {
+ JsonObjectBuilder filterBuilder = Json.createObjectBuilder();
+
+ filterBuilder.add(KEY_LABEL, entity.getDisplayName());
+ filterBuilder.add(KEY_CONTROLS, generateControlObject(entity, entity.getFilterValueList()));
+
+ return filterBuilder.build();
+ }
+
+ /**
+ * Generates the "controls" object within the filter JSON.
+ *
+ * @param filterEntity - The filter entity on which this filter will be based.
+ * @param filterValues - The list of values associated with the filter
+ * from data store queries.
+ * @return A JsonObject representing the "controls" object of the filter JSON.
+ */
+ private static JsonObject generateControlObject(UiFilterEntity filterEntity, List<UiFilterValueEntity> filterValues) {
+ JsonObjectBuilder controls = Json.createObjectBuilder();
+ JsonObjectBuilder subControl = Json.createObjectBuilder();
+
+ subControl.add(KEY_TYPE, filterEntity.getDataType());
+ subControl.add(KEY_MULTISELECT, filterEntity.getMultiSelect());
+ subControl.add(KEY_WATERMARK, filterEntity.getWatermark());
+
+ if(filterEntity.getDefaultValue() != null && !filterEntity.getDefaultValue().getCode().isEmpty() && !filterEntity.getDefaultValue().getDecode().isEmpty()) {
+ JsonObjectBuilder defaultValueBuilder = Json.createObjectBuilder();
+
+ defaultValueBuilder.add(KEY_DECODE, filterEntity.getDefaultValue().getDecode());
+ defaultValueBuilder.add(KEY_CODE, filterEntity.getDefaultValue().getCode());
+
+ subControl.add(KEY_DEFAULT_VALUE, defaultValueBuilder.build());
+ }
+
+ if(filterEntity.getOptionsValues() == null || filterEntity.getOptionsValues().isEmpty()) {
+ subControl.add(filterEntity.getOptionsType(), generateOptionsObject(filterValues));
+ } else {
+ subControl.add(filterEntity.getOptionsType(), generateOptionsValuesObject(filterEntity.getOptionsValues()));
+ }
+
+ controls.add(filterEntity.getFilterName(), subControl.build());
+
+ return controls.build();
+ }
+
+ /**
+ * Creates a JsonArray representing the list of options for a filter.
+   * Similar to generateOptionsObject, but built from config-defined option values.
+ *
+ * @param optionsValues - Values that are loaded from config.
+ * @return JsonArray of options for a filter.
+ */
+ private static JsonArray generateOptionsValuesObject(List<UiFilterOptionsValuesConfig> optionsValues) {
+ JsonArrayBuilder optionsBuilder = Json.createArrayBuilder();
+
+ if(optionsValues != null && !optionsValues.isEmpty()) {
+ for(UiFilterOptionsValuesConfig optionValue : optionsValues) {
+ JsonObjectBuilder option = Json.createObjectBuilder();
+
+ option.add(KEY_DECODE, optionValue.getDecode());
+ option.add(KEY_CODE, optionValue.getCode());
+
+ optionsBuilder.add(option.build());
+ }
+ }
+
+ return optionsBuilder.build();
+ }
+
+ /**
+ * Creates a JsonArray representing the list of options for a filter.
+ *
+   * @param filterValues - The list of values associated with a filter (likely from a data store query).
+ * @return JsonArray of options for a filter.
+ */
+ private static JsonArray generateOptionsObject(List<UiFilterValueEntity> filterValues) {
+ JsonArrayBuilder optionsBuilder = Json.createArrayBuilder();
+
+ if(filterValues != null && !filterValues.isEmpty()) {
+ for(UiFilterValueEntity valueEntity : filterValues) {
+ JsonObjectBuilder option = Json.createObjectBuilder();
+
+ option.add(KEY_DECODE, valueEntity.getDisplayName());
+ option.add(KEY_CODE, valueEntity.getFilterValue());
+
+ optionsBuilder.add(option.build());
+ }
+ }
+
+ return optionsBuilder.build();
+ }
+
+ public static JsonObject generateEmptyResponse() {
+ JsonObjectBuilder filterBuilder = Json.createObjectBuilder();
+ JsonObjectBuilder finalObject = Json.createObjectBuilder();
+ finalObject.add(KEY_FILTERS, filterBuilder.build());
+ return finalObject.build();
+ }
+}
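The unified filter response produced above has the general shape shown here; all values are placeholders, and the "options" key stands in for whatever optionsType a given filter is configured with.

    // {
    //   "filters": {
    //     "1": {
    //       "label": "Orchestration Status",
    //       "controls": {
    //         "orchestration-status": {
    //           "type": "dropDown",
    //           "multiSelect": "false",
    //           "watermark": "Any Orchestration Status",
    //           "options": [ { "decode": "Active", "code": "active" } ]
    //         }
    //       }
    //     }
    //   }
    // }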
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java
new file mode 100644
index 0000000..3aded85
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java
@@ -0,0 +1,158 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.config;
+
+import java.io.File;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.config.SparkyResourceLoader;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class FiltersConfig {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(FiltersConfig.class);
+
+ private String filtersFileName;
+
+ private String viewsFileName;
+
+ private FiltersForViewsConfig viewsConfig;
+
+ private FiltersDetailsConfig filtersConfig;
+
+ private SparkyResourceLoader resourceLoader;
+
+ public FiltersConfig() {
+ //exposed for testing
+ }
+
+ public FiltersConfig(String filtersFileName, String viewsFileName, SparkyResourceLoader resourceLoader) {
+ this.filtersFileName = filtersFileName;
+ this.viewsFileName = viewsFileName;
+ this.resourceLoader = resourceLoader;
+
+ initializeFilters();
+ }
+
+ /**
+ * Initialize config.
+ */
+ private void initializeFilters() {
+ viewsConfig = this.readUiViewsConfig();
+ filtersConfig = this.readUiFiltersConfig();
+ }
+
+ public String getViewsFileName() {
+ return viewsFileName;
+ }
+
+ public void setViewsFileName(String viewsFileName) {
+ this.viewsFileName = viewsFileName;
+ }
+
+ public String getFiltersFileName() {
+ return filtersFileName;
+ }
+
+ public void setFiltersFileName(String filtersFileName) {
+ this.filtersFileName = filtersFileName;
+ }
+
+ public FiltersForViewsConfig getViewsConfig() {
+ return viewsConfig;
+ }
+
+ public void setViewsConfig(FiltersForViewsConfig filtersMapEntity) {
+ this.viewsConfig = filtersMapEntity;
+ }
+
+ public FiltersDetailsConfig getFiltersConfig() {
+ return filtersConfig;
+ }
+
+ public UiFilterConfig getFilterById(String filterId) {
+ for ( UiFilterConfig filter : filtersConfig.getFilters()) {
+ if ( filter.getFilterId().equals(filterId)) {
+ return filter;
+ }
+ }
+
+ return null;
+ }
+
+ public void setFiltersConfig(FiltersDetailsConfig filtersConfig) {
+ this.filtersConfig = filtersConfig;
+ }
+
+ public FiltersDetailsConfig readUiFiltersConfig() {
+ ObjectMapper mapper = new ObjectMapper();
+ FiltersDetailsConfig filtersConfig = null;
+ try{
+ filtersConfig = mapper.readValue(resourceLoader.getResourceAsFile(this.getFiltersFileName(),true), FiltersDetailsConfig.class);
+ } catch (Exception e){
+ LOG.error(AaiUiMsgs.ERROR_READING_JSON_SCHEMA, this.getFiltersFileName());
+ }
+
+ return filtersConfig;
+ }
+
+ public FiltersForViewsConfig readUiViewsConfig() {
+ ObjectMapper mapper = new ObjectMapper();
+ FiltersForViewsConfig viewsConfig = null;
+
+ try {
+ viewsConfig = mapper.readValue(resourceLoader.getResourceAsFile(this.getViewsFileName(),true), FiltersForViewsConfig.class);
+ } catch (Exception e){
+ LOG.error(AaiUiMsgs.ERROR_READING_JSON_SCHEMA, this.getViewsFileName());
+ }
+
+ return viewsConfig;
+ }
+
+ public void initializeFiltersDetailsConfig(File filtersFile) {
+ ObjectMapper mapper = new ObjectMapper();
+ try{
+ this.filtersConfig = mapper.readValue(filtersFile, FiltersDetailsConfig.class);
+ } catch (Exception e){
+ LOG.error(AaiUiMsgs.ERROR_READING_JSON_SCHEMA, filtersFile.getAbsolutePath());
+ }
+ }
+
+ public void initializeFiltersForViewsConfig(File viewsFile) {
+ ObjectMapper mapper = new ObjectMapper();
+
+ try {
+ this.viewsConfig = mapper.readValue(viewsFile, FiltersForViewsConfig.class);
+ } catch (Exception e){
+ LOG.error(AaiUiMsgs.ERROR_READING_JSON_SCHEMA, viewsFile.getAbsolutePath());
+ }
+
+ }
+
+}
+
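A sketch of typical construction of FiltersConfig; the file names are placeholders, and the SparkyResourceLoader is assumed to be supplied elsewhere (e.g., by Spring configuration).

    FiltersConfig buildFiltersConfig(SparkyResourceLoader resourceLoader) {
      FiltersConfig filtersConfig =
          new FiltersConfig("filters.json", "views.json", resourceLoader);
      return filtersConfig;
    }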
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfig.java
new file mode 100644
index 0000000..ab5e1d4
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfig.java
@@ -0,0 +1,58 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+
+public class FiltersDetailsConfig {
+ @JsonProperty("filters")
+ private List<UiFilterConfig> filters = new ArrayList<UiFilterConfig>();
+
+ public FiltersDetailsConfig(){}
+
+ @JsonCreator
+ public FiltersDetailsConfig(@JsonProperty("filters") final List<UiFilterConfig> filters) {
+ this.filters = filters;
+ }
+
+ public List<UiFilterConfig> getFilters() {
+ return filters;
+ }
+
+ public void setFilters(List<UiFilterConfig> filters) {
+ this.filters = filters;
+ }
+
+ @Override
+ public String toString() {
+ return "UiFiltersConfig [filters=" + filters + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfig.java
new file mode 100644
index 0000000..1d0adfe
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfig.java
@@ -0,0 +1,57 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class FiltersForViewsConfig {
+
+ private List<UiViewListItemConfig> views = new ArrayList<UiViewListItemConfig>();
+
+ public FiltersForViewsConfig(){}
+
+ @JsonCreator
+ public FiltersForViewsConfig(@JsonProperty("views") final List<UiViewListItemConfig> views) {
+ this.views = views;
+ }
+
+ @JsonProperty("views")
+ public List<UiViewListItemConfig> getViews() {
+ return views;
+ }
+
+ public void setViews(List<UiViewListItemConfig> views) {
+ this.views = views;
+ }
+
+ @Override
+ public String toString() {
+ return "UiViewToFilterMappingEntity [allUiViews=" + views + "]";
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterConfig.java
new file mode 100644
index 0000000..d3ef9ba
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterConfig.java
@@ -0,0 +1,188 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.config;
+
+import java.util.List;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+@JsonInclude(Include.NON_NULL)
+public class UiFilterConfig {
+
+ @JsonProperty("filterId")
+ private String filterId;
+
+ @JsonProperty("filterName")
+ private String filterName;
+
+ @JsonProperty("displayName")
+ private String displayName;
+
+ @JsonProperty("dataType")
+ private String dataType;
+
+ @JsonProperty("multiSelect")
+ private String multiSelect;
+
+ @JsonProperty("watermark")
+ private String watermark;
+
+ @JsonProperty("defaultValue")
+ private UiFilterOptionsValuesConfig defaultValue;
+
+ @JsonProperty("optionsType")
+ private String optionsType;
+
+ @JsonProperty("optionsValues")
+ private List<UiFilterOptionsValuesConfig> optionsValues;
+
+ @JsonProperty("dataSource")
+ private UiFilterDataSourceConfig dataSource = new UiFilterDataSourceConfig();
+
+ @JsonCreator
+ public UiFilterConfig(@JsonProperty("filterId") final String filterId,
+ @JsonProperty("filterName") final String filterName,
+ @JsonProperty("displayName") final String displayName,
+ @JsonProperty("dataType") final String dataType,
+ @JsonProperty("multiSelect") final String multiSelect,
+ @JsonProperty("watermark") final String watermark,
+ @JsonProperty("defaultValue") final UiFilterOptionsValuesConfig defaultValue,
+ @JsonProperty("optionsType") final String optionsType,
+ @JsonProperty("optionsValues") final List<UiFilterOptionsValuesConfig> optionsValues,
+ @JsonProperty("dataSource") final UiFilterDataSourceConfig dataSource
+ ) {
+ this.filterId = filterId;
+ this.filterName = filterName;
+ this.displayName = displayName;
+ this.dataType = dataType;
+ this.multiSelect = multiSelect;
+ this.watermark = watermark;
+ this.defaultValue = defaultValue;
+ this.optionsType = optionsType;
+ this.optionsValues = optionsValues;
+ this.dataSource = dataSource;
+ }
+
+ @JsonProperty("filterId")
+ public String getFilterId() {
+ return filterId;
+ }
+
+ public void setFilterId(String filterId) {
+ this.filterId = filterId;
+ }
+
+ @JsonProperty("filterName")
+ public String getFilterName() {
+ return filterName;
+ }
+
+ public void setFilterName(String filterName) {
+ this.filterName = filterName;
+ }
+
+ @JsonProperty("displayName")
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public void setDisplayName(String displayName) {
+ this.displayName = displayName;
+ }
+
+ @JsonProperty("dataType")
+ public String getDataType() {
+ return dataType;
+ }
+
+ public void setDataType(String dataType) {
+ this.dataType = dataType;
+ }
+
+ @JsonProperty("multiSelect")
+ public String getMultiSelect() {
+ return multiSelect;
+ }
+
+ public void setMultiSelect(String multiSelect) {
+ this.multiSelect = multiSelect;
+ }
+
+ @JsonProperty("watermark")
+ public String getWatermark() {
+ return watermark;
+ }
+
+ public void setWatermark(String watermark) {
+ this.watermark = watermark;
+ }
+
+ @JsonProperty("defaultValue")
+ public UiFilterOptionsValuesConfig getDefaultValue() {
+ return defaultValue;
+ }
+
+ public void setDefaultValue(UiFilterOptionsValuesConfig defaultValue) {
+ this.defaultValue = defaultValue;
+ }
+
+ @JsonProperty("optionsType")
+ public String getOptionsType() {
+ return optionsType;
+ }
+
+ public void setOptionsType(String optionsType) {
+ this.optionsType = optionsType;
+ }
+ @JsonProperty("optionsValues")
+ public List<UiFilterOptionsValuesConfig> getOptionsValues() {
+ return optionsValues;
+ }
+
+ public void setOptionsValues(List<UiFilterOptionsValuesConfig> optionsValues) {
+ this.optionsValues = optionsValues;
+ }
+
+ @JsonProperty("dataSource")
+ public UiFilterDataSourceConfig getDataSource() {
+ return dataSource;
+ }
+
+ public void setDataSource(UiFilterDataSourceConfig dataSource) {
+ this.dataSource = dataSource;
+ }
+
+ @Override
+ public String toString() {
+ return "UiFilterConfig [filterId=" + filterId + ", filterName=" + filterName + ", displayName="
+ + displayName + ", dataType=" + dataType + ", multiSelect=" + multiSelect + ", watermark="
+ + watermark + ", optionsType=" + optionsType + ", optionsValues=" + optionsValues
+ + ", dataSource=" + dataSource + "]";
+ }
+}
+
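A hypothetical filters-file entry that Jackson binds to UiFilterConfig; all values are placeholders, and only the property names come from the annotations above.

    // {
    //   "filterId": "1",
    //   "filterName": "orchestration-status",
    //   "displayName": "Orchestration Status",
    //   "dataType": "dropDown",
    //   "multiSelect": "false",
    //   "watermark": "Any Orchestration Status",
    //   "optionsType": "options",
    //   "dataSource": {
    //     "indexName": "entity-search-index",
    //     "docType": "default",
    //     "fieldName": "orchestration-status"
    //   }
    // }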
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfig.java
new file mode 100644
index 0000000..4f9e2da
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfig.java
@@ -0,0 +1,99 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.config;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+@JsonInclude(Include.NON_NULL)
+public class UiFilterDataSourceConfig {
+
+ @JsonProperty("indexName")
+ private String indexName;
+
+ @JsonProperty("docType")
+ private String docType;
+
+ @JsonProperty("fieldName")
+ private String fieldName;
+
+ @JsonProperty("pathToField")
+ private String pathToField;
+
+ public UiFilterDataSourceConfig(){}
+
+ @JsonCreator
+ public UiFilterDataSourceConfig(@JsonProperty("indexName") final String indexName, @JsonProperty("docType") final String docType, @JsonProperty("fieldName") final String fieldName, @JsonProperty("pathToField") final String pathToField) {
+ this.indexName = indexName;
+ this.docType = docType;
+ this.fieldName = fieldName;
+ this.pathToField = pathToField;
+ }
+
+ @JsonProperty("indexName")
+ public String getIndexName() {
+ return indexName;
+ }
+
+ public void setIndexName(String indexName) {
+ this.indexName = indexName;
+ }
+
+ @JsonProperty("docType")
+ public String getDocType() {
+ return docType;
+ }
+
+ public void setDocType(String docType) {
+ this.docType = docType;
+ }
+
+ @JsonProperty("fieldName")
+ public String getFieldName() {
+ return fieldName;
+ }
+
+ public void setFieldName(String fieldName) {
+ this.fieldName = fieldName;
+ }
+
+ @JsonProperty("pathToField")
+ public String getPathToField() {
+ return pathToField;
+ }
+
+ public void setPathToField(String pathToField) {
+ this.pathToField = pathToField;
+ }
+
+ @Override
+ public String toString() {
+ return "UiFilterDataSourceConfig [indexName=" + indexName + ", docType=" + docType
+ + ", fieldName=" + fieldName + ", pathToField=" + pathToField + "]";
+ }
+}
+
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfig.java
new file mode 100644
index 0000000..2f955fc
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfig.java
@@ -0,0 +1,70 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.config;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+@JsonInclude(Include.NON_NULL)
+public class UiFilterListItemConfig {
+ @JsonProperty("filterId")
+ private String filterId;
+
+ @JsonProperty("defaultValue")
+ private UiFilterOptionsValuesConfig defaultValue;
+
+ @JsonCreator
+ public UiFilterListItemConfig(@JsonProperty("filterId") final String filterId,
+ @JsonProperty("defaultValue") final UiFilterOptionsValuesConfig defaultValue) {
+ this.filterId = filterId;
+ this.defaultValue = defaultValue;
+ }
+
+ @JsonProperty("filterId")
+ public String getFilterId() {
+ return filterId;
+ }
+
+ public void setFilterId(String filterId) {
+ this.filterId = filterId;
+ }
+
+ @JsonProperty("defaultValue")
+ public UiFilterOptionsValuesConfig getDefaultValue() {
+ return defaultValue;
+ }
+
+ public void setDefaultValue(UiFilterOptionsValuesConfig defaultValue) {
+ this.defaultValue = defaultValue;
+ }
+
+ @Override
+ public String toString() {
+ return "FilterListItemEntity [filterId=" + filterId + ", defaultValue=" + defaultValue + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfig.java
new file mode 100644
index 0000000..cf45d21
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfig.java
@@ -0,0 +1,68 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.config;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+@JsonInclude(Include.NON_NULL)
+public class UiFilterOptionsValuesConfig {
+ @JsonProperty("decode")
+ private String decode;
+
+ @JsonProperty("code")
+ private String code;
+
+ @JsonCreator
+ public UiFilterOptionsValuesConfig(@JsonProperty("decode") final String decode, @JsonProperty("code") final String code) {
+ this.decode = decode;
+ this.code = code;
+ }
+
+ @JsonProperty("decode")
+ public String getDecode() {
+ return decode;
+ }
+
+ public void setDecode(String decode) {
+ this.decode = decode;
+ }
+
+ @JsonProperty("code")
+ public String getCode() {
+ return code;
+ }
+
+ public void setCode(String code) {
+ this.code = code;
+ }
+
+ @Override
+ public String toString() {
+ return "UiFilterOptionsValuesConfig [decode=" + decode + ", code=" + code + "]";
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfig.java
new file mode 100644
index 0000000..1d70314
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfig.java
@@ -0,0 +1,68 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class UiViewListItemConfig {
+ @JsonProperty("viewName")
+ private String viewName;
+
+ private List<UiFilterListItemConfig> filters = new ArrayList<UiFilterListItemConfig>();
+
+ @JsonCreator
+ public UiViewListItemConfig(@JsonProperty("viewName") final String viewName, @JsonProperty("filters") final List<UiFilterListItemConfig> filters) {
+ this.viewName = viewName;
+ this.filters = filters;
+ }
+
+ @JsonProperty("viewName")
+ public String getViewName() {
+ return viewName;
+ }
+
+ public void setViewName(String viewName) {
+ this.viewName = viewName;
+ }
+
+ @JsonProperty("filters")
+ public List<UiFilterListItemConfig> getFilters() {
+ return filters;
+ }
+
+ public void setListOfFilters(List<UiFilterListItemConfig> filters) {
+ this.filters = filters;
+ }
+
+ @Override
+ public String toString() {
+ return "UiViewEntity [viewName=" + viewName + ", filters=" + filters + "]";
+ }
+
+}
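To illustrate how these Jackson-annotated config classes bind, here is a small sketch that deserializes a hypothetical per-view filter configuration fragment. The JSON shape, the view name, and the use of com.fasterxml.jackson.databind.ObjectMapper are illustrative assumptions only, and the fragment assumes a caller that can propagate the checked exception thrown by readValue.

    ObjectMapper mapper = new ObjectMapper();
    String json = "{\"viewName\":\"vnfSearch\","
        + "\"filters\":[{\"filterId\":\"1\","
        + "\"defaultValue\":{\"decode\":\"Today\",\"code\":\"last_0_hours\"}}]}";

    UiViewListItemConfig view = mapper.readValue(json, UiViewListItemConfig.class);
    System.out.println(view.getViewName());                      // vnfSearch
    System.out.println(view.getFilters().get(0).getFilterId());  // 1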
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/AggregationEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/AggregationEntity.java
new file mode 100644
index 0000000..3fc61ac
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/AggregationEntity.java
@@ -0,0 +1,80 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.entity;
+
+import javax.json.Json;
+import javax.json.JsonBuilderFactory;
+import javax.json.JsonObject;
+
+public class AggregationEntity {
+
+ private String aggregationName;
+ private String aggregationFieldName;
+ private int size;
+
+ public AggregationEntity(String aggName, String fieldName, int size) {
+
+ this.aggregationName = aggName;
+ this.aggregationFieldName = fieldName;
+ this.size = size;
+ }
+
+ public String getAggregationName() {
+ return aggregationName;
+ }
+
+ public void setAggregationName(String aggregationName) {
+ this.aggregationName = aggregationName;
+ }
+
+ public String getAggregationFieldName() {
+ return aggregationFieldName;
+ }
+
+ public void setAggregationFieldName(String aggregationFieldName) {
+ this.aggregationFieldName = aggregationFieldName;
+ }
+
+ public int getSize() {
+ return size;
+ }
+
+ public void setSize(int size) {
+ this.size = size;
+ }
+
+ public JsonObject getJsonObject() {
+
+ JsonBuilderFactory factory = Json.createBuilderFactory(null);
+
+ return factory.createObjectBuilder()
+ .add("terms", factory.createObjectBuilder()
+ .add("field", aggregationFieldName)
+ .add("size", size))
+ .build();
+ }
+
+
+}
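For reference, a minimal sketch of the JSON clause that getJsonObject() produces for a single terms aggregation; the aggregation name, field, and size below are illustrative values only.

    AggregationEntity agg = new AggregationEntity("prov-status", "prov-status", 0);
    System.out.println(agg.getJsonObject());
    // {"terms":{"field":"prov-status","size":0}}

Note that the aggregation name is not part of this clause; it is applied as the enclosing key by FilteredAggregationQueryBuilder.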
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/BoolQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/BoolQueryBuilder.java
new file mode 100644
index 0000000..750270a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/BoolQueryBuilder.java
@@ -0,0 +1,123 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.entity;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.json.Json;
+import javax.json.JsonArray;
+import javax.json.JsonArrayBuilder;
+import javax.json.JsonBuilderFactory;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+
+public class BoolQueryBuilder {
+
+ private List<MatchFilterCriteriaEntity> mustFilters;
+ private List<MatchFilterCriteriaEntity> shouldFilters;
+
+ private int minShouldMatch;
+
+ public BoolQueryBuilder() {
+
+ mustFilters = new ArrayList<MatchFilterCriteriaEntity>();
+ shouldFilters = new ArrayList<MatchFilterCriteriaEntity>();
+ minShouldMatch = -1;
+
+ }
+
+ public void addMustFilter(MatchFilterCriteriaEntity filter) {
+
+ if (!mustFilters.contains(filter)) {
+ mustFilters.add(filter);
+ }
+
+ }
+
+ public void addShouldFilter(MatchFilterCriteriaEntity filter) {
+
+ if (!shouldFilters.contains(filter)) {
+ shouldFilters.add(filter);
+ }
+
+ }
+
+ public void setMinShouldMatch(int minShouldMatch) {
+ this.minShouldMatch = minShouldMatch;
+ }
+
+ public boolean isMatchAll() {
+ return (mustFilters.isEmpty() && shouldFilters.isEmpty());
+ }
+
+ public JsonObject getJsonObject() {
+ /*
+     * Specify a null config for now; if we want to normalize all the builders, we can do it in
+     * one place when we are ready.
+ */
+ JsonBuilderFactory factory = Json.createBuilderFactory(null);
+
+ JsonObjectBuilder boolBuilder = factory.createObjectBuilder();
+
+    if (!mustFilters.isEmpty()) {
+ JsonArrayBuilder mustArrayBuilder = factory.createArrayBuilder();
+
+ for (MatchFilterCriteriaEntity matchCriteria : mustFilters) {
+ mustArrayBuilder.add(matchCriteria.getJsonObject());
+ }
+
+ JsonArray mustArray = mustArrayBuilder.build();
+ boolBuilder.add("must", mustArray);
+ }
+
+ if (!shouldFilters.isEmpty()) {
+ JsonArray shouldArray = null;
+ JsonArrayBuilder shouldArrayBuilder = factory.createArrayBuilder();
+
+ for (MatchFilterCriteriaEntity matchCriteria : shouldFilters) {
+ shouldArrayBuilder.add(matchCriteria.getJsonObject());
+ }
+
+ shouldArray = shouldArrayBuilder.build();
+ boolBuilder.add("should", shouldArray).add("min_should_match", minShouldMatch);
+ }
+
+ JsonObjectBuilder queryObjectBuilder = factory.createObjectBuilder();
+
+ /*
+ * If both filter lists are empty then we are doing an aggregation
+ * based off fields. Just match-all for the query.
+ */
+    if (isMatchAll()) {
+ JsonObject matchAllObject = factory.createObjectBuilder().build();
+ queryObjectBuilder.add("match_all", matchAllObject);
+ } else {
+ queryObjectBuilder.add("bool", boolBuilder.build());
+ }
+
+ return queryObjectBuilder.build();
+ }
+}
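A short usage sketch for BoolQueryBuilder, with illustrative field names and values, showing both the match-all path (no filters registered) and a populated bool query:

    BoolQueryBuilder query = new BoolQueryBuilder();
    System.out.println(query.getJsonObject());
    // {"match_all":{}}

    query.addMustFilter(new MatchFilterCriteriaEntity("entityType", "generic-vnf"));
    query.addShouldFilter(new MatchFilterCriteriaEntity("prov-status", "PROV"));
    query.setMinShouldMatch(1);
    System.out.println(query.getJsonObject());
    // {"bool":{"must":[{"match":{"entityType":"generic-vnf"}}],
    //          "should":[{"match":{"prov-status":"PROV"}}],"min_should_match":1}}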
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/FilteredAggregationQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/FilteredAggregationQueryBuilder.java
new file mode 100644
index 0000000..fa9226e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/FilteredAggregationQueryBuilder.java
@@ -0,0 +1,65 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.entity;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.json.Json;
+import javax.json.JsonBuilderFactory;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+
+public class FilteredAggregationQueryBuilder {
+
+ private List<AggregationEntity> aggregationEntities;
+
+ public FilteredAggregationQueryBuilder() {
+ aggregationEntities = new ArrayList<AggregationEntity>();
+ }
+
+ public void addAggregationEntity(AggregationEntity aggregationEntity) {
+ if (!aggregationEntities.contains(aggregationEntity)) {
+ aggregationEntities.add(aggregationEntity);
+ }
+ }
+
+ public JsonObject getJsonObject() {
+
+ /*
+     * Specify a null config for now; if we want to normalize all the builders, we can do it in
+     * one place when we are ready.
+ */
+ JsonBuilderFactory factory = Json.createBuilderFactory(null);
+
+ JsonObjectBuilder aggsArrayBuilder = factory.createObjectBuilder();
+
+ for (AggregationEntity aggEntity : aggregationEntities) {
+ aggsArrayBuilder.add(aggEntity.getAggregationName(), aggEntity.getJsonObject());
+ }
+
+ return aggsArrayBuilder.build();
+ }
+}
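And a companion sketch of how several aggregations could be assembled into the aggregations portion of a search request; the aggregation names and fields are illustrative only.

    FilteredAggregationQueryBuilder aggs = new FilteredAggregationQueryBuilder();
    aggs.addAggregationEntity(new AggregationEntity("vnf-type", "vnf-type", 0));
    aggs.addAggregationEntity(new AggregationEntity("orchestration-status", "orchestration-status", 0));
    System.out.println(aggs.getJsonObject());
    // {"vnf-type":{"terms":{"field":"vnf-type","size":0}},
    //  "orchestration-status":{"terms":{"field":"orchestration-status","size":0}}}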
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/MatchFilterCriteriaEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/MatchFilterCriteriaEntity.java
new file mode 100644
index 0000000..68c058b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/MatchFilterCriteriaEntity.java
@@ -0,0 +1,77 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.entity;
+
+import javax.json.Json;
+import javax.json.JsonBuilderFactory;
+import javax.json.JsonObject;
+
+public class MatchFilterCriteriaEntity {
+
+ private String criteriaName;
+ private String criteriaValue;
+
+ public MatchFilterCriteriaEntity(String criteriaName, String criteriaValue) {
+ super();
+ this.criteriaName = criteriaName;
+ this.criteriaValue = criteriaValue;
+ }
+
+ public String getCriteriaName() {
+ return criteriaName;
+ }
+
+ public void setCriteriaName(String criteriaName) {
+ this.criteriaName = criteriaName;
+ }
+
+ public String getCriteriaValue() {
+ return criteriaValue;
+ }
+
+ public void setCriteriaValue(String criteriaValue) {
+ this.criteriaValue = criteriaValue;
+ }
+
+ public JsonObject getJsonObject() {
+
+ /*
+     * Specify a null config for now; if we want to normalize all the builders, we can do it in
+     * one place when we are ready.
+ */
+ JsonBuilderFactory factory = Json.createBuilderFactory(null);
+
+ return factory.createObjectBuilder()
+ .add("match", factory.createObjectBuilder().add(criteriaName, criteriaValue)).build();
+
+ }
+
+ @Override
+ public String toString() {
+ return "MatchFilterCriteriaEntity [criteriaName=" + criteriaName + ", criteriaValue="
+ + criteriaValue + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/SearchFilter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/SearchFilter.java
new file mode 100644
index 0000000..2a6f05d
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/SearchFilter.java
@@ -0,0 +1,88 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.entity;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * A base entity that carries a filter id and its values from the FE to the BE, used to drive DAL
+ * calls into ElasticSearch and the Search Abstraction Service, and as a utility object within the
+ * query builders.
+ *
+ * The class has a unique identifier for the filter id and one or more filter values. The value
+ * list was introduced to support a multi-select use case that will eventually need to be
+ * supported.
+ */
+public class SearchFilter {
+
+ private String filterId;
+ private List<String> values;
+
+ public SearchFilter() {
+ values = new ArrayList<String>();
+ }
+
+ public SearchFilter(String filterId) {
+ this();
+ this.filterId = filterId;
+ }
+
+ public SearchFilter(String filterId, String... values) {
+ this();
+ this.filterId = filterId;
+ this.values.addAll(Arrays.asList(values));
+ }
+
+ public String getFilterId() {
+ return filterId;
+ }
+
+ public void setFilterId(String filterId) {
+ this.filterId = filterId;
+ }
+
+ public List<String> getValues() {
+ return values;
+ }
+
+ public void setValues(List<String> values) {
+ this.values = values;
+ }
+
+ public void addValue(String v) {
+ if (!values.contains(v)) {
+ values.add(v);
+ }
+
+ }
+
+ @Override
+ public String toString() {
+ return "SearchFilter [filterId=" + filterId + ", values=" + values + "]";
+ }
+
+}
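A small sketch of SearchFilter's value-list behaviour; the filter id and values are illustrative only.

    SearchFilter filter = new SearchFilter("orchestration-status", "Activated", "Created");
    filter.addValue("Activated");        // duplicate, silently ignored
    filter.addValue("Pending-Create");
    System.out.println(filter);
    // SearchFilter [filterId=orchestration-status, values=[Activated, Created, Pending-Create]]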
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterEntity.java
new file mode 100644
index 0000000..f90403c
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterEntity.java
@@ -0,0 +1,180 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.entity;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.aai.sparky.search.filters.config.UiFilterConfig;
+import org.onap.aai.sparky.search.filters.config.UiFilterOptionsValuesConfig;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+
+/**
+ * Stores data for a single filter for a given UI view.
+ * <p>
+ * When a UI view wants to know which filters it should display, an object of this class is created for each
+ * filter discovered and stores data for that filter. Each filter/object of this class is added to a
+ * ViewFilterList object which is then serialized to JSON and returned to the view in the response body.
+ */
+@JsonInclude(Include.NON_NULL)
+public class UiFilterEntity {
+ private String filterId;
+ private String filterName;
+ private String displayName;
+ private String dataType;
+
+ private String multiSelect;
+ private String watermark;
+ private UiFilterOptionsValuesConfig defaultValue;
+ private String optionsType;
+
+ private List<UiFilterOptionsValuesConfig> optionsValues;
+
+ private List<UiFilterValueEntity> filterValueList;
+
+ public UiFilterEntity() {}
+
+ public UiFilterEntity(UiFilterConfig filterConfig) {
+ if (filterConfig.getFilterId() != null) {
+ this.setFilterId(filterConfig.getFilterId());
+ }
+ if (filterConfig.getFilterName() != null) {
+ this.setFilterName(filterConfig.getFilterName());
+ }
+ if (filterConfig.getDisplayName() != null) {
+ this.setDisplayName(filterConfig.getDisplayName());
+ }
+ if (filterConfig.getDataType() != null) {
+ this.setDataType(filterConfig.getDataType());
+ }
+ if (filterConfig.getMultiSelect() != null) {
+ this.setMultiSelect(filterConfig.getMultiSelect());
+ }
+ if (filterConfig.getWatermark() != null) {
+ this.setWatermark(filterConfig.getWatermark());
+ }
+ if (filterConfig.getDefaultValue() != null) {
+ this.setDefaultValue(filterConfig.getDefaultValue());
+ }
+ if (filterConfig.getOptionsType() != null) {
+ this.setOptionsType(filterConfig.getOptionsType());
+ }
+    if (filterConfig.getOptionsValues() != null && !filterConfig.getOptionsValues().isEmpty()) {
+ this.setOptionsValues(filterConfig.getOptionsValues());
+ } else {
+ this.optionsValues = new ArrayList<UiFilterOptionsValuesConfig>();
+ }
+ }
+
+ public void addFilterValue(UiFilterValueEntity valueEntity) {
+ if (null == filterValueList) {
+ filterValueList = new ArrayList<>();
+ }
+
+ this.filterValueList.add(valueEntity);
+ }
+
+ public String getFilterId() {
+ return filterId;
+ }
+
+ public String getFilterName() {
+ return filterName;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public String getDataType() {
+ return dataType;
+ }
+
+ public UiFilterOptionsValuesConfig getDefaultValue() {
+ return defaultValue;
+ }
+
+ public List<UiFilterValueEntity> getFilterValueList() {
+ return filterValueList;
+ }
+
+ public void setFilterId(String filterId) {
+ this.filterId = filterId;
+ }
+
+ public void setFilterName(String filterName) {
+ this.filterName = filterName;
+ }
+
+ public void setDisplayName(String displayName) {
+ this.displayName = displayName;
+ }
+
+ public void setDataType(String dataType) {
+ this.dataType = dataType;
+ }
+
+ public String getMultiSelect() {
+ return multiSelect;
+ }
+
+ public void setMultiSelect(String multiSelect) {
+ this.multiSelect = multiSelect;
+ }
+
+ public String getWatermark() {
+ return watermark;
+ }
+
+ public void setWatermark(String watermark) {
+ this.watermark = watermark;
+ }
+
+ public String getOptionsType() {
+ return optionsType;
+ }
+
+ public void setOptionsType(String optionsType) {
+ this.optionsType = optionsType;
+ }
+
+ public List<UiFilterOptionsValuesConfig> getOptionsValues() {
+ return optionsValues;
+ }
+
+ public void setOptionsValues(List<UiFilterOptionsValuesConfig> optionsValues) {
+ this.optionsValues = optionsValues;
+ }
+
+ public void setDefaultValue(UiFilterOptionsValuesConfig defaultValue) {
+ this.defaultValue = defaultValue;
+ }
+
+ public void setFilterValueList(List<UiFilterValueEntity> values) {
+ this.filterValueList = values;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterValueEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterValueEntity.java
new file mode 100644
index 0000000..73b105b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterValueEntity.java
@@ -0,0 +1,80 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.entity;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+
+/**
+ * This class represents a single item or value to populate the FE filter component with.
+ * A drop-down list, for example, may be populated with the values from several instances of this class.
+ */
+@JsonInclude(Include.NON_NULL)
+public class UiFilterValueEntity {
+ private String filterId;
+ private String filterValue;
+ private String displayName; // The string that will be rendered in the view
+
+ public UiFilterValueEntity() {}
+
+ public UiFilterValueEntity(String filterId, String filterValue, String displayName) {
+ this.filterId = filterId;
+ this.filterValue = filterValue;
+ this.displayName = displayName;
+ }
+
+ public String getFilterId() {
+ return filterId;
+ }
+
+ public String getFilterValue() {
+ return filterValue;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public void setFilterId(String filterId) {
+ this.filterId = filterId;
+ }
+
+ public void setFilterValue(String filterValue) {
+ this.filterValue = filterValue;
+ }
+
+ public void setDisplayName(String displayName) {
+ this.displayName = displayName;
+ }
+
+ @Override
+ public String toString() {
+ return "UiFilterValueEntity [" + (filterId != null ? "filterId=" + filterId + ", " : "")
+ + (filterValue != null ? "filterValue=" + filterValue + ", " : "")
+ + (displayName != null ? "displayName=" + displayName : "") + "]";
+ }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFiltersEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFiltersEntity.java
new file mode 100644
index 0000000..7780418
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFiltersEntity.java
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.filters.entity;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Represents a list of filters that a given UI view should display.
+ * <p>
+ * When a UI view asks which filters it should display, an instance of this class collects all the
+ * filters discovered for that view; it is then serialized to JSON and returned to the view in the
+ * response body.
+ */
+public class UiFiltersEntity {
+ private List<UiFilterEntity> filters = new ArrayList<>();
+
+
+
+ public void addFilter(UiFilterEntity viewFilter) {
+ filters.add(viewFilter);
+ }
+
+ public List<UiFilterEntity> getFilters() {
+ return filters;
+ }
+
+ public void setFilters(List<UiFilterEntity> filters) {
+ this.filters = filters;
+ }
+}
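Putting the entity classes above together, a hedged sketch of how a view's filter response body could be produced. The values and the use of com.fasterxml.jackson.databind.ObjectMapper are illustrative, and the fragment assumes a caller that handles the checked exception thrown by writeValueAsString.

    UiFiltersEntity viewFilters = new UiFiltersEntity();

    UiFilterEntity filter = new UiFilterEntity();
    filter.setFilterId("1");
    filter.setDisplayName("Orchestration Status");
    filter.addFilterValue(new UiFilterValueEntity("1", "Activated", "Activated"));
    viewFilters.addFilter(filter);

    String responseBody = new ObjectMapper().writeValueAsString(viewFilters);
    // {"filters":[{"filterId":"1","displayName":"Orchestration Status",
    //   "filterValueList":[{"filterId":"1","filterValue":"Activated","displayName":"Activated"}]}]}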
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java
new file mode 100644
index 0000000..4e785ce
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java
@@ -0,0 +1,76 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.search.registry;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.aai.sparky.search.api.SearchProvider;
+
+/**
+ * Intended as a Java-scoped singleton to work around the contextual issue of accessing the
+ * SearchProviderRegistry (SPR) from parts of the code that span the Spring context but are not
+ * directly instantiated as Spring beans. Eventually the SPR does not have to be a real singleton;
+ * it could simply be a Spring bean scoped as a singleton and wired in via dependency injection to
+ * the classes that need it, but that is not in place yet. This approach puts a demonstrable
+ * extension mechanism in place quickly, at practically no cost beyond some testing.
+ */
+
+public class SearchProviderRegistry {
+
+ private List<SearchProvider> searchProviders;
+
+ public SearchProviderRegistry() {
+ searchProviders = new ArrayList<SearchProvider>();
+ }
+
+ public List<SearchProvider> getSearchProviders() {
+ return searchProviders;
+ }
+
+ public final void addSearchProvider(SearchProvider searchProvider) {
+
+ if (searchProvider == null) {
+ return;
+ }
+
+ if (!searchProviders.contains(searchProvider)) {
+ searchProviders.add(searchProvider);
+ }
+ }
+
+ public final void addSearchProviders(List<SearchProvider> searchProviders) {
+
+ if (searchProviders == null) {
+ return;
+ }
+
+ for (SearchProvider searchProvider : searchProviders) {
+ addSearchProvider(searchProvider);
+ }
+
+ }
+
+}
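A sketch of how the registry is meant to be used; vnfSearchProvider and additionalProviders below are placeholders for real SearchProvider implementations and are not defined in this change.

    SearchProviderRegistry registry = new SearchProviderRegistry();
    registry.addSearchProvider(vnfSearchProvider);       // single registration; nulls and duplicates are ignored
    registry.addSearchProviders(additionalProviders);    // bulk registration of a List<SearchProvider>

    for (SearchProvider provider : registry.getSearchProviders()) {
      // fan the incoming search request out to each registered provider
    }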
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/BaseCookieDecryptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/BaseCookieDecryptor.java
new file mode 100644
index 0000000..9dfb1bf
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/BaseCookieDecryptor.java
@@ -0,0 +1,51 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.openecomp.portalsdk.core.onboarding.util.CipherUtil;
+
+public class BaseCookieDecryptor implements CookieDecryptor {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(BaseCookieDecryptor.class);
+
+
+ public BaseCookieDecryptor(){}
+
+ public String decryptCookie(String encryptedCookie){
+
+ String decryptedCookie = "";
+ try {
+ decryptedCookie = CipherUtil.decrypt(encryptedCookie, "");
+ } catch (Exception e) {
+ LOG.error(AaiUiMsgs.LOGIN_FILTER_INFO, "decrypting base cookie failed " + e.getLocalizedMessage());
+ }
+ return decryptedCookie;
+
+ }
+
+} \ No newline at end of file
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/CookieDecryptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/CookieDecryptor.java
new file mode 100644
index 0000000..6e79b60
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/CookieDecryptor.java
@@ -0,0 +1,31 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security;
+
+public interface CookieDecryptor {
+
+ String decryptCookie(String encryptedCookie);
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/EcompSso.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/EcompSso.java
new file mode 100644
index 0000000..3348b1f
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/EcompSso.java
@@ -0,0 +1,155 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security;
+
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig;
+import org.openecomp.portalsdk.core.onboarding.util.CipherUtil;
+import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties;
+
+/**
+ * Provides authentication services for onboarded ECOMP applications.
+ */
+public class EcompSso {
+
+ public static final String EP_SERVICE = "EPService";
+ public static final String CSP_COOKIE_NAME = "csp_cookie_name";
+ public static final String CSP_GATE_KEEPER_PROD_KEY = "csp_gate_keeper_prod_key";
+ public static final String ONAP_ENABLED = "ONAP_ENABLED";
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(EcompSso.class);
+
+ /**
+ * Searches the request for a cookie with the specified name.
+ *
+ * @param request
+ * @param cookieName
+ * @return Cookie, or null if not found.
+ */
+ public static Cookie getCookie(HttpServletRequest request, String cookieName) {
+ Cookie[] cookies = request.getCookies();
+    if (cookies != null) {
+      for (Cookie cookie : cookies) {
+        if (cookie.getName().equals(cookieName)) {
+          return cookie;
+        }
+      }
+    }
+
+ return null;
+ }
+
+ /**
+ * Answers whether the ECOMP Portal service cookie is present in the specified request.
+ *
+ * @param request
+ * @return true if the cookie is found, else false.
+ */
+ private static boolean isEPServiceCookiePresent(HttpServletRequest request) {
+ Cookie ep = getCookie(request, EP_SERVICE);
+ return (ep != null);
+ }
+
+ /**
+   * Validates whether the ECOMP Portal sign-on process has completed, which relies on the AT&T
+   * Global Log On single sign-on process. Checks for the ECOMP cookie (see {@link #EP_SERVICE}). If found,
+ * then searches for a CSP cookie; if not found, for a WebJunction header.
+ *
+ * @param request
+ * @return ATT UID if the ECOMP cookie is present and the sign-on process established an ATT UID;
+ * else null.
+ */
+ public static String validateEcompSso(HttpServletRequest request) {
+ boolean isOnapEnabled = PortalAuthenticationConfig.getInstance().getIsOnapEnabled();
+ if (isOnapEnabled) {
+ if (isEPServiceCookiePresent(request)) {
+ /*
+ * This is a "temporary" fix until proper separation between closed source and open source
+ * code is reached
+ */
+ return ONAP_ENABLED;
+ }
+ return null;
+ } else {
+ return getLoginIdFromCookie(request);
+ }
+ }
+
+ /**
+ * Searches the specified request for the CSP cookie, decodes it and gets the ATT UID.
+ *
+ * @param request
+ * @return ATTUID if the cookie is present in the request and can be decoded successfully (expired
+ * cookies do not decode); else null.
+ */
+ private static String getLoginIdFromCookie(HttpServletRequest request) {
+ String uid = null;
+ try {
+ String[] cspFields = getCspData(request);
+      if (cspFields != null && cspFields.length > 5) {
+        uid = cspFields[5];
+      }
+ } catch (Throwable t) {
+ LOG.info(AaiUiMsgs.LOGIN_FILTER_INFO,
+ "getLoginIdFromCookie failed " + t.getLocalizedMessage());
+ }
+ return uid;
+ }
+
+ /**
+ * Searches the specified request for the CSP cookie, decodes it and parses it to a String array.
+ *
+ * @param request
+ * @return Array of String as parsed from the cookie; null if the cookie is not present; empty
+ * array if the cookie could not be decoded.
+ */
+ private static String[] getCspData(HttpServletRequest request) {
+ final String cookieName = PortalApiProperties.getProperty(CSP_COOKIE_NAME);
+ if (cookieName == null) {
+ LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG,
+ "getCspData: Failed to get property " + CSP_COOKIE_NAME);
+ return null;
+ }
+ Cookie csp = getCookie(request, cookieName);
+ if (csp == null) {
+ LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "getCspData failed to get cookie " + cookieName);
+ return null;
+ }
+ final String cspCookieEncrypted = csp.getValue();
+
+ String cspCookieDecrypted = null;
+ try {
+ cspCookieDecrypted = PortalAuthenticationConfig.getInstance().getCookieDecryptor().decryptCookie(cspCookieEncrypted);
+ return cspCookieDecrypted.split("\\|");
+
+ } catch (ClassNotFoundException e) {
+ LOG.error(AaiUiMsgs.DECRYPTION_ERROR,"Unable to find the Cookie Decryptor Class");
+ }
+
+ return null;
+ }
+} \ No newline at end of file
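Downstream, a login filter would typically consume this along the following lines (request being the incoming HttpServletRequest); this is a sketch only, and the surrounding filter is not part of this change.

    String userId = EcompSso.validateEcompSso(request);
    if (userId == null) {
      // not authenticated: redirect to the portal login page or answer 401
    } else {
      // authenticated: userId is either the ONAP_ENABLED marker or the ATT UID from the CSP cookie
    }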
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactory.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactory.java
new file mode 100644
index 0000000..f251c31
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactory.java
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.security.UnrecoverableKeyException;
+import java.security.cert.CertificateException;
+
+import javax.net.ssl.SSLContext;
+
+/**
+ * A factory for creating SecurityContext objects.
+ */
+public interface SecurityContextFactory {
+
+ public String getSslAlgorithm();
+
+ public void setSslAlgorithm(String sslAlgorithm);
+
+ public String getKeyManagerAlgortihm();
+
+ public void setKeyManagerAlgortihm(String keyManagerAlgortihm);
+
+ public String getKeyStoreType();
+
+ public void setKeyStoreType(String keyStoreType);
+
+ public boolean isServerCertificationChainValidationEnabled();
+
+ public void setServerCertificationChainValidationEnabled(
+ boolean serverCertificationChainValidationEnabled);
+
+ public String getTrustStoreFileName();
+
+ public void setTrustStoreFileName(String filename);
+
+ public String getClientCertPassword();
+
+ public void setClientCertPassword(String password);
+
+ public void setClientCertFileInputStream(FileInputStream fis);
+
+ public void setClientCertFileName(String filename) throws IOException;
+
+ public FileInputStream getClientCertFileInputStream();
+
+ public SSLContext getSecureContext()
+ throws KeyManagementException, NoSuchAlgorithmException, FileNotFoundException,
+ KeyStoreException, CertificateException, IOException, UnrecoverableKeyException;
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactoryImpl.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactoryImpl.java
new file mode 100644
index 0000000..c09fa97
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactoryImpl.java
@@ -0,0 +1,205 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.security.KeyManagementException;
+import java.security.KeyStore;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.security.UnrecoverableKeyException;
+import java.security.cert.CertificateException;
+import java.security.cert.X509Certificate;
+
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.X509TrustManager;
+
+/**
+ * The Class SecurityContextFactoryImpl.
+ */
+public class SecurityContextFactoryImpl implements SecurityContextFactory {
+
+ protected String sslAlgorithm;
+ protected String keyManagerAlgortihm;
+ protected String keyStoreType;
+ protected boolean serverCertificationChainValidationEnabled;
+ protected String trustStoreFileName;
+ protected String clientCertPassword;
+ protected FileInputStream clientCertFileInputStream;
+ protected String clientCertFileName;
+ protected byte[] clientCertBytes;
+
+ /**
+ * Instantiates a new security context factory impl.
+ */
+ public SecurityContextFactoryImpl() {
+ this.sslAlgorithm = "TLS";
+ this.keyManagerAlgortihm = "SunX509";
+ this.keyStoreType = "PKCS12";
+ this.serverCertificationChainValidationEnabled = false;
+ this.clientCertFileInputStream = null;
+ this.clientCertFileName = null;
+ }
+
+ @Override
+ public String getSslAlgorithm() {
+ return sslAlgorithm;
+ }
+
+ @Override
+ public void setSslAlgorithm(String sslAlgorithm) {
+ this.sslAlgorithm = sslAlgorithm;
+ }
+
+ @Override
+ public String getKeyManagerAlgortihm() {
+ return keyManagerAlgortihm;
+ }
+
+ @Override
+ public void setKeyManagerAlgortihm(String keyManagerAlgortihm) {
+ this.keyManagerAlgortihm = keyManagerAlgortihm;
+ }
+
+ @Override
+ public String getKeyStoreType() {
+ return keyStoreType;
+ }
+
+ @Override
+ public void setKeyStoreType(String keyStoreType) {
+ this.keyStoreType = keyStoreType;
+ }
+
+ @Override
+ public boolean isServerCertificationChainValidationEnabled() {
+ return serverCertificationChainValidationEnabled;
+ }
+
+ @Override
+ public void setServerCertificationChainValidationEnabled(
+ boolean serverCertificationChainValidationEnabled) {
+ this.serverCertificationChainValidationEnabled = serverCertificationChainValidationEnabled;
+ }
+
+ @Override
+ public void setClientCertFileName(String filename) throws IOException {
+ this.clientCertFileName = filename;
+
+ if (filename == null) {
+ this.clientCertBytes = null;
+ } else {
+ this.clientCertBytes = Files.readAllBytes(new File(filename).toPath());
+ }
+ }
+
+ @Override
+ public void setClientCertFileInputStream(FileInputStream fis) {
+ this.clientCertFileInputStream = fis;
+ }
+
+ @Override
+ public FileInputStream getClientCertFileInputStream() {
+ return this.clientCertFileInputStream;
+ }
+
+ @Override
+ public SSLContext getSecureContext() throws KeyManagementException, NoSuchAlgorithmException,
+ KeyStoreException, CertificateException, IOException, UnrecoverableKeyException {
+
+ TrustManager[] trustAllCerts = null;
+
+ if (serverCertificationChainValidationEnabled) {
+
+ System.setProperty("javax.net.ssl.trustStore", trustStoreFileName);
+
+ } else {
+
+ // Create a trust manager that does not validate certificate chains
+ trustAllCerts = new TrustManager[] {new X509TrustManager() {
+ @Override
+ public X509Certificate[] getAcceptedIssuers() {
+ return null;
+ }
+
+ @Override
+ public void checkClientTrusted(X509Certificate[] certs, String authType) {}
+
+ @Override
+ public void checkServerTrusted(X509Certificate[] certs, String authType) {}
+ } };
+ }
+
+ KeyManagerFactory kmf = KeyManagerFactory.getInstance(keyManagerAlgortihm);
+
+ KeyStore ks = KeyStore.getInstance(keyStoreType);
+
+ char[] pwd = null;
+ if (clientCertPassword != null) {
+ pwd = clientCertPassword.toCharArray();
+ }
+
+ if (clientCertBytes != null) {
+ ks.load(new ByteArrayInputStream(clientCertBytes), pwd);
+ } else {
+ ks.load(null, pwd);
+ }
+
+ kmf.init(ks, pwd);
+
+ SSLContext ctx = SSLContext.getInstance(sslAlgorithm);
+ ctx.init(kmf.getKeyManagers(), trustAllCerts, null);
+
+ return ctx;
+
+ }
+
+ @Override
+ public String getTrustStoreFileName() {
+ return this.trustStoreFileName;
+ }
+
+ @Override
+ public void setTrustStoreFileName(String filename) {
+ this.trustStoreFileName = filename;
+ }
+
+ @Override
+ public String getClientCertPassword() {
+ return this.clientCertPassword;
+ }
+
+ @Override
+ public void setClientCertPassword(String password) {
+ this.clientCertPassword = password;
+ }
+
+}
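A usage sketch for the factory; the keystore path and password are placeholders, and the calls shown throw checked exceptions that a real caller would handle or propagate. With chain validation left at its default of false, the resulting SSLContext trusts any server certificate.

    SecurityContextFactory factory = new SecurityContextFactoryImpl();
    factory.setClientCertFileName("/opt/app/sparky/config/auth/client-cert.p12");  // placeholder path
    factory.setClientCertPassword("changeit");                                     // placeholder password
    factory.setServerCertificationChainValidationEnabled(false);

    SSLContext sslContext = factory.getSecureContext();
    // hand sslContext to the HTTP client used for outbound REST calls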
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java
new file mode 100644
index 0000000..795739b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java
@@ -0,0 +1,267 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security.filter;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+// import esGateKeeper.esGateKeeper;
+
+/**
+ * Redirects to the AT&T global login page if the user is not authenticated.<br>
+ * Filter properties need to be configured in: csp-cookie-filter.properties
+ */
+public class CspCookieFilter implements Filter {
+
+ /** Redirect URL for the login page. */
+ private String globalLoginUrl;
+
+ /** Application identifier. */
+ private String applicationId;
+
+ /** Gatekeeper environment setting (development or production). */
+ private String gateKeeperEnvironment;
+
+ private static final String FILTER_PARAMETER_CONFIG = "config";
+ private static final String PROPERTY_GLOBAL_LOGIN_URL = "global.login.url";
+ private static final String PROPERTY_APPLICATION_ID = "application.id";
+ private static final String PROPERTY_GATEKEEPER_ENVIRONMENT = "gatekeeper.environment";
+ // valid open redirect domains
+ private List<String> redirectDomains = new ArrayList<>();
+ private static final String PROPERTY_REDIRECT_DOMAINS = "redirect-domain";
+
+ /** Needed by esGateKeeper, does not accept any other value. */
+ private static final String GATEKEEPER_ACCOUNT_NAME = "CSP";
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(CspCookieFilter.class);
+
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
+ */
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ String txnID = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnID, "CspCookieFilter", "", "Init", "");
+
+ try {
+ setConfigurationProperties(filterConfig);
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.ERROR_CSP_CONFIG_FILE);
+ throw new ServletException(exc);
+ }
+ }
+
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
+ */
+ @Override
+ public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
+ throws IOException, ServletException {
+ HttpServletRequest request = (HttpServletRequest) req;
+ HttpServletResponse response = (HttpServletResponse) res;
+
+ Cookie[] cookies = request.getCookies();
+ if ((cookies == null) || (cookies.length == 0)) {
+ doLogin(request, response);
+ return;
+ }
+
+ /*
+ * String attEsSec = getSecurityCookie(cookies);
+ *
+ * if (attEsSec == null || attEsSec.length() == 0) { doLogin(request, response); return; }
+ *
+ * String attEsSecUnEncrypted = esGateKeeper.esGateKeeper(attEsSec, GATEKEEPER_ACCOUNT_NAME,
+ * gateKeeperEnvironment); if (attEsSecUnEncrypted == null) { doLogin(request, response); } else
+ * {
+ */
+ // LOG.info("User has valid cookie");
+ chain.doFilter(request, response);
+ // }
+ }
+
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#destroy()
+ */
+ @Override
+ public void destroy() {}
+
+ /**
+ * Sets all required properties needed by this filter.
+ *
+ * @param filterConfig the filter configuration defined in the application web.xml
+ * @throws IOException if the properties failed to load.
+ */
+ private void setConfigurationProperties(FilterConfig filterConfig) throws IOException {
+ Properties cspProperties = new Properties();
+ // try-with-resources ensures the configuration stream is closed even if load() fails
+ try (InputStream inputStream = new FileInputStream(SparkyConstants.CONFIG_HOME
+ + filterConfig.getInitParameter(FILTER_PARAMETER_CONFIG))) {
+ cspProperties.load(inputStream);
+ }
+ globalLoginUrl = cspProperties.getProperty(PROPERTY_GLOBAL_LOGIN_URL);
+ applicationId = cspProperties.getProperty(PROPERTY_APPLICATION_ID);
+ gateKeeperEnvironment = cspProperties.getProperty(PROPERTY_GATEKEEPER_ENVIRONMENT);
+ redirectDomains = Arrays.asList(cspProperties.getProperty(PROPERTY_REDIRECT_DOMAINS).split(","));
+ }
+
+ /**
+ * Returns the attESSec cookie if found in the client.
+ *
+ * @param cookies the cookies available in the client
+ * @return the attESSec authentication cookie generated by the login page.
+ */
+ private String getSecurityCookie(Cookie[] cookies) {
+ for (Cookie cookie : cookies) {
+ if ("attESSec".equals(cookie.getName())) {
+ return cookie.getValue();
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Redirects to the AT&T global login page. If this is an AJAX request it returns an unauthorized
+ * HTTP error in the response.
+ *
+ * @param request the filter request object
+ * @param response the filter response object
+ * @throws IOException if there is an error setting the error response
+ */
+ private void doLogin(HttpServletRequest request, HttpServletResponse response)
+ throws IOException {
+ if (isAjaxRequest(request)) {
+ response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
+ "User is not authorized. Please login to application");
+ } else {
+ // Fix for Safari 7.0.2 onwards to avoid login page cache
+ response.addHeader("Cache-Control", "no-cache, no-store");
+ String redirectURL = createRedirectUrl(request);
+ if (isValidRedirectURL(redirectURL)) {
+ response.sendRedirect(redirectURL);
+ LOG.debug(AaiUiMsgs.VALID_REDIRECT_URL, redirectURL);
+ } else {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Bad redirect URL");
+ LOG.error(AaiUiMsgs.INVALID_REDIRECT_URL, redirectURL);
+ }
+ }
+ }
+
+ /**
+ * Checks if a redirect url is valid
+ * @param url URL to validate
+ * @return true if URL is a valid redirect URL, false otherwise
+ */
+ private boolean isValidRedirectURL(String url) {
+ String redirectTo = url.substring(url.indexOf("?retURL=") + "?retURL=".length());
+ try {
+ redirectTo = URLDecoder.decode(redirectTo, StandardCharsets.UTF_8.toString());
+ } catch (UnsupportedEncodingException e) {
+ LOG.error(AaiUiMsgs.UNSUPPORTED_URL_ENCODING, e.getLocalizedMessage());
+ return false;
+ }
+ for (String domain : redirectDomains) {
+ if (redirectTo.endsWith(domain)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+
+ /**
+ * Returns <code>true</code> if the request is an AJAX request.
+ *
+ * @param request the filter request object
+ * @return <code>true</code> if the request is an AJAX request.
+ */
+ private boolean isAjaxRequest(HttpServletRequest request) {
+ return "XMLHttpRequest".equals(request.getHeader("X-Requested-With"));
+ }
+
+ /**
+ * Returns the redirection URL to the AT&T Global login page.
+ *
+ * @param request the request
+ * @return the string
+ * @throws UnsupportedEncodingException the unsupported encoding exception
+ */
+ private String createRedirectUrl(HttpServletRequest request) throws UnsupportedEncodingException {
+ String returnUrl = getReturnUrl(request);
+
+ return globalLoginUrl + "?retURL=" + returnUrl + "&sysName=" + applicationId;
+ }
+
+ /**
+ * Gets the URL encoded return URL.
+ *
+ * @param request the HTTP request
+ * @return an encoded URL to return to following login
+ * @throws UnsupportedEncodingException the unsupported encoding exception
+ */
+ private String getReturnUrl(HttpServletRequest request) throws UnsupportedEncodingException {
+ StringBuffer retUrl = request.getRequestURL();
+ String urlParams = request.getQueryString();
+ if (urlParams != null) {
+ retUrl.append("?" + urlParams);
+ }
+ return URLEncoder.encode(retUrl.toString(), StandardCharsets.UTF_8.toString());
+ }
+}
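
A minimal Spring-Boot registration sketch for this filter, assuming a FilterRegistrationBean based
configuration (the configuration class name, URL pattern, and the properties file name passed as the
"config" init parameter are illustrative, not taken from this patch):

    import org.onap.aai.sparky.security.filter.CspCookieFilter;
    import org.springframework.boot.web.servlet.FilterRegistrationBean;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class CspCookieFilterRegistration {

      @Bean
      public FilterRegistrationBean cspCookieFilter() {
        FilterRegistrationBean registration = new FilterRegistrationBean();
        registration.setFilter(new CspCookieFilter());
        // init() resolves this value relative to SparkyConstants.CONFIG_HOME
        registration.addInitParameter("config", "csp-cookie-filter.properties");
        registration.addUrlPatterns("/*");
        return registration;
      }
    }
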
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java
new file mode 100644
index 0000000..dd90573
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java
@@ -0,0 +1,236 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security.filter;
+
+import java.io.IOException;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+import javax.ws.rs.core.HttpHeaders;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.security.EcompSso;
+import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig;
+import org.openecomp.portalsdk.core.onboarding.listener.PortalTimeoutHandler;
+import org.openecomp.portalsdk.core.onboarding.util.PortalApiConstants;
+import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties;
+import org.openecomp.portalsdk.core.onboarding.util.SSOUtil;
+
+/**
+ * This filter checks every request for proper ECOMP Portal single sign on initialization. The
+ * possible paths and actions:
+ * <OL>
+ * <LI>User starts at an app page via a bookmark. No ECOMP portal cookie is set. Redirect there to
+ * get one; then continue as below.
+ * <LI>User starts at ECOMP Portal and goes to app. Alternately, the user's session times out and
+ * the user hits refresh. The ECOMP Portal cookie is set, but there is no valid session. Create one
+ * and publish info.
+ * <LI>User has valid ECOMP Portal cookie and session. Reset the max idle in that session.
+ * </OL>
+ * <P>
+ * Notes:
+ * <UL>
+ * <LI>Portal Session should be up prior to App Session</LI>
+ * <LI>If App Session Expires or if EPService cookie is unavailable, we need to redirect to Portal.
+ * <LI>Method {@link #initiateSessionMgtHandler(HttpServletRequest)} should be called for Session
+ * management when the initial session is created
+ * <LI>While redirecting, the cookie "redirectUrl" should also be set so that Portal knows where to
+ * forward the request to once the Portal Session is created and EPService cookie is set.
+ * <LI>Method {@link #resetSessionMaxIdleTimeOut(HttpServletRequest)} should be called for every
+ * request to reset the MaxInactiveInterval to the right value.
+ * </UL>
+ * <P>
+ * This filter incorporates most features of the SDK application's SessionTimeoutInterceptor and
+ * SingleSignOnController classes
+ */
+public class LoginFilter implements Filter {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(LoginFilter.class);
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ // Validate that app has provided useful portal properties
+ if (PortalApiProperties.getProperty(PortalApiConstants.ECOMP_REDIRECT_URL) == null) {
+ throw new ServletException("Failed to find URL in portal.properties");
+ }
+
+ PortalAuthenticationConfig appProperties;
+ try {
+ appProperties = PortalAuthenticationConfig.getInstance();
+ } catch (Exception ex) {
+ throw new ServletException("Failed to get properties", ex);
+ }
+
+ String restUser = appProperties.getUsername();
+ String restPassword = appProperties.getPassword();
+ if (restUser == null || restPassword == null) {
+ throw new ServletException("Failed to find user and/or password from properties");
+ }
+ }
+
+ @Override
+ public void destroy() {
+ // No resources to release
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse,
+ * javax.servlet.FilterChain)
+ */
+ @Override
+ public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
+ throws ServletException, IOException {
+ HttpServletRequest request = (HttpServletRequest) req;
+ HttpServletResponse response = (HttpServletResponse) res;
+
+ // Choose authentication appropriate for the request.
+ final String restApiURI = request.getContextPath() + PortalApiConstants.API_PREFIX;
+ if (request.getRequestURI().startsWith(restApiURI)) {
+ // REST servlet checks credentials
+ LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: delegating auth to REST servlet for request " + request.getRequestURI());
+ chain.doFilter(request, response);
+ } else {
+ // All other requests require ECOMP Portal authentication
+ if (EcompSso.validateEcompSso(request) == null) {
+ String redirectURL, logMessage;
+ if (request.getRequestURI().contains("/editAttributes")) {
+ // If request is for Edit Attributes UI, redirect straight to the application.
+ String appPath = request.getRequestURI().substring(request.getContextPath().length() + 1)
+ + (request.getQueryString() != null ? ("?" + request.getQueryString()) : "");
+ redirectURL = SSOUtil.getECOMPSSORedirectURL(request, response, appPath);
+ logMessage = "Unauthenticated Edit Attributes UI login attempt.";
+ } else {
+ // Redirect to Portal UI
+ redirectURL = PortalApiProperties.getProperty(PortalApiConstants.ECOMP_REDIRECT_URL);
+ logMessage = "Unauthorized login attempt.";
+ }
+
+ LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG,
+ logMessage +
+ " | Remote IP: " + request.getRemoteAddr() +
+ " | User agent: " + request.getHeader(HttpHeaders.USER_AGENT) +
+ " | Request URL: " + request.getRequestURL() +
+ " | Redirecting to: " + redirectURL);
+
+ response.sendRedirect(redirectURL);
+ } else {
+ HttpSession session = request.getSession(false);
+ if (session == null) {
+ // New session
+ session = request.getSession(true);
+ LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: created new session " + session.getId());
+ initiateSessionMgtHandler(request);
+ } else {
+ // Existing session
+ LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: resetting idle in existing session " + session.getId());
+ resetSessionMaxIdleTimeOut(request);
+ }
+ // Pass request back down the filter chain
+ chain.doFilter(request, response);
+ }
+ }
+ }
+
+ /**
+ * Publishes information about the session.
+ *
+ * @param request
+ */
+ private void initiateSessionMgtHandler(HttpServletRequest request) {
+ String portalJSessionId = getPortalJSessionId(request);
+ String jSessionId = getJSessionId(request);
+ storeMaxInactiveTime(request);
+ PortalTimeoutHandler.sessionCreated(portalJSessionId, jSessionId, request.getSession(false));
+ }
+
+ /**
+ * Gets the ECOMP Portal service cookie value.
+ *
+ * @param request
+ * @return Cookie value, or null if not found.
+ */
+ private String getPortalJSessionId(HttpServletRequest request) {
+ Cookie ep = EcompSso.getCookie(request, EcompSso.EP_SERVICE);
+ return ep == null ? null : ep.getValue();
+ }
+
+ /**
+ * Gets the container session ID.
+ *
+ * @param request
+ * @return Session ID, or null if no session.
+ */
+ private String getJSessionId(HttpServletRequest request) {
+ HttpSession session = request.getSession();
+ return session == null ? null : session.getId();
+ }
+
+ /**
+ * Sets the global session's max idle time to the session's max inactive interval.
+ *
+ * @param request
+ */
+ private void storeMaxInactiveTime(HttpServletRequest request) {
+ HttpSession session = request.getSession(false);
+ if (session != null
+ && session.getAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME) == null) {
+ session.setAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME,
+ session.getMaxInactiveInterval());
+ }
+ }
+
+ /**
+ * Sets the session's max inactive interval.
+ *
+ * @param request
+ */
+ private void resetSessionMaxIdleTimeOut(HttpServletRequest request) {
+ try {
+ HttpSession session = request.getSession(false);
+ if (session != null) {
+ final Object maxIdleAttribute = session
+ .getAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME);
+ if (maxIdleAttribute != null) {
+ session.setMaxInactiveInterval(Integer.parseInt(maxIdleAttribute.toString()));
+ }
+ }
+ } catch (Exception e) {
+ LOG.info(AaiUiMsgs.LOGIN_FILTER_INFO, "resetSessionMaxIdleTimeOut: failed to set session max inactive interval - " + e.getLocalizedMessage());
+ }
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java
new file mode 100644
index 0000000..ad64c63
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java
@@ -0,0 +1,213 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security.portal;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.LinkedHashSet;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.aai.sparky.security.EcompSso;
+import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+import org.openecomp.portalsdk.core.onboarding.crossapi.IPortalRestAPIService;
+import org.openecomp.portalsdk.core.onboarding.exception.PortalAPIException;
+import org.openecomp.portalsdk.core.restful.domain.EcompRole;
+import org.openecomp.portalsdk.core.restful.domain.EcompUser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Responds to ECOMP Portal's REST queries for user and role information and management.
+ */
+public class PortalRestAPIServiceImpl implements IPortalRestAPIService {
+
+ private static final Logger LOG = LoggerFactory.getLogger(PortalRestAPIServiceImpl.class);
+ private static final String ERROR_MESSAGE = "Failed to {0} user [loginId:{1}]";
+
+ private UserManager userManager;
+
+ /**
+ * Initialise user manager.
+ */
+ public PortalRestAPIServiceImpl() {
+ userManager = new UserManager(new File(SparkyConstants.USERS_FILE_LOCATION));
+ }
+
+ /////////////////////////////////////////////////////////////////////////////
+ // User interface
+ /////////////////////////////////////////////////////////////////////////////
+
+ /*
+ * (non-Javadoc)
+ *
+ */
+ @Override
+ public void pushUser(EcompUser user) throws PortalAPIException {
+ LOG.debug("Push user [loginId:" + user.getLoginId() + "]");
+
+ if (userManager.getUser(user.getLoginId()).isPresent()) {
+ String message = getMessage(ERROR_MESSAGE, "push", user.getLoginId())
+ + ", user is already stored";
+ LOG.error(message);
+ throw new PortalAPIException(message);
+ }
+
+ try {
+ userManager.pushUser(user);
+ } catch (IOException e) {
+ String message = getMessage(ERROR_MESSAGE, "push", user.getLoginId());
+ LOG.error(message, e);
+ throw new PortalAPIException(message, e);
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ */
+ @Override
+ public void editUser(String loginId, EcompUser user) throws PortalAPIException {
+ LOG.debug("Edit user [loginId:" + loginId + "]");
+
+ userManager.getUser(loginId).orElseThrow(() -> {
+ String message = getMessage(ERROR_MESSAGE, "edit", loginId) + ", unknown user";
+ LOG.error(message);
+ return new PortalAPIException(message);
+ });
+
+ try {
+ userManager.editUser(loginId, user);
+ } catch (IOException e) {
+ String message = getMessage(ERROR_MESSAGE, "edit", loginId);
+ LOG.error(message, e);
+ throw new PortalAPIException(message, e);
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ */
+ @Override
+ public EcompUser getUser(String loginId) throws PortalAPIException {
+ LOG.debug("Get user [loginId:" + loginId + "]");
+ return userManager.getUser(loginId).orElseThrow(() -> {
+ String message = getMessage(ERROR_MESSAGE, "get", loginId) + ", unknown user";
+ LOG.error(message);
+ return new PortalAPIException(message);
+ });
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ */
+ @Override
+ public List<EcompUser> getUsers() throws PortalAPIException {
+ LOG.debug("Get users");
+ return userManager.getUsers();
+ }
+
+ @Override
+ public String getUserId(HttpServletRequest request) throws PortalAPIException {
+ return EcompSso.validateEcompSso(request);
+ }
+
+ /////////////////////////////////////////////////////////////////////////////
+ // Role interface
+ /////////////////////////////////////////////////////////////////////////////
+
+ public List<EcompRole> getAvailableRoles() throws PortalAPIException {
+ LOG.debug("Get available roles");
+ return UserManager.getRoles();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ */
+ @Override
+ public List<EcompRole> getUserRoles(String loginId) throws PortalAPIException {
+ LOG.debug("Get user roles");
+ return userManager.getUserRoles(loginId);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ */
+ @Override
+ public void pushUserRole(String loginId, List<EcompRole> roles) throws PortalAPIException {
+ LOG.debug("Push user role [loginId:" + loginId + "]");
+ try {
+ EcompUser user = getUser(loginId);
+ if (roles != null) {
+ user.setRoles(new LinkedHashSet<EcompRole>(roles));
+ } else {
+ user.setRoles(new LinkedHashSet<EcompRole>());
+ }
+ editUser(loginId, user);
+ } catch (PortalAPIException e) {
+ String message = getMessage(ERROR_MESSAGE, "push role", loginId);
+ LOG.error(message);
+ throw new PortalAPIException(message, e);
+ }
+ }
+
+ /////////////////////////////////////////////////////////////////////////////
+ // Security interface
+ /////////////////////////////////////////////////////////////////////////////
+
+ /*
+ * (non-Javadoc)
+ *
+ */
+ @Override
+ public boolean isAppAuthenticated(HttpServletRequest request) throws PortalAPIException {
+ LOG.debug("Authentication request");
+ PortalAuthenticationConfig config = PortalAuthenticationConfig.getInstance();
+ String restUsername = request.getHeader(PortalAuthenticationConfig.PROP_USERNAME);
+ String restPassword = request.getHeader(PortalAuthenticationConfig.PROP_PASSWORD);
+ return restUsername != null && restPassword != null && restUsername.equals(config.getUsername())
+ && restPassword.equals(config.getPassword());
+ }
+
+ private String getMessage(String message, Object... args) {
+ MessageFormat formatter = new MessageFormat("");
+ formatter.applyPattern(message);
+ return formatter.format(args);
+ }
+
+ public List<EcompRole> getAvailableRoles(String requestedLoginId) throws PortalAPIException {
+ LOG.debug("Get available roles");
+ return UserManager.getRoles();
+ }
+
+} \ No newline at end of file
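
The getMessage helper above expands positional MessageFormat placeholders; a small, self-contained
illustration of the same pattern (the action and loginId values are examples only):

    import java.text.MessageFormat;

    public class MessageFormatExample {
      public static void main(String[] args) {
        // Mirrors ERROR_MESSAGE: {0} is the action, {1} is the loginId
        String template = "Failed to {0} user [loginId:{1}]";
        System.out.println(MessageFormat.format(template, "push", "jdoe"));
        // Prints: Failed to push user [loginId:jdoe]
      }
    }
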
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/UserManager.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/UserManager.java
new file mode 100644
index 0000000..ea9b204
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/UserManager.java
@@ -0,0 +1,170 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security.portal;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.stream.Collectors;
+
+import org.onap.aai.sparky.security.portal.config.RolesConfig;
+import org.openecomp.portalsdk.core.restful.domain.EcompRole;
+import org.openecomp.portalsdk.core.restful.domain.EcompUser;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.reflect.TypeToken;
+
+/**
+ * Basic file based user storage.
+ */
+public class UserManager {
+
+ private File usersFile;
+
+ private static final ReadWriteLock LOCK = new ReentrantReadWriteLock(true);
+ private static final Lock READ_LOCK = LOCK.readLock();
+ private static final Lock WRITE_LOCK = LOCK.writeLock();
+
+ private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
+
+ /**
+ *
+ * @param usersFile a file to store the users
+ */
+ public UserManager(File usersFile) {
+ this.usersFile = usersFile;
+ }
+
+ /**
+ * Returns all users stored.
+ *
+ * @return a list of users.
+ */
+ public List<EcompUser> getUsers() {
+ Type collectionType = new TypeToken<List<EcompUser>>() {
+ }.getType();
+
+ Optional<String> users = read(usersFile);
+ if (users.isPresent()) {
+ List<EcompUser> storedUsers = GSON.fromJson(users.get(), collectionType);
+ // Gson maps an empty users file to null; fall through to an empty list in that case
+ if (storedUsers != null) {
+ return storedUsers;
+ }
+ }
+
+ return new ArrayList<>();
+ }
+
+ /**
+ * Returns a stored user.
+ *
+ * @param loginId the identifier of the user
+ * @return an optional user.
+ */
+ public Optional<EcompUser> getUser(String loginId) {
+ return getUsers().stream().filter(u -> loginId.equals(u.getLoginId())).findFirst();
+ }
+
+ /**
+ * Stores a user if not already stored.
+ *
+ * @param user the user to be stored
+ * @throws IOException
+ */
+ public void pushUser(EcompUser user) throws IOException {
+ WRITE_LOCK.lock();
+ try {
+ if (!getUser(user.getLoginId()).isPresent()) {
+ addUser(getUsers(), user);
+ }
+ } finally {
+ WRITE_LOCK.unlock();
+ }
+ }
+
+ /**
+ * Replaces an existing user.
+ *
+ * @param loginId the id of the user
+ * @param user the new user details
+ * @throws IOException
+ */
+ public void editUser(String loginId, EcompUser user) throws IOException {
+ WRITE_LOCK.lock();
+ try {
+ if (getUser(loginId).isPresent()) {
+ List<EcompUser> users = getUsers().stream().filter(u -> !u.getLoginId().equals(loginId))
+ .collect(Collectors.toList());
+ addUser(users, user);
+ }
+ } finally {
+ WRITE_LOCK.unlock();
+ }
+ }
+
+ /**
+ * Gets the roles assigned to a user.
+ *
+ * @param loginId the id of the user
+ * @return the assigned roles
+ */
+ public List<EcompRole> getUserRoles(String loginId) {
+ List<EcompRole> roles = new ArrayList<>();
+ roles.addAll(getUser(loginId).orElseGet(EcompUser::new).getRoles());
+ return roles;
+ }
+
+ public static List<EcompRole> getRoles() {
+ return RolesConfig.getInstance().getRoles();
+ }
+
+ private void addUser(List<EcompUser> users, EcompUser user) throws IOException {
+ users.add(user);
+ write(users);
+ }
+
+ private void write(List<EcompUser> users) throws IOException {
+ Files.write(usersFile.toPath(), GSON.toJson(users).getBytes());
+ }
+
+ private Optional<String> read(File file) {
+ READ_LOCK.lock();
+ try {
+ return Optional.of(new String(Files.readAllBytes(file.toPath())));
+ } catch (IOException e) { // NOSONAR
+ return Optional.empty();
+ } finally {
+ READ_LOCK.unlock();
+ }
+ }
+} \ No newline at end of file
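
A short usage sketch of this file-backed store, assuming a fresh (not yet existing) users file and
the Portal SDK's standard EcompUser setters; the file name and loginId below are placeholders:

    import java.io.File;
    import java.io.IOException;

    import org.onap.aai.sparky.security.portal.UserManager;
    import org.openecomp.portalsdk.core.restful.domain.EcompUser;

    public class UserManagerUsageSketch {
      public static void main(String[] args) throws IOException {
        // Start from a path that does not exist yet so getUsers() begins with an empty list
        File usersFile = new File(System.getProperty("java.io.tmpdir"), "sparky-users-demo.json");
        usersFile.delete();

        UserManager userManager = new UserManager(usersFile);

        EcompUser user = new EcompUser();
        user.setLoginId("demo-user");
        userManager.pushUser(user);

        System.out.println(userManager.getUser("demo-user").isPresent()); // prints: true
      }
    }
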
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java
new file mode 100644
index 0000000..e1b7eda
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java
@@ -0,0 +1,124 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security.portal.config;
+
+
+import java.util.Properties;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.security.CookieDecryptor;
+import org.onap.aai.sparky.util.ConfigHelper;
+import org.onap.aai.sparky.util.Encryptor;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+
+/**
+ * Provides Portal authentication configuration.
+ */
+public class PortalAuthenticationConfig {
+
+ private String username;
+ private String password;
+ private boolean isOnapEnabled;
+ private CookieDecryptor cookieDecryptor;
+ private String cookieDecryptorClassName;
+
+ public static final String PROP_USERNAME = "username";
+ public static final String PROP_PASSWORD = "password"; // NOSONAR
+ public static final String PROP_IS_ONAP_ENABLED = "onap_enabled"; // NOSONAR
+ private static final String AUTHENTICATION_CONFIG_FILE = SparkyConstants.PORTAL_AUTHENTICATION_FILE_LOCATION;
+ public static final String PROP_COOKIEDECRYPTORCLASSNAME = "cookie_decryptor_classname";
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(PortalAuthenticationConfig.class);
+
+ private PortalAuthenticationConfig() {
+ // Prevent instantiation
+ }
+
+ private static class PortalAuthenticationConfigHelper {
+ private static final PortalAuthenticationConfig INSTANCE = new PortalAuthenticationConfig();
+
+ private PortalAuthenticationConfigHelper() {
+ // Deliberately empty
+ }
+ }
+
+ /**
+ * Get a singleton instance of the configuration.
+ *
+ * @return
+ */
+ public static PortalAuthenticationConfig getInstance() {
+ PortalAuthenticationConfigHelper.INSTANCE.load();
+ return PortalAuthenticationConfigHelper.INSTANCE;
+ }
+
+ public String getUsername() {
+ return username;
+ }
+
+ public String getPassword() {
+ Encryptor encryptor = new Encryptor();
+ return encryptor.decryptValue(password);
+ }
+
+ public boolean getIsOnapEnabled() {
+ return isOnapEnabled;
+ }
+ public String getcookieDecryptorClassName() {
+ return cookieDecryptorClassName;
+ }
+
+ /**
+ * Reload the Portal authentication properties from the classpath.
+ */
+ public void reload() {
+ load();
+ }
+
+ /**
+ * Load the Portal authentication properties from the classpath.
+ */
+ private void load() {
+ Properties props = ConfigHelper.loadConfigFromExplicitPath(AUTHENTICATION_CONFIG_FILE);
+ username = props.getProperty(PROP_USERNAME);
+ password = props.getProperty(PROP_PASSWORD);
+ isOnapEnabled = Boolean.parseBoolean(props.getProperty(PROP_IS_ONAP_ENABLED, "true"));
+ cookieDecryptorClassName = props.getProperty(PROP_COOKIEDECRYPTORCLASSNAME);
+ }
+
+ public CookieDecryptor getCookieDecryptor() throws ClassNotFoundException {
+ Class<?> cookieDecryptorClass = Class.forName(cookieDecryptorClassName);
+ try {
+ cookieDecryptor = (CookieDecryptor) cookieDecryptorClass.newInstance();
+ } catch (InstantiationException | IllegalAccessException e) {
+ LOG.error(AaiUiMsgs.DECRYPTION_ERROR, "Unable to instantiate Cookie Decryptor Class");
+ }
+ return cookieDecryptor;
+ }
+
+} \ No newline at end of file
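
The keys read by load() are the PROP_* constants above, so an illustrative authentication properties
file could look like the sketch below (values are placeholders; the password is expected to be an
Encryptor-encrypted value, not plain text):

    username=<rest-username>
    password=<encrypted-rest-password>
    onap_enabled=true
    cookie_decryptor_classname=<fully qualified CookieDecryptor implementation>
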
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/RolesConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/RolesConfig.java
new file mode 100644
index 0000000..283834c
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/RolesConfig.java
@@ -0,0 +1,90 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.security.portal.config;
+
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.List;
+
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+import org.openecomp.portalsdk.core.restful.domain.EcompRole;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonSyntaxException;
+import com.google.gson.reflect.TypeToken;
+
+/**
+ * Provides roles configuration.
+ */
+public class RolesConfig {
+
+ private List<EcompRole> roles;
+
+ private static final Gson GSON = new Gson();
+ private static final String ROLES_CONFIG_FILE = SparkyConstants.ROLES_FILE_LOCATION;
+
+ private RolesConfig() {
+ // Prevent instantiation
+ }
+
+ private static class RolesConfigHelper {
+ private static final RolesConfig INSTANCE = new RolesConfig();
+
+ private RolesConfigHelper() {
+ // Deliberately empty
+ }
+ }
+
+ /**
+ * Get a singleton instance of the configuration.
+ *
+ * @return
+ */
+ public static RolesConfig getInstance() {
+ try {
+ RolesConfigHelper.INSTANCE.load();
+ } catch (Exception e) {
+ throw new ExceptionInInitializerError(e);
+ }
+
+ return RolesConfigHelper.INSTANCE;
+ }
+
+ public List<EcompRole> getRoles() {
+ return roles;
+ }
+
+ private void load() throws JsonSyntaxException, IOException, URISyntaxException {
+ Type collectionType = new TypeToken<List<EcompRole>>() {
+ }.getType();
+
+ roles = Collections.unmodifiableList(GSON
+ .fromJson(new String(Files.readAllBytes(Paths.get(ROLES_CONFIG_FILE))), collectionType));
+ }
+} \ No newline at end of file
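
load() expects the roles file to contain a JSON array of EcompRole entries; assuming the Portal SDK's
usual id/name fields, a minimal roles file sketch would be:

    [
      { "id": 1, "name": "ui_view" }
    ]
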
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/SubscriptionServiceProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/SubscriptionServiceProcessor.java
new file mode 100644
index 0000000..a8dd131
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/SubscriptionServiceProcessor.java
@@ -0,0 +1,74 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.subscription;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.camel.Exchange;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.logging.util.ServletUtils;
+import org.onap.aai.sparky.subscription.services.SubscriptionService;
+
+public class SubscriptionServiceProcessor {
+
+ private static final String EMPTY_RESPONSE = "{}";
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(SubscriptionServiceProcessor.class);
+
+ SubscriptionService subService;
+
+
+ public SubscriptionServiceProcessor(SubscriptionService subscriptionService) {
+ this.subService = subscriptionService;
+ }
+
+
+ public void getSubscription(Exchange exchange) {
+
+ HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class);
+ ServletUtils.setUpMdcContext(exchange, request);
+
+ OperationResult subscriptionResult = null;
+
+ try {
+
+ subscriptionResult = subService.buildSubscriptionPayload();
+
+ } catch (Exception exc) {
+ subscriptionResult = new OperationResult();
+ subscriptionResult.setResult(EMPTY_RESPONSE);
+ LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST,
+ "Exception thrown during subscription processing: " + exc.getLocalizedMessage());
+ }
+
+ exchange.getOut().setBody(subscriptionResult.getResult());
+
+
+ }
+}
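
For orientation, a rough sketch of how this processor might be invoked from a Camel route (the
endpoint URI and wiring are assumptions for illustration; the actual route definitions are configured
elsewhere in this change):

    import org.apache.camel.builder.RouteBuilder;
    import org.onap.aai.sparky.subscription.SubscriptionServiceProcessor;

    public class SubscriptionRouteSketch extends RouteBuilder {

      private final SubscriptionServiceProcessor subscriptionProcessor;

      public SubscriptionRouteSketch(SubscriptionServiceProcessor subscriptionProcessor) {
        this.subscriptionProcessor = subscriptionProcessor;
      }

      @Override
      public void configure() {
        // getSubscription(Exchange) places the subscription JSON on the out message body
        from("servlet:///subscription")
            .bean(subscriptionProcessor, "getSubscription");
      }
    }
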
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/config/SubscriptionConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/config/SubscriptionConfig.java
new file mode 100644
index 0000000..37c6c4d
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/config/SubscriptionConfig.java
@@ -0,0 +1,139 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.subscription.config;
+
+import java.util.Collection;
+
+
+/**
+ * The Class SubscriptionConfig.
+ */
+public class SubscriptionConfig {
+
+ private static final String EMPTY_TARGET = "";
+ private static final String EMPTY_ORIGIN = "";
+ private static final String EMPTY_MESSAGE_TYPE = "";
+ private static final String EMPTY_TOPIC = "";
+
+
+ private String subscriptionTarget = EMPTY_TARGET;
+ private String subscriptionOrigin = EMPTY_ORIGIN;
+ private String subscriptionMessageType = EMPTY_MESSAGE_TYPE;
+ private String subscriptionTopic = EMPTY_TOPIC;
+
+ private String launchOITarget = EMPTY_TARGET;
+ private String launchOIOrigin = EMPTY_ORIGIN;
+ private String launchOIMessageType = EMPTY_MESSAGE_TYPE;
+ private String launchOITopic = EMPTY_TOPIC;
+
+ private Boolean isLaunchOIEnabled = false;
+
+ private Collection<String> annEntitiyTypes;
+
+ /**
+ * Instantiates a new Subscription config.
+ */
+ public SubscriptionConfig() {}
+
+ public String getSubscriptionTarget() {
+ return subscriptionTarget;
+ }
+
+ public void setSubscriptionTarget(String target) {
+ this.subscriptionTarget = target;
+ }
+
+ public String getSubscriptionOrigin() {
+ return subscriptionOrigin;
+ }
+
+ public void setSubscriptionOrigin(String origin) {
+ this.subscriptionOrigin = origin;
+ }
+
+ public String getSubscriptionMessageType() {
+ return subscriptionMessageType;
+ }
+
+ public void setSubscriptionMessageType(String messageType) {
+ this.subscriptionMessageType = messageType;
+ }
+
+ public String getSubscriptionTopic() {
+ return subscriptionTopic;
+ }
+
+ public void setSubscriptionTopic(String topic) {
+ this.subscriptionTopic = topic;
+ }
+
+ public String getLaunchOITarget() {
+ return launchOITarget;
+ }
+
+ public void setLaunchOITarget(String launchOITarget) {
+ this.launchOITarget = launchOITarget;
+ }
+
+ public String getLaunchOIOrigin() {
+ return launchOIOrigin;
+ }
+
+ public void setLaunchOIOrigin(String launchOIOrigin) {
+ this.launchOIOrigin = launchOIOrigin;
+ }
+
+ public String getLaunchOIMessageType() {
+ return launchOIMessageType;
+ }
+
+ public void setLaunchOIMessageType(String launchOIMessageType) {
+ this.launchOIMessageType = launchOIMessageType;
+ }
+
+ public String getLaunchOITopic() {
+ return launchOITopic;
+ }
+
+ public void setLaunchOITopic(String launchOITopic) {
+ this.launchOITopic = launchOITopic;
+ }
+
+ public Collection<String> getAnnEntitiyTypes() {
+ return annEntitiyTypes;
+ }
+
+ public void setAnnEntitiyTypes(Collection<String> annEntitiyTypes) {
+ this.annEntitiyTypes = annEntitiyTypes;
+ }
+
+ public Boolean getIsLaunchOIEnabled() {
+ return isLaunchOIEnabled;
+ }
+
+ public void setIsLaunchOIEnabled(Boolean isLaunchOIEnabled) {
+ this.isLaunchOIEnabled = isLaunchOIEnabled;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Message.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Message.java
new file mode 100644
index 0000000..89489f3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Message.java
@@ -0,0 +1,60 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.subscription.payload.entity;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+
+@JsonInclude(JsonInclude.Include.NON_NULL)
+@JsonPropertyOrder({ "applicationName", "payload" })
+public class Message {
+
+ @JsonProperty("applicationName")
+ private String applicationName;
+ @JsonProperty("payload")
+ private Payload payload;
+
+ @JsonProperty("applicationName")
+ public String getApplicationName() {
+ return applicationName;
+ }
+
+ @JsonProperty("applicationName")
+ public void setApplicationName(String applicationName) {
+ this.applicationName = applicationName;
+ }
+
+ @JsonProperty("payload")
+ public Payload getPayload() {
+ return payload;
+ }
+
+ @JsonProperty("payload")
+ public void setPayload(Payload payload) {
+ this.payload = payload;
+ }
+
+} \ No newline at end of file
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayload.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayload.java
new file mode 100644
index 0000000..eae1df1
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayload.java
@@ -0,0 +1,128 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.subscription.payload.entity;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.onap.aai.sparky.subscription.config.SubscriptionConfig;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+@JsonInclude(JsonInclude.Include.NON_NULL)
+@JsonPropertyOrder({ "target", "origin", "messageType", "topic", "message" })
+public class ObjectInspectorPayload {
+
+ @JsonProperty("target")
+ private String target;
+ @JsonProperty("origin")
+ private String origin;
+ @JsonProperty("messageType")
+ private String messageType;
+ @JsonProperty("topic")
+ private String topic;
+ @JsonProperty("message")
+ private Message message;
+
+ @JsonProperty("target")
+ public String getTarget() {
+ return target;
+ }
+
+ @JsonProperty("target")
+ public void setTarget(String target) {
+ this.target = target;
+ }
+
+ @JsonProperty("origin")
+ public String getOrigin() {
+ return origin;
+ }
+
+ @JsonProperty("origin")
+ public void setOrigin(String origin) {
+ this.origin = origin;
+ }
+
+ @JsonProperty("messageType")
+ public String getMessageType() {
+ return messageType;
+ }
+
+ @JsonProperty("messageType")
+ public void setMessageType(String messageType) {
+ this.messageType = messageType;
+ }
+
+ @JsonProperty("topic")
+ public String getTopic() {
+ return topic;
+ }
+
+ @JsonProperty("topic")
+ public void setTopic(String topic) {
+ this.topic = topic;
+ }
+
+ @JsonProperty("message")
+ public Message getMessage() {
+ return message;
+ }
+
+ @JsonProperty("message")
+ public void setMessage(Message message) {
+ this.message = message;
+ }
+
+ private static ObjectInspectorPayload lic;
+ public static ObjectInspectorPayload getOIPayload(SubscriptionConfig subscriptionConf) throws JsonParseException, JsonMappingException, IOException{
+ if(lic == null){
+ ObjectMapper mapper = new ObjectMapper();
+ lic = mapper.readValue(new File(SparkyConstants.SUBSCRIPTION_OI_MAPPING), ObjectInspectorPayload.class);
+ lic.initializeOIPayload(subscriptionConf);
+ }
+
+ return lic;
+ }
+
+ private void initializeOIPayload(SubscriptionConfig subscriptionConf) {
+ // Plain field assignments from configuration; no checked exceptions to handle here
+ lic.setOrigin(subscriptionConf.getLaunchOIOrigin());
+ lic.setTarget(subscriptionConf.getLaunchOITarget());
+ lic.setTopic(subscriptionConf.getLaunchOITopic());
+ lic.setMessageType(subscriptionConf.getLaunchOIMessageType());
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Params.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Params.java
new file mode 100644
index 0000000..e3bb4c7
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Params.java
@@ -0,0 +1,60 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.subscription.payload.entity;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+
+@JsonInclude(JsonInclude.Include.NON_NULL)
+@JsonPropertyOrder({ "contexts", "objectName", "externalClassId"})
+public class Params {
+
+ @JsonProperty("objectName")
+ private String objectName;
+ @JsonProperty("externalClassId")
+ private String externalClassId;
+
+ @JsonProperty("objectName")
+ public String getObjectName() {
+ return objectName;
+ }
+
+ @JsonProperty("objectName")
+ public void setObjectName(String objectName) {
+ this.objectName = objectName;
+ }
+
+ @JsonProperty("externalClassId")
+ public String getExternalClassId() {
+ return externalClassId;
+ }
+
+ @JsonProperty("externalClassId")
+ public void setExternalClassId(String externalClassId) {
+ this.externalClassId = externalClassId;
+ }
+
+} \ No newline at end of file
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Payload.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Payload.java
new file mode 100644
index 0000000..71db011
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Payload.java
@@ -0,0 +1,60 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.subscription.payload.entity;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+
+@JsonInclude(JsonInclude.Include.NON_NULL)
+@JsonPropertyOrder({ "action", "params" })
+public class Payload {
+
+ @JsonProperty("action")
+ private String action;
+ @JsonProperty("params")
+ private Params params;
+
+ @JsonProperty("action")
+ public String getAction() {
+ return action;
+ }
+
+ @JsonProperty("action")
+ public void setAction(String action) {
+ this.action = action;
+ }
+
+ @JsonProperty("params")
+ public Params getParams() {
+ return params;
+ }
+
+ @JsonProperty("params")
+ public void setParams(Params params) {
+ this.params = params;
+ }
+
+}
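
Taken together with ObjectInspectorPayload, Message and Params above, the @JsonPropertyOrder
annotations describe a nested "launch Object Inspector" payload of the following shape (all values
are placeholders):

    {
      "target": "...",
      "origin": "...",
      "messageType": "...",
      "topic": "...",
      "message": {
        "applicationName": "...",
        "payload": {
          "action": "...",
          "params": {
            "objectName": "...",
            "externalClassId": "..."
          }
        }
      }
    }
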
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/services/SubscriptionService.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/services/SubscriptionService.java
new file mode 100644
index 0000000..78f1d3f
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/services/SubscriptionService.java
@@ -0,0 +1,65 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.subscription.services;
+
+import org.json.JSONObject;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.subscription.config.SubscriptionConfig;
+
+
+public class SubscriptionService {
+
+ SubscriptionConfig config;
+
+ public SubscriptionService(SubscriptionConfig subscriptionConfig) {
+ this.config = subscriptionConfig;
+ }
+
+ public OperationResult buildSubscriptionPayload() throws Exception {
+ OperationResult returnValue = new OperationResult();
+ returnValue.setResultCode(200);
+ JSONObject subscriptionResponse = new JSONObject();
+ JSONObject subscriptionDetails = new JSONObject();
+
+
+ if (config.getSubscriptionTarget().isEmpty()
+ && config.getSubscriptionTopic().isEmpty()
+ && config.getSubscriptionMessageType().isEmpty()
+ && config.getSubscriptionOrigin().isEmpty()) {
+ subscriptionResponse.put("subscriptionEnabled", false);
+ } else {
+ subscriptionResponse.put("subscriptionEnabled", true);
+ subscriptionDetails.put("target", config.getSubscriptionTarget());
+ subscriptionDetails.put("topic", config.getSubscriptionTopic());
+ subscriptionDetails.put("messageType", config.getSubscriptionMessageType());
+ subscriptionDetails.put("origin", config.getSubscriptionOrigin());
+ }
+
+ subscriptionResponse.put("subscriptionDetails", subscriptionDetails);
+ returnValue.setResult(subscriptionResponse.toString());
+
+ return returnValue;
+ }
+}
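
A usage sketch for the service above, assuming a populated SubscriptionConfig bean named subscriptionConfig:

    SubscriptionService subscriptionService = new SubscriptionService(subscriptionConfig);
    OperationResult result = subscriptionService.buildSubscriptionPayload();
    // result.getResult() holds JSON such as
    // {"subscriptionEnabled":true,
    //  "subscriptionDetails":{"target":"...","topic":"...","messageType":"...","origin":"..."}}
    // or, when every subscription config value is empty,
    // {"subscriptionEnabled":false,"subscriptionDetails":{}}
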
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java
new file mode 100644
index 0000000..a2b750b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java
@@ -0,0 +1,524 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import java.util.EnumSet;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.dal.aai.ActiveInventoryEntityStatistics;
+import org.onap.aai.sparky.dal.aai.ActiveInventoryProcessingExceptionStatistics;
+import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchEntityStatistics;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+import org.onap.aai.sparky.dal.rest.RestOperationalStatistics;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * The Class AbstractEntitySynchronizer.
+ *
+ * @author davea.
+ */
+public abstract class AbstractEntitySynchronizer {
+
+ protected static final int VERSION_CONFLICT_EXCEPTION_CODE = 409;
+  protected static final Integer RETRY_COUNT_PER_ENTITY_LIMIT = Integer.valueOf(3);
+
+ protected final Logger logger;
+ protected ObjectMapper mapper;
+ protected long syncDurationInMs;
+
+ /**
+ * The Enum StatFlag.
+ */
+ protected enum StatFlag {
+ AAI_REST_STATS, AAI_ENTITY_STATS, AAI_PROCESSING_EXCEPTION_STATS,
+ AAI_TASK_PROCESSING_STATS, ES_REST_STATS, ES_ENTITY_STATS, ES_TASK_PROCESSING_STATS
+ }
+
+ protected EnumSet<StatFlag> enabledStatFlags;
+
+ protected ElasticSearchAdapter elasticSearchAdapter;
+ protected ActiveInventoryAdapter aaiAdapter;
+
+ protected ExecutorService synchronizerExecutor;
+ protected ExecutorService aaiExecutor;
+ protected ExecutorService esExecutor;
+
+ private RestOperationalStatistics esRestStats;
+ protected ElasticSearchEntityStatistics esEntityStats;
+
+ private RestOperationalStatistics aaiRestStats;
+ protected ActiveInventoryEntityStatistics aaiEntityStats;
+ private ActiveInventoryProcessingExceptionStatistics aaiProcessingExceptionStats;
+
+ private TaskProcessingStats aaiTaskProcessingStats;
+ private TaskProcessingStats esTaskProcessingStats;
+
+ private TransactionRateMonitor aaiTransactionRateController;
+ private TransactionRateMonitor esTransactionRateController;
+
+ protected AtomicInteger aaiWorkOnHand;
+ protected AtomicInteger esWorkOnHand;
+ protected String synchronizerName;
+
+ protected abstract boolean isSyncDone();
+ protected boolean shouldSkipSync;
+
+ public String getActiveInventoryStatisticsReport() {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) {
+ sb.append("\n\n ").append("REST Operational Stats:");
+ sb.append(aaiRestStats.getStatisticsReport());
+ }
+
+ if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) {
+ sb.append("\n\n ").append("Entity Stats:");
+ sb.append(aaiEntityStats.getStatisticsReport());
+ }
+
+ if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) {
+ sb.append("\n\n ").append("Processing Exception Stats:");
+ sb.append(aaiProcessingExceptionStats.getStatisticsReport());
+ }
+
+ return sb.toString();
+
+ }
+
+ public String getElasticSearchStatisticsReport() {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) {
+ sb.append("\n\n ").append("REST Operational Stats:");
+ sb.append(esRestStats.getStatisticsReport());
+ }
+
+ if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) {
+ sb.append("\n\n ").append("Entity Stats:");
+ sb.append(esEntityStats.getStatisticsReport());
+ }
+
+ return sb.toString();
+
+ }
+
+ /**
+ * Adds the active inventory stat report.
+ *
+   * @param sb the string builder to append the report to
+ */
+ private void addActiveInventoryStatReport(StringBuilder sb) {
+
+ if (sb == null) {
+ return;
+ }
+
+ sb.append("\n\n AAI");
+ sb.append(getActiveInventoryStatisticsReport());
+
+ double currentTps = 0;
+ if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) {
+ sb.append("\n\n ").append("Task Processor Stats:");
+ sb.append(aaiTaskProcessingStats.getStatisticsReport(false, " "));
+
+ currentTps = aaiTransactionRateController.getCurrentTps();
+
+ sb.append("\n ").append("Current TPS: ").append(currentTps);
+ }
+
+ sb.append("\n ").append("Current WOH: ").append(aaiWorkOnHand.get());
+
+ if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) {
+ if (currentTps > 0) {
+ double numMillisecondsToCompletion = (aaiWorkOnHand.get() / currentTps) * 1000;
+ sb.append("\n ").append("SyncDurationRemaining=")
+ .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion));
+ }
+ }
+
+ }
+
+ /**
+ * Adds the elastic stat report.
+ *
+   * @param sb the string builder to append the report to
+ */
+ private void addElasticStatReport(StringBuilder sb) {
+
+ if (sb == null) {
+ return;
+ }
+
+ sb.append("\n\n ELASTIC");
+ sb.append(getElasticSearchStatisticsReport());
+
+ double currentTps = 0;
+
+ if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) {
+ sb.append("\n\n ").append("Task Processor Stats:");
+ sb.append(esTaskProcessingStats.getStatisticsReport(false, " "));
+
+ currentTps = esTransactionRateController.getCurrentTps();
+
+ sb.append("\n ").append("Current TPS: ").append(currentTps);
+ }
+
+ sb.append("\n ").append("Current WOH: ").append(esWorkOnHand.get());
+
+ if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) {
+ if (currentTps > 0) {
+ double numMillisecondsToCompletion = (esWorkOnHand.get() / currentTps) * 1000;
+ sb.append("\n ").append("SyncDurationRemaining=")
+ .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion));
+ }
+ }
+
+
+ }
+
+ /**
+ * Gets the stat report.
+ *
+ * @param syncOpTimeInMs the sync op time in ms
+ * @param showFinalReport the show final report
+ * @return the stat report
+ */
+ protected String getStatReport(long syncOpTimeInMs, boolean showFinalReport) {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ sb.append("\n").append(synchronizerName + " Statistics: ( Sync Operation Duration = "
+ + NodeUtils.getDurationBreakdown(syncOpTimeInMs) + " )");
+
+ addActiveInventoryStatReport(sb);
+ addElasticStatReport(sb);
+
+ if (showFinalReport) {
+ sb.append("\n\n ").append("Sync Completed!\n");
+ } else {
+ sb.append("\n\n ").append("Sync in Progress...\n");
+ }
+
+ return sb.toString();
+
+ }
+
+ protected String indexName;
+ protected long syncStartedTimeStampInMs;
+
+ /**
+ * Instantiates a new abstract entity synchronizer.
+ *
+ * @param logger the logger
+ * @param syncName the sync name
+ * @param numSyncWorkers the num sync workers
+ * @param numActiveInventoryWorkers the num active inventory workers
+ * @param numElasticsearchWorkers the num elasticsearch workers
+ * @param indexName the index name
+ * @throws Exception the exception
+ */
+ protected AbstractEntitySynchronizer(Logger logger, String syncName, int numSyncWorkers,
+ int numActiveInventoryWorkers, int numElasticsearchWorkers, String indexName,
+ NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig)
+ throws Exception {
+ this.logger = logger;
+ this.synchronizerExecutor =
+ NodeUtils.createNamedExecutor(syncName + "-INTERNAL", numSyncWorkers, logger);
+ this.aaiExecutor =
+ NodeUtils.createNamedExecutor(syncName + "-AAI", numActiveInventoryWorkers, logger);
+ this.esExecutor =
+ NodeUtils.createNamedExecutor(syncName + "-ES", numElasticsearchWorkers, logger);
+ this.mapper = new ObjectMapper();
+ this.indexName = indexName;
+ this.esRestStats = new RestOperationalStatistics();
+ this.esEntityStats = new ElasticSearchEntityStatistics();
+ this.aaiRestStats = new RestOperationalStatistics();
+ this.aaiEntityStats = new ActiveInventoryEntityStatistics();
+ this.aaiProcessingExceptionStats = new ActiveInventoryProcessingExceptionStatistics();
+ this.aaiTaskProcessingStats =
+ new TaskProcessingStats(aaiStatConfig);
+ this.esTaskProcessingStats =
+ new TaskProcessingStats(esStatConfig);
+
+ this.aaiTransactionRateController =
+ new TransactionRateMonitor(numActiveInventoryWorkers, aaiStatConfig);
+ this.esTransactionRateController =
+ new TransactionRateMonitor(numElasticsearchWorkers, esStatConfig);
+
+ this.aaiWorkOnHand = new AtomicInteger(0);
+ this.esWorkOnHand = new AtomicInteger(0);
+
+ enabledStatFlags = EnumSet.allOf(StatFlag.class);
+
+    this.synchronizerName = "Abstract Entity Synchronizer";
+
+ String txnID = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnID, "AbstractEntitySynchronizer", "", "Sync", "");
+
+ this.shouldSkipSync = false;
+ this.syncStartedTimeStampInMs = System.currentTimeMillis();
+ this.syncDurationInMs = -1;
+ }
+
+ public boolean shouldSkipSync() {
+ return shouldSkipSync;
+ }
+
+ public void setShouldSkipSync(boolean shouldSkipSync) {
+ this.shouldSkipSync = shouldSkipSync;
+ }
+
+ /**
+ * Inc active inventory work on hand counter.
+ */
+ protected void incActiveInventoryWorkOnHandCounter() {
+ aaiWorkOnHand.incrementAndGet();
+ }
+
+ /**
+ * Dec active inventory work on hand counter.
+ */
+ protected void decActiveInventoryWorkOnHandCounter() {
+ aaiWorkOnHand.decrementAndGet();
+ }
+
+ /**
+ * Inc elastic search work on hand counter.
+ */
+ protected void incElasticSearchWorkOnHandCounter() {
+ esWorkOnHand.incrementAndGet();
+ }
+
+ /**
+ * Dec elastic search work on hand counter.
+ */
+ protected void decElasticSearchWorkOnHandCounter() {
+ esWorkOnHand.decrementAndGet();
+ }
+
+ /**
+ * Shutdown executors.
+ */
+ protected void shutdownExecutors() {
+ try {
+
+ if (synchronizerExecutor != null) {
+ synchronizerExecutor.shutdown();
+ }
+
+ if (aaiExecutor != null) {
+ aaiExecutor.shutdown();
+ }
+
+ if (esExecutor != null) {
+ esExecutor.shutdown();
+ }
+
+ } catch (Exception exc) {
+      logger.error(AaiUiMsgs.ERROR_SHUTDOWN_EXECUTORS, exc);
+ }
+ }
+
+ /**
+ * Clear cache.
+ */
+ public void clearCache() {}
+
+ public ElasticSearchAdapter getElasticSearchAdapter() {
+ return elasticSearchAdapter;
+ }
+
+ public void setElasticSearchAdapter(ElasticSearchAdapter elasticSearchAdapter) {
+ this.elasticSearchAdapter = elasticSearchAdapter;
+ }
+
+ public ActiveInventoryAdapter getAaiAdapter() {
+ return aaiAdapter;
+ }
+
+ public void setAaiAdapter(ActiveInventoryAdapter aaiAdapter) {
+ this.aaiAdapter = aaiAdapter;
+ }
+
+ public String getIndexName() {
+ return indexName;
+ }
+
+ public void setIndexName(String indexName) {
+ this.indexName = indexName;
+ }
+
+
+ /**
+ * Gets the response length.
+ *
+ * @param txn the txn
+ * @return the response length
+ */
+ private long getResponseLength(NetworkTransaction txn) {
+
+ if (txn == null) {
+ return -1;
+ }
+
+ OperationResult result = txn.getOperationResult();
+
+ if (result == null) {
+ return -1;
+ }
+
+ if (result.getResult() != null) {
+ return result.getResult().length();
+ }
+
+ return -1;
+ }
+
+ /**
+ * Update elastic search counters.
+ *
+ * @param method the method
+ * @param entityType the entity type
+   * @param or the operation result
+ */
+ protected void updateElasticSearchCounters(HttpMethod method, String entityType,
+ OperationResult or) {
+ updateElasticSearchCounters(new NetworkTransaction(method, entityType, or));
+ }
+
+ /**
+ * Update elastic search counters.
+ *
+ * @param txn the txn
+ */
+ protected void updateElasticSearchCounters(NetworkTransaction txn) {
+
+ if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) {
+ esRestStats.updateCounters(txn);
+ }
+
+ if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) {
+ esEntityStats.updateCounters(txn);
+ }
+
+ if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) {
+
+ esTransactionRateController.trackResponseTime(txn.getOpTimeInMs());
+
+ esTaskProcessingStats
+ .updateTaskResponseStatsHistogram(txn.getOpTimeInMs());
+ esTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs());
+
+      // the cost of the length calculation is unknown; revisit if it ever becomes a problem
+
+ long responsePayloadSizeInBytes = getResponseLength(txn);
+ if (responsePayloadSizeInBytes >= 0) {
+ esTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes);
+ }
+
+ esTaskProcessingStats
+ .updateTransactionsPerSecondHistogram((long) esTransactionRateController.getCurrentTps());
+ }
+ }
+
+ /**
+ * Update active inventory counters.
+ *
+ * @param method the method
+ * @param entityType the entity type
+   * @param or the operation result
+ */
+ protected void updateActiveInventoryCounters(HttpMethod method, String entityType,
+ OperationResult or) {
+ updateActiveInventoryCounters(new NetworkTransaction(method, entityType, or));
+ }
+
+ /**
+ * Update active inventory counters.
+ *
+ * @param txn the txn
+ */
+ protected void updateActiveInventoryCounters(NetworkTransaction txn) {
+
+ if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) {
+ aaiRestStats.updateCounters(txn);
+ }
+
+ if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) {
+ aaiEntityStats.updateCounters(txn);
+ }
+
+ if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) {
+ aaiProcessingExceptionStats.updateCounters(txn);
+ }
+
+ if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) {
+ aaiTransactionRateController
+ .trackResponseTime(txn.getOpTimeInMs());
+
+ aaiTaskProcessingStats
+ .updateTaskResponseStatsHistogram(txn.getOpTimeInMs());
+ aaiTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs());
+
+      // the cost of the length calculation is unknown; revisit if it ever becomes a problem
+
+ long responsePayloadSizeInBytes = getResponseLength(txn);
+ if (responsePayloadSizeInBytes >= 0) {
+ aaiTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes);
+ }
+
+ aaiTaskProcessingStats.updateTransactionsPerSecondHistogram(
+ (long) aaiTransactionRateController.getCurrentTps());
+ }
+ }
+
+ /**
+ * Reset counters.
+ */
+ protected void resetCounters() {
+ aaiRestStats.reset();
+ aaiEntityStats.reset();
+ aaiProcessingExceptionStats.reset();
+
+ esRestStats.reset();
+ esEntityStats.reset();
+ }
+
+}
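
A minimal concrete subclass sketch for the abstract synchronizer above; the class name, sync name, worker counts and index name are assumptions, and real synchronizers wire these values from Spring configuration:

    public class ExampleEntitySynchronizer extends AbstractEntitySynchronizer {

      public ExampleEntitySynchronizer(NetworkStatisticsConfig aaiStatConfig,
          NetworkStatisticsConfig esStatConfig) throws Exception {
        // org.onap.aai.cl.eelf.LoggerFactory supplies the Logger expected by the parent constructor
        super(LoggerFactory.getInstance().getLogger(ExampleEntitySynchronizer.class),
            "ExampleEntitySync", 2, 5, 5, "example-index", aaiStatConfig, esStatConfig);
      }

      @Override
      protected boolean isSyncDone() {
        // the sync is complete once no AAI or Elasticsearch work remains outstanding
        return aaiWorkOnHand.get() == 0 && esWorkOnHand.get() == 0;
      }
    }
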
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java
new file mode 100644
index 0000000..3255e39
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java
@@ -0,0 +1,604 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.entity.ObjectIdCollection;
+import org.onap.aai.sparky.sync.entity.SearchableEntity;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * The Class ElasticSearchIndexCleaner.
+ */
+public class ElasticSearchIndexCleaner implements IndexCleaner {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(ElasticSearchIndexCleaner.class);
+
+ private static final String BULK_OP_LINE_TEMPLATE = "%s\n";
+ private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
+
+ private ObjectIdCollection before;
+ private ObjectIdCollection after;
+
+ private ObjectMapper mapper;
+ private ElasticSearchAdapter esAdapter;
+ private ElasticSearchEndpointConfig endpointConfig;
+ private ElasticSearchSchemaConfig schemaConfig;
+
+ /**
+ * Instantiates a new elastic search index cleaner.
+ *
+   * @param esAdapter the Elasticsearch adapter used for REST operations
+   * @param endpointConfig the Elasticsearch endpoint configuration
+   * @param schemaConfig the index schema configuration
+   */
+  public ElasticSearchIndexCleaner(ElasticSearchAdapter esAdapter,
+      ElasticSearchEndpointConfig endpointConfig, ElasticSearchSchemaConfig schemaConfig) {
+ this.esAdapter = esAdapter;
+ this.before = null;
+ this.after = null;
+ this.endpointConfig = endpointConfig;
+ this.schemaConfig = schemaConfig;
+ this.mapper = new ObjectMapper();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePreOperationCollection()
+ */
+ @Override
+ public OperationState populatePreOperationCollection() {
+
+ try {
+ before = retrieveAllDocumentIdentifiers();
+ return OperationState.OK;
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, schemaConfig.getIndexName(), exc.getMessage());
+ return OperationState.ERROR;
+ }
+
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePostOperationCollection()
+ */
+ @Override
+ public OperationState populatePostOperationCollection() {
+ try {
+ after = retrieveAllDocumentIdentifiers();
+ return OperationState.OK;
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, schemaConfig.getIndexName(), exc.getMessage());
+ return OperationState.ERROR;
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexCleaner#performCleanup()
+ */
+ @Override
+ public OperationState performCleanup() {
+ LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP, schemaConfig.getIndexName());
+
+ int sizeBefore = before.getSize();
+ int sizeAfter = after.getSize();
+
+ LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP_SIZE, String.valueOf(sizeBefore),
+ String.valueOf(sizeAfter));
+
+ /*
+ * If the processedImportIds size <= 0, then something has failed in the sync operation and we
+ * shouldn't do the selective delete right now.
+ */
+
+ if (sizeAfter > 0) {
+
+ Collection<String> presyncIds = before.getImportedObjectIds();
+ presyncIds.removeAll(after.getImportedObjectIds());
+
+ try {
+ LOG.info(AaiUiMsgs.ES_SYNC_SELECTIVE_DELETE, schemaConfig.getIndexName(), schemaConfig.getIndexDocType(),
+ String.valueOf(presyncIds.size()));
+
+ ObjectIdCollection bulkIds = new ObjectIdCollection();
+
+ Iterator<String> it = presyncIds.iterator();
+ int numItemsInBulkRequest = 0;
+ int numItemsRemainingToBeDeleted = presyncIds.size();
+
+ while (it.hasNext()) {
+
+ bulkIds.addObjectId(it.next());
+ numItemsInBulkRequest++;
+
+ if (numItemsInBulkRequest >= endpointConfig.getScrollContextBatchRequestSize()) {
+ LOG.info(AaiUiMsgs.ES_BULK_DELETE, schemaConfig.getIndexName(), String.valueOf(bulkIds.getSize()));
+ bulkDelete(bulkIds.getImportedObjectIds());
+ numItemsRemainingToBeDeleted -= numItemsInBulkRequest;
+ numItemsInBulkRequest = 0;
+ bulkIds.clear();
+ }
+ }
+
+ if (numItemsRemainingToBeDeleted > 0) {
+ LOG.info(AaiUiMsgs.ES_BULK_DELETE, schemaConfig.getIndexName(), String.valueOf(bulkIds.getSize()));
+ bulkDelete(bulkIds.getImportedObjectIds());
+ }
+
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_BULK_DELETE_ERROR, schemaConfig.getIndexName(), exc.getLocalizedMessage());
+
+ }
+ }
+
+ return OperationState.OK;
+ }
+
+ @Override
+ public String getIndexName() {
+ return schemaConfig.getIndexName();
+ }
+
+ /**
+ * Builds the initial scroll request payload.
+ *
+ * @param numItemsToGetPerRequest the num items to get per request
+ * @param fieldList the field list
+ * @return the string
+ * @throws JsonProcessingException the json processing exception
+ */
+ protected String buildInitialScrollRequestPayload(int numItemsToGetPerRequest,
+ List<String> fieldList) throws JsonProcessingException {
+
+ ObjectNode rootNode = mapper.createObjectNode();
+ rootNode.put("size", numItemsToGetPerRequest);
+
+ ArrayNode fields = mapper.createArrayNode();
+
+ for (String f : fieldList) {
+ fields.add(f);
+ }
+
+ rootNode.set("fields", fields);
+
+ ObjectNode queryNode = mapper.createObjectNode();
+ queryNode.set("match_all", mapper.createObjectNode());
+
+ rootNode.set("query", queryNode);
+
+ return mapper.writeValueAsString(rootNode);
+
+ }
+
+ /**
+ * Builds the subsequent scroll context request payload.
+ *
+ * @param scrollId the scroll id
+ * @param contextTimeToLiveInMinutes the context time to live in minutes
+ * @return the string
+ * @throws JsonProcessingException the json processing exception
+ */
+ protected String buildSubsequentScrollContextRequestPayload(String scrollId,
+ int contextTimeToLiveInMinutes) throws JsonProcessingException {
+
+ ObjectNode rootNode = mapper.createObjectNode();
+
+ rootNode.put("scroll", contextTimeToLiveInMinutes + "m");
+ rootNode.put("scroll_id", scrollId);
+
+ return mapper.writeValueAsString(rootNode);
+
+ }
+
+ /**
+ * Parses the elastic search result.
+ *
+ * @param jsonResult the json result
+ * @return the json node
+ * @throws JsonProcessingException the json processing exception
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ protected JsonNode parseElasticSearchResult(String jsonResult)
+ throws JsonProcessingException, IOException {
+ ObjectMapper mapper = new ObjectMapper();
+ return mapper.readTree(jsonResult);
+ }
+
+ /**
+ * Lookup index doc.
+ *
+ * @param ids the ids
+ * @param docs the docs
+ * @return the array list
+ */
+ protected ArrayList<SearchableEntity> lookupIndexDoc(ArrayList<String> ids,
+ List<SearchableEntity> docs) {
+ ArrayList<SearchableEntity> objs = new ArrayList<SearchableEntity>();
+
+ if (ids != null && docs != null) {
+ for (SearchableEntity d : docs) {
+ if (ids.contains(d.getId())) {
+ objs.add(d);
+ }
+ }
+ }
+
+ return objs;
+ }
+
+ /**
+ * Builds the delete data object.
+ *
+ * @param index the index
+ * @param type the type
+ * @param id the id
+ * @return the object node
+ */
+ protected ObjectNode buildDeleteDataObject(String index, String type, String id) {
+
+ ObjectNode indexDocProperties = mapper.createObjectNode();
+
+ indexDocProperties.put("_index", index);
+ indexDocProperties.put("_type", type);
+ indexDocProperties.put("_id", id);
+
+ ObjectNode rootNode = mapper.createObjectNode();
+ rootNode.set("delete", indexDocProperties);
+
+ return rootNode;
+ }
+
+ /**
+   * This method might appear to be a little strange; it is simply an optimization that walks an
+   * elided JsonNode key path and retrieves the node at the end of the path, if it exists.
+ *
+ * @param startNode the start node
+ * @param fieldPath the field path
+ * @return the node path
+ */
+ protected JsonNode getNodePath(JsonNode startNode, String... fieldPath) {
+
+ JsonNode jsonNode = null;
+
+ for (String field : fieldPath) {
+ if (jsonNode == null) {
+ jsonNode = startNode.get(field);
+ } else {
+ jsonNode = jsonNode.get(field);
+ }
+
+ /*
+ * This is our safety net in case any intermediate path returns a null
+ */
+
+ if (jsonNode == null) {
+ return null;
+ }
+
+ }
+
+ return jsonNode;
+ }
+
+ /**
+ * Gets the full url.
+ *
+ * @param resourceUrl the resource url
+ * @return the full url
+ */
+ private String getFullUrl(String resourceUrl) {
+ return String.format("http://%s:%s%s", endpointConfig.getEsIpAddress(),
+ endpointConfig.getEsServerPort(), resourceUrl);
+ }
+
+ /**
+ * Retrieve all document identifiers.
+ *
+ * @return the object id collection
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public ObjectIdCollection retrieveAllDocumentIdentifiers() throws IOException {
+
+ ObjectIdCollection currentDocumentIds = new ObjectIdCollection();
+
+ long opStartTimeInMs = System.currentTimeMillis();
+
+ List<String> fields = new ArrayList<String>();
+ fields.add("_id");
+ // fields.add("entityType");
+
+ String scrollRequestPayload =
+ buildInitialScrollRequestPayload(endpointConfig.getScrollContextBatchRequestSize(), fields);
+
+ final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName()+ "/" + schemaConfig.getIndexDocType() + "/_search?scroll="
+ + endpointConfig.getScrollContextTimeToLiveInMinutes() + "m");
+
+ OperationResult result =
+ esAdapter.doPost(fullUrlStr, scrollRequestPayload, MediaType.APPLICATION_JSON_TYPE);
+
+ if (result.wasSuccessful()) {
+
+ JsonNode rootNode = parseElasticSearchResult(result.getResult());
+
+ /*
+ * Check the result for success / failure, and enumerate all the index ids that resulted in
+ * success, and ignore the ones that failed or log them so we have a record of the failure.
+ */
+ int totalRecordsAvailable = 0;
+ String scrollId = null;
+ int numRecordsFetched = 0;
+
+ if (rootNode != null) {
+
+ scrollId = getFieldValue(rootNode, "_scroll_id");
+ final String tookStr = getFieldValue(rootNode, "took");
+ int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr);
+ boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out"));
+
+ if (timedOut) {
+ LOG.error(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "all document Identifiers",
+ String.valueOf(tookInMs));
+ } else {
+ LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "all document Identifiers",
+ String.valueOf(tookInMs));
+ }
+
+ JsonNode hitsNode = rootNode.get("hits");
+ totalRecordsAvailable = Integer.parseInt(hitsNode.get("total").asText());
+
+ LOG.info(AaiUiMsgs.COLLECT_TOTAL, "all document Identifiers",
+ String.valueOf(totalRecordsAvailable));
+
+ /*
+ * Collect all object ids
+ */
+
+ ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits");
+
+ Iterator<JsonNode> nodeIterator = hitsArray.iterator();
+
+ String key = null;
+ String value = null;
+ JsonNode jsonNode = null;
+
+ while (nodeIterator.hasNext()) {
+
+ jsonNode = nodeIterator.next();
+
+ key = getFieldValue(jsonNode, "_id");
+
+ if (key != null) {
+ currentDocumentIds.addObjectId(key);
+ }
+
+ }
+
+ int totalRecordsRemainingToFetch = (totalRecordsAvailable - numRecordsFetched);
+
+ int numRequiredAdditionalFetches =
+ (totalRecordsRemainingToFetch / endpointConfig.getScrollContextBatchRequestSize());
+
+ /*
+ * Do an additional fetch for the remaining items (if needed)
+ */
+
+ if (totalRecordsRemainingToFetch % endpointConfig.getScrollContextBatchRequestSize() != 0) {
+ numRequiredAdditionalFetches += 1;
+ }
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.SYNC_NUMBER_REQ_FETCHES,
+ String.valueOf(numRequiredAdditionalFetches));
+ }
+
+
+ for (int x = 0; x < numRequiredAdditionalFetches; x++) {
+
+ if (collectItemsFromScrollContext(scrollId, currentDocumentIds) != OperationState.OK) {
+ // abort the whole thing because now we can't reliably cleanup the orphans.
+ throw new IOException(
+ "Failed to collect pre-sync doc collection from index. Aborting operation");
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.SYNC_NUMBER_TOTAL_FETCHES,
+ String.valueOf(currentDocumentIds.getSize()),
+ String.valueOf(totalRecordsAvailable));
+ }
+
+ }
+
+ }
+
+ } else {
+ // scroll context get failed, nothing else to do
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, result.toString());
+ }
+
+ LOG.info(AaiUiMsgs.COLLECT_TOTAL_TIME, "all document Identifiers",
+ String.valueOf((System.currentTimeMillis() - opStartTimeInMs)));
+
+ return currentDocumentIds;
+
+ }
+
+ /**
+ * Collect items from scroll context.
+ *
+ * @param scrollId the scroll id
+ * @param objectIds the object ids
+ * @return the operation state
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ private OperationState collectItemsFromScrollContext(String scrollId,
+ ObjectIdCollection objectIds) throws IOException {
+
+ String requestPayload = buildSubsequentScrollContextRequestPayload(scrollId,
+ endpointConfig.getScrollContextTimeToLiveInMinutes());
+
+ final String fullUrlStr = getFullUrl("/_search/scroll");
+
+ OperationResult opResult =
+ esAdapter.doPost(fullUrlStr, requestPayload, MediaType.APPLICATION_JSON_TYPE);
+
+ if (opResult.getResultCode() >= 300) {
+ LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, opResult.getResult());
+ return OperationState.ERROR;
+ }
+
+    JsonNode rootNode = parseElasticSearchResult(opResult.getResult());
+
+    /*
+     * Check the result for success / failure, and enumerate all the index ids that resulted in
+     * success, and ignore the ones that failed or log them so we have a record of the failure.
+     */
+
+    if (rootNode != null) {
+
+      boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out"));
+      final String tookStr = getFieldValue(rootNode, "took");
+      int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr);
+
+      JsonNode hitsNode = rootNode.get("hits");
+
+ if (timedOut) {
+ LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "Scroll Context", String.valueOf(tookInMs));
+ } else {
+ LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "Scroll Context", String.valueOf(tookInMs));
+ }
+
+ /*
+ * Collect all object ids
+ */
+
+ ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits");
+ String key = null;
+ String value = null;
+ JsonNode jsonNode = null;
+
+ Iterator<JsonNode> nodeIterator = hitsArray.iterator();
+
+ while (nodeIterator.hasNext()) {
+
+ jsonNode = nodeIterator.next();
+
+ key = getFieldValue(jsonNode, "_id");
+
+ if (key != null) {
+ objectIds.addObjectId(key);
+
+ }
+
+ }
+ }
+
+ return OperationState.OK;
+ }
+
+ /**
+ * Gets the field value.
+ *
+ * @param node the node
+ * @param fieldName the field name
+ * @return the field value
+ */
+ protected String getFieldValue(JsonNode node, String fieldName) {
+
+ JsonNode field = node.get(fieldName);
+
+ if (field != null) {
+ return field.asText();
+ }
+
+ return null;
+
+ }
+
+ /**
+ * Bulk delete.
+ *
+ * @param docIds the doc ids
+ * @return the operation result
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public OperationResult bulkDelete(Collection<String> docIds) throws IOException {
+
+    if (docIds == null || docIds.isEmpty()) {
+      LOG.info(AaiUiMsgs.ES_BULK_DELETE_SKIP);
+      return new OperationResult(500,
+          "Skipping bulkDelete() operation because the list of docs to delete is empty");
+    }
+
+ LOG.info(AaiUiMsgs.ES_BULK_DELETE_START, String.valueOf(docIds.size()));
+
+ StringBuilder sb = new StringBuilder(128);
+
+ for (String id : docIds) {
+ sb.append(String.format(BULK_OP_LINE_TEMPLATE,
+ buildDeleteDataObject(schemaConfig.getIndexName(), schemaConfig.getIndexDocType(), id)));
+ }
+
+ sb.append("\n");
+
+ final String fullUrlStr = getFullUrl("/_bulk");
+
+ return esAdapter.doPost(fullUrlStr, sb.toString(), MediaType.APPLICATION_FORM_URLENCODED_TYPE);
+
+ }
+
+}
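
An orphan-cleanup lifecycle sketch for the cleaner above; esAdapter, endpointConfig and schemaConfig are assumed to come from the surrounding Spring wiring:

    IndexCleaner cleaner = new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig);
    cleaner.populatePreOperationCollection();    // snapshot of document ids before the sync
    // ... run the index synchronizer for the same index here ...
    cleaner.populatePostOperationCollection();   // snapshot of document ids after the sync
    cleaner.performCleanup();                    // bulk-deletes ids seen before but not after
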
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java
new file mode 100644
index 0000000..a2ff6cf
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java
@@ -0,0 +1,97 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import java.io.IOException;
+
+import org.onap.aai.sparky.config.SparkyResourceLoader;
+import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+public class ElasticSearchSchemaFactory {
+
+ private static final String SETTINGS = "settings";
+ private static final String MAPPINGS = "mappings";
+
+ private static ObjectMapper mapper = new ObjectMapper();
+ private SparkyResourceLoader resourceLoader;
+
+ public String getIndexSchema(ElasticSearchSchemaConfig schemaConfig)
+ throws ElasticSearchOperationException {
+
+ JsonNode esSettingsNode = null;
+ JsonNode esMappingsNodes = null;
+
+ try {
+
+ if (schemaConfig.getIndexSettingsFileName() != null) {
+ esSettingsNode = mapper
+ .readTree(resourceLoader.getResourceAsString(schemaConfig.getIndexSettingsFileName(),true));
+ }
+
+ if (schemaConfig.getIndexMappingsFileName() != null) {
+ esMappingsNodes = mapper
+ .readTree(resourceLoader.getResourceAsString(schemaConfig.getIndexMappingsFileName(),true));
+ }
+
+ } catch (IOException e1) {
+
+ throw new ElasticSearchOperationException("Caught an exception building initial ES index. Error: " + e1.getMessage());
+ }
+
+ ObjectNode esConfig = null;
+
+ ObjectNode mappings =
+ (ObjectNode) mapper.createObjectNode().set(schemaConfig.getIndexDocType(), esMappingsNodes);
+
+ if (esSettingsNode == null) {
+ esConfig = (ObjectNode) mapper.createObjectNode().set(MAPPINGS, mappings);
+ } else {
+ esConfig = (ObjectNode) mapper.createObjectNode().set(SETTINGS, esSettingsNode);
+ esConfig.set(MAPPINGS, mappings);
+ }
+
+ try {
+ return mapper.writeValueAsString(esConfig);
+ } catch (JsonProcessingException exc) {
+ throw new ElasticSearchOperationException("Error getting object node as string", exc);
+ }
+
+ }
+
+ public SparkyResourceLoader getResourceLoader() {
+ return resourceLoader;
+ }
+
+ public void setResourceLoader(SparkyResourceLoader resourceLoader) {
+ this.resourceLoader = resourceLoader;
+ }
+
+}
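
A sketch of producing the index schema document with the factory above; resourceLoader and schemaConfig are assumed to be injected from Spring configuration:

    ElasticSearchSchemaFactory schemaFactory = new ElasticSearchSchemaFactory();
    schemaFactory.setResourceLoader(resourceLoader);
    String tableConfigJson = schemaFactory.getIndexSchema(schemaConfig);
    // tableConfigJson looks like {"settings":{...},"mappings":{"<docType>":{...}}} and can be
    // handed to IndexIntegrityValidator.createOrRepair() in the file below
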
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java
new file mode 100644
index 0000000..bc47fa8
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java
@@ -0,0 +1,57 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+
+/**
+ * The Interface IndexCleaner.
+ */
+public interface IndexCleaner {
+
+ /**
+ * Populate pre operation collection.
+ *
+ * @return the operation state
+ */
+ public OperationState populatePreOperationCollection();
+
+ /**
+ * Populate post operation collection.
+ *
+ * @return the operation state
+ */
+ public OperationState populatePostOperationCollection();
+
+ /**
+ * Perform cleanup.
+ *
+ * @return the operation state
+ */
+ public OperationState performCleanup();
+
+ public String getIndexName();
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java
new file mode 100644
index 0000000..2c1bb16
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java
@@ -0,0 +1,178 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+
+/**
+ * The Class IndexIntegrityValidator.
+ */
+public class IndexIntegrityValidator implements IndexValidator {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(IndexIntegrityValidator.class);
+
+ private ElasticSearchEndpointConfig endpointConfig;
+ private ElasticSearchSchemaConfig schemaConfig;
+ private String tableConfigJson;
+
+ private final ElasticSearchAdapter esAdapter;
+
+ /**
+ * Instantiates a new index integrity validator.
+ *
+   * @param esAdapter the Elasticsearch adapter used for REST operations
+   * @param esSchemaConfig the index schema configuration
+   * @param esEndpointConfig the Elasticsearch endpoint configuration
+   * @param tableConfigJson the table config json
+ */
+ public IndexIntegrityValidator(ElasticSearchAdapter esAdapter,
+ ElasticSearchSchemaConfig esSchemaConfig, ElasticSearchEndpointConfig esEndpointConfig,
+ String tableConfigJson) {
+
+ this.esAdapter = esAdapter;
+ this.schemaConfig = esSchemaConfig;
+ this.endpointConfig = esEndpointConfig;
+ this.tableConfigJson = tableConfigJson;
+ }
+
+ public ElasticSearchEndpointConfig getEndpointConfig() {
+ return endpointConfig;
+ }
+
+ public void setEndpointConfig(ElasticSearchEndpointConfig endpointConfig) {
+ this.endpointConfig = endpointConfig;
+ }
+
+ public ElasticSearchSchemaConfig getSchemaConfig() {
+ return schemaConfig;
+ }
+
+ public void setSchemaConfig(ElasticSearchSchemaConfig schemaConfig) {
+ this.schemaConfig = schemaConfig;
+ }
+
+ public ElasticSearchAdapter getEsAdapter() {
+ return esAdapter;
+ }
+
+ @Override
+ public String getIndexName() {
+ return schemaConfig.getIndexName();
+ }
+
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexValidator#exists()
+ */
+ @Override
+ public boolean exists() {
+ final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName() + "/");
+ OperationResult existsResult = esAdapter.doHead(fullUrlStr, MediaType.APPLICATION_JSON_TYPE);
+
+ int rc = existsResult.getResultCode();
+
+ if (rc >= 200 && rc < 300) {
+ LOG.info(AaiUiMsgs.INDEX_EXISTS, schemaConfig.getIndexName());
+ return true;
+ } else {
+ LOG.info(AaiUiMsgs.INDEX_NOT_EXIST, schemaConfig.getIndexName());
+ return false;
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexValidator#integrityValid()
+ */
+ @Override
+ public boolean integrityValid() {
+ return true;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexValidator#createOrRepair()
+ */
+ @Override
+ public void createOrRepair() {
+
+ String message =
+ "IndexIntegrityValidator.createOrRepair() for indexName = " + schemaConfig.getIndexName();
+ LOG.info(AaiUiMsgs.INFO_GENERIC, message);
+
+ final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName() + "/");
+ OperationResult createResult =
+ esAdapter.doPut(fullUrlStr, tableConfigJson, MediaType.APPLICATION_JSON_TYPE);
+
+ int rc = createResult.getResultCode();
+
+ if (rc >= 200 && rc < 300) {
+ LOG.info(AaiUiMsgs.INDEX_RECREATED, schemaConfig.getIndexName());
+ } else if (rc == 400) {
+ LOG.info(AaiUiMsgs.INDEX_ALREADY_EXISTS, schemaConfig.getIndexName());
+ } else {
+ LOG.warn(AaiUiMsgs.INDEX_INTEGRITY_CHECK_FAILED, schemaConfig.getIndexName(),
+ createResult.getResult());
+ }
+
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.IndexValidator#destroyIndex()
+ */
+ @Override
+ public void destroyIndex() {
+ // we don't do this for now
+ }
+
+ /**
+ * Gets the full url.
+ *
+ * @param resourceUrl the resource url
+ * @return the full url
+ */
+ private String getFullUrl(String resourceUrl) {
+ return String.format("http://%s:%s%s", endpointConfig.getEsIpAddress(),
+ endpointConfig.getEsServerPort(), resourceUrl);
+ }
+
+}
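
A pre-sync index check sketch for the validator above, reusing the tableConfigJson produced by the schema factory sketch; esAdapter, schemaConfig and endpointConfig are assumed Spring-provided beans:

    IndexValidator validator =
        new IndexIntegrityValidator(esAdapter, schemaConfig, endpointConfig, tableConfigJson);
    if (!validator.exists()) {
      validator.createOrRepair();   // PUTs the index with its settings and mappings
    }
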
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java
new file mode 100644
index 0000000..05c7a5e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java
@@ -0,0 +1,67 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+
+/**
+ * The Interface IndexSynchronizer.
+ *
+ * @author davea.
+ */
+public interface IndexSynchronizer {
+
+ /**
+ * Do sync.
+ *
+ * @return the operation state
+ */
+ public OperationState doSync();
+
+ public SynchronizerState getState();
+
+ /**
+ * Gets the stat report.
+ *
+ * @param finalReport the final report
+ * @return the stat report
+ */
+ public String getStatReport(boolean finalReport);
+
+ /**
+ * Shutdown.
+ */
+ public void shutdown();
+
+ public String getIndexName();
+
+ /**
+ * Clear cache.
+ */
+ public void clearCache();
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java
new file mode 100644
index 0000000..05e7d91
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java
@@ -0,0 +1,58 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+/**
+ * The Interface IndexValidator.
+ */
+public interface IndexValidator {
+
+ /**
+ * Exists.
+ *
+ * @return true, if successful
+ */
+ public boolean exists();
+
+ /**
+ * Integrity valid.
+ *
+ * @return true, if successful
+ */
+ public boolean integrityValid();
+
+ /**
+ * Creates the or repair.
+ */
+ public void createOrRepair();
+
+ /**
+ * Destroy index.
+ */
+ public void destroyIndex();
+
+ public String getIndexName();
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncController.java
new file mode 100644
index 0000000..bdd009b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncController.java
@@ -0,0 +1,96 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import java.util.Calendar;
+import java.util.Date;
+
+import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+
+public interface SyncController {
+
+ String getControllerName();
+ boolean isPeriodicSyncEnabled();
+ boolean isRunOnceSyncEnabled();
+
+ /**
+ * Perform action.
+ *
+ * @param requestedAction the requested action
+   * @return the resulting operation state
+ */
+ OperationState performAction(SyncActions requestedAction);
+
+ /**
+ * Register entity synchronizer.
+ *
+ * @param entitySynchronizer the entity synchronizer
+ */
+ void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer);
+
+ /**
+ * Register index validator.
+ *
+ * @param indexValidator the index validator
+ */
+ void registerIndexValidator(IndexValidator indexValidator);
+
+ /**
+ * Register index cleaner.
+ *
+ * @param indexCleaner the index cleaner
+ */
+ void registerIndexCleaner(IndexCleaner indexCleaner);
+
+ /**
+ * Shutdown.
+ */
+ void shutdown();
+
+ SynchronizerState getState();
+
+ long getDelayInMs();
+
+ void setDelayInMs(long delayInMs);
+
+ long getSyncFrequencyInMs();
+
+ void setSyncFrequencyInMs(long syncFrequencyInMs);
+
+ Date getSyncStartTime();
+
+ void setSyncStartTime(Date syncStartTime);
+
+ Date getLastExecutionDate();
+
+ void setLastExecutionDate(Date lastExecutionDate);
+
+ Calendar getCreationTime();
+
+ String getNextSyncTime();
+
+}
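
A wiring sketch for the controller contract above, reusing the synchronizer, validator and cleaner sketches from the earlier file sections; syncControllerConfig is an assumed Spring-provided bean, and the SyncControllerImpl constructor declares a checked Exception:

    SyncController controller = new SyncControllerImpl(syncControllerConfig);
    controller.registerIndexValidator(validator);
    controller.registerIndexCleaner(cleaner);
    controller.registerEntitySynchronizer(exampleSynchronizer);
    controller.performAction(SyncActions.SYNCHRONIZE);   // index validation, pre-sync, sync, cleanup
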
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java
new file mode 100644
index 0000000..0e0b193
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java
@@ -0,0 +1,682 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Date;
+import java.util.LinkedHashSet;
+import java.util.TimeZone;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.config.SyncControllerConfig;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+/**
+ * The Class SyncController.
+ *
+ * @author davea.
+ */
+public class SyncControllerImpl implements SyncController {
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(SyncControllerImpl.class);
+
+ /**
+ * The Enum InternalState.
+ */
+ private enum InternalState {
+ IDLE, PRE_SYNC, SYNC_OPERATION, SELECTIVE_DELETE, ABORTING_SYNC, REPAIRING_INDEX, POST_SYNC,
+ TEST_INDEX_INTEGRITY, GENERATE_FINAL_REPORT
+ }
+
+ /**
+ * The Enum SyncActions.
+ */
+ public enum SyncActions {
+ SYNCHRONIZE, REPAIR_INDEX, INDEX_INTEGRITY_VALIDATION_COMPLETE, PRE_SYNC_COMPLETE,
+ SYNC_COMPLETE, SYNC_ABORTED, SYNC_FAILURE, POST_SYNC_COMPLETE, PURGE_COMPLETE, REPORT_COMPLETE
+ }
+
+ private Collection<IndexSynchronizer> registeredSynchronizers;
+ private Collection<IndexValidator> registeredIndexValidators;
+ private Collection<IndexCleaner> registeredIndexCleaners;
+ private InternalState currentInternalState;
+ private ExecutorService syncControllerExecutor;
+ private ExecutorService statReporterExecutor;
+
+ private long delayInMs;
+ private long syncFrequencyInMs;
+ private Date syncStartTime;
+
+ private Date lastExecutionDate;
+ private AtomicInteger runCount;
+ private Semaphore performingActionGate;
+ private Calendar creationTime;
+
+ private String syncStartTimeWithTimeZone;
+ private String controllerName;
+
+ protected SyncControllerConfig syncControllerConfig;
+
+ /**
+ * Instantiates a new sync controller.
+ *
+ * @param name the name
+ * @throws Exception the exception
+ */
+ public SyncControllerImpl(SyncControllerConfig syncControllerConfig) throws Exception {
+ this(syncControllerConfig,null);
+ }
+
+ public SyncControllerImpl(SyncControllerConfig syncControllerConfig, String targetEntityType)
+ throws Exception {
+
+ this.syncControllerConfig = syncControllerConfig;
+
+ this.delayInMs = 0L;
+ this.syncFrequencyInMs = 86400000L;
+ this.syncStartTime = null;
+ this.lastExecutionDate = null;
+ this.runCount = new AtomicInteger(0);
+ this.performingActionGate = new Semaphore(1);
+ registeredSynchronizers = new LinkedHashSet<IndexSynchronizer>();
+ registeredIndexValidators = new LinkedHashSet<IndexValidator>();
+ registeredIndexCleaners = new LinkedHashSet<IndexCleaner>();
+
+ String controllerName = syncControllerConfig.getControllerName();
+
+ if (targetEntityType != null) {
+ controllerName += " (" + targetEntityType + ")";
+ }
+
+ this.controllerName = controllerName;
+
+ this.syncControllerExecutor = NodeUtils.createNamedExecutor("SyncController-" + controllerName,
+ syncControllerConfig.getNumSyncControllerWorkers(), LOG);
+ this.statReporterExecutor =
+ NodeUtils.createNamedExecutor("StatReporter-" + controllerName, 1, LOG);
+
+ this.currentInternalState = InternalState.IDLE;
+
+ this.creationTime =
+ Calendar.getInstance(TimeZone.getTimeZone(syncControllerConfig.getTimeZoneOfSyncStartTimeStamp()));
+
+ }
+
+
+
+
+
+
+ /**
+ * Change internal state.
+ *
+ * @param newState the new state
+ * @param causedByAction the caused by action
+ */
+ private void changeInternalState(InternalState newState, SyncActions causedByAction) {
+ LOG.info(AaiUiMsgs.SYNC_INTERNAL_STATE_CHANGED, controllerName,
+ currentInternalState.toString(), newState.toString(), causedByAction.toString());
+
+ this.currentInternalState = newState;
+
+ performStateAction();
+ }
+
+
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncController2#getDelayInMs()
+ */
+ @Override
+ public long getDelayInMs() {
+ return delayInMs;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncController2#setDelayInMs(long)
+ */
+ @Override
+ public void setDelayInMs(long delayInMs) {
+ this.delayInMs = delayInMs;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncController2#getSyncFrequencyInMs()
+ */
+ @Override
+ public long getSyncFrequencyInMs() {
+ return syncFrequencyInMs;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncController2#setSyncFrequencyInMs(long)
+ */
+ @Override
+ public void setSyncFrequencyInMs(long syncFrequencyInMs) {
+ this.syncFrequencyInMs = syncFrequencyInMs;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncController2#getSyncStartTime()
+ */
+ @Override
+ public Date getSyncStartTime() {
+ return syncStartTime;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncController2#setSyncStartTime(java.util.Date)
+ */
+ @Override
+ public void setSyncStartTime(Date syncStartTime) {
+ this.syncStartTime = syncStartTime;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncController2#getLastExecutionDate()
+ */
+ @Override
+ public Date getLastExecutionDate() {
+ return lastExecutionDate;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncController2#setLastExecutionDate(java.util.Date)
+ */
+ @Override
+ public void setLastExecutionDate(Date lastExecutionDate) {
+ this.lastExecutionDate = lastExecutionDate;
+ }
+
+ @Override
+ public String getControllerName() {
+ return controllerName;
+ }
+
+
+
+
+ @Override
+ public OperationState performAction(SyncActions requestedAction) {
+
+ if (currentInternalState == InternalState.IDLE) {
+
+ try {
+
+ /*
+ * non-blocking semaphore acquire used to guarantee only 1 execution of the synchronization
+ * at a time.
+ */
+
+ switch (requestedAction) {
+ case SYNCHRONIZE:
+
+ if (performingActionGate.tryAcquire()) {
+ try {
+
+ long opStartTime = System.currentTimeMillis();
+
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+ getControllerName() + " started synchronization at "
+ + SynchronizerConstants.SIMPLE_DATE_FORMAT.format(opStartTime).replaceAll(
+ SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD));
+
+ runCount.incrementAndGet();
+
+ changeInternalState(InternalState.TEST_INDEX_INTEGRITY, requestedAction);
+
+ long opEndTime = System.currentTimeMillis();
+
+ long opTime = (opEndTime - opStartTime);
+
+ String durationMessage =
+ String.format(getControllerName() + " synchronization took '%d' ms.", opTime);
+
+ LOG.info(AaiUiMsgs.SYNC_DURATION, durationMessage);
+
+ if (syncControllerConfig.isPeriodicSyncEnabled()) {
+
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+ getControllerName() + " next sync to begin at " + getNextSyncTime());
+
+ TimeZone tz = TimeZone.getTimeZone(syncControllerConfig.getTimeZoneOfSyncStartTimeStamp());
+
+ if (opTime > this.getSyncFrequencyInMs()) {
+
+ String durationWasLongerMessage = String.format(
+ getControllerName() + " synchronization took '%d' ms which is larger than"
+ + " synchronization interval of '%d' ms.",
+ opTime, this.getSyncFrequencyInMs());
+
+ LOG.info(AaiUiMsgs.SYNC_DURATION, durationWasLongerMessage);
+ }
+ }
+
+ } catch (Exception syncException) {
+ String message = "An error occurred while performing action = " + requestedAction
+ + ". Error = " + syncException.getMessage();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ } finally {
+ performingActionGate.release();
+ }
+ } else {
+ return OperationState.IGNORED_SYNC_NOT_IDLE;
+ }
+
+ break;
+
+ default:
+ break;
+ }
+
+ return OperationState.OK;
+
+ } catch (Exception exc) {
+ String message = "An error occurred while performing action = " + requestedAction
+ + ". Error = " + exc.getMessage();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ return OperationState.ERROR;
+      }
+ } else {
+ LOG.error(AaiUiMsgs.SYNC_NOT_VALID_STATE_DURING_REQUEST, currentInternalState.toString());
+ return OperationState.IGNORED_SYNC_NOT_IDLE;
+ }
+ }
+
+ /**
+ * Perform state action.
+ */
+ private void performStateAction() {
+
+ try {
+ switch (currentInternalState) {
+
+ case TEST_INDEX_INTEGRITY:
+ performIndexIntegrityValidation();
+ break;
+
+ case PRE_SYNC:
+ performPreSyncCleanupCollection();
+ break;
+
+ case SYNC_OPERATION:
+ performSynchronization();
+ break;
+
+ case POST_SYNC:
+ performIndexSyncPostCollection();
+ changeInternalState(InternalState.SELECTIVE_DELETE, SyncActions.POST_SYNC_COMPLETE);
+ break;
+
+ case SELECTIVE_DELETE:
+ performIndexCleanup();
+ changeInternalState(InternalState.GENERATE_FINAL_REPORT, SyncActions.PURGE_COMPLETE);
+ break;
+
+ case GENERATE_FINAL_REPORT:
+
+ dumpStatReport(true);
+ clearCaches();
+ changeInternalState(InternalState.IDLE, SyncActions.REPORT_COMPLETE);
+ break;
+
+ case ABORTING_SYNC:
+ performSyncAbort();
+ break;
+
+ default:
+ break;
+ }
+ } catch (Exception exc) {
+ /*
+ * Perhaps we should abort the sync on an exception
+ */
+ String message = "Caught an error which performing action. Error = " + exc.getMessage();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ }
+ }
+
+ @Override
+ public void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer) {
+
+ String indexName = entitySynchronizer.getIndexName();
+
+ if (indexName != null) {
+ registeredSynchronizers.add(entitySynchronizer);
+ } else {
+ String message = "Failed to register entity synchronizer because index name is null";
+ LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message);
+ }
+
+ }
+
+ @Override
+ public void registerIndexValidator(IndexValidator indexValidator) {
+
+ String indexName = indexValidator.getIndexName();
+
+ if (indexName != null) {
+ registeredIndexValidators.add(indexValidator);
+ } else {
+ String message = "Failed to register index validator because index name is null";
+ LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message);
+ }
+
+ }
+
+ @Override
+ public void registerIndexCleaner(IndexCleaner indexCleaner) {
+
+ String indexName = indexCleaner.getIndexName();
+
+ if (indexName != null) {
+ registeredIndexCleaners.add(indexCleaner);
+ } else {
+ String message = "Failed to register index cleaner because index name is null";
+ LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message);
+ }
+ }
+
+  /*
+   * The state machine should drive our flow: a sync request just dispatches an action, and the
+   * state machine determines what is in play and what happens next.
+   */
+
+ /**
+ * Dump stat report.
+ *
+ * @param showFinalReport the show final report
+ */
+ private void dumpStatReport(boolean showFinalReport) {
+
+ for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+
+ String statReport = synchronizer.getStatReport(showFinalReport);
+
+ if (statReport != null) {
+ LOG.info(AaiUiMsgs.INFO_GENERIC, statReport);
+ }
+ }
+ }
+
+ /**
+ * Clear caches.
+ */
+ private void clearCaches() {
+
+ /*
+ * Any entity caches that were built as part of the sync operation should be cleared to save
+ * memory. The original intent of the caching was to provide a short-lived cache to satisfy
+     * entity requests from multiple synchronizers while minimizing interactions with the AAI.
+ */
+
+ for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+ synchronizer.clearCache();
+ }
+ }
+
+ /**
+ * Perform pre sync cleanup collection.
+ */
+ private void performPreSyncCleanupCollection() {
+
+ /*
+     * Ask the index cleaners to collect their pre-sync object id collections.
+ */
+
+ for (IndexCleaner cleaner : registeredIndexCleaners) {
+ cleaner.populatePreOperationCollection();
+ }
+
+ changeInternalState(InternalState.SYNC_OPERATION, SyncActions.PRE_SYNC_COMPLETE);
+
+ }
+
+ /**
+ * Perform index sync post collection.
+ */
+ private void performIndexSyncPostCollection() {
+
+ /*
+     * Ask the index cleaners to collect their post-sync object id collections.
+ */
+
+ for (IndexCleaner cleaner : registeredIndexCleaners) {
+ cleaner.populatePostOperationCollection();
+ }
+
+ }
+
+ /**
+ * Perform index cleanup.
+ */
+ private void performIndexCleanup() {
+
+ /*
+     * Ask the index cleaners to perform their cleanup based on the collected pre/post collections.
+ */
+
+ for (IndexCleaner cleaner : registeredIndexCleaners) {
+ cleaner.performCleanup();
+ }
+
+ }
+
+ /**
+ * Perform sync abort.
+ */
+ private void performSyncAbort() {
+ changeInternalState(InternalState.IDLE, SyncActions.SYNC_ABORTED);
+ }
+
+ /**
+ * Perform index integrity validation.
+ */
+ private void performIndexIntegrityValidation() {
+
+ /*
+ * loop through registered index validators and test and fix, if needed
+ */
+
+ for (IndexValidator validator : registeredIndexValidators) {
+ try {
+ if (!validator.exists()) {
+ validator.createOrRepair();
+ }
+ } catch (Exception exc) {
+ String message = "Index validator caused an error = " + exc.getMessage();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ }
+ }
+
+ changeInternalState(InternalState.PRE_SYNC, SyncActions.INDEX_INTEGRITY_VALIDATION_COMPLETE);
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncControllerInterface#shutdown()
+ */
+ @Override
+ public void shutdown() {
+
+ this.syncControllerExecutor.shutdown();
+ for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+
+ try {
+ synchronizer.shutdown();
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "Synchronizer shutdown caused an error = " + exc.getMessage());
+ }
+
+ }
+ this.statReporterExecutor.shutdown();
+ }
+
+ /*
+   * TODO: do we need a running task that responds to a flag so it can be killed, or do we just
+   * stop the executor that it is running in?
+ */
+
+
+
+ /**
+ * Perform synchronization.
+ */
+ private void performSynchronization() {
+
+ /*
+ * Get all the synchronizers running in parallel
+ */
+
+ for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+ supplyAsync(new Supplier<Void>() {
+
+ @Override
+ public Void get() {
+
+ synchronizer.doSync();
+ return null;
+ }
+
+ }, this.syncControllerExecutor).whenComplete((result, error) -> {
+
+ /*
+         * We don't bother checking the result: the Supplier always returns null, so completion or
+         * failure is conveyed through the error argument instead.
+ */
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "doSync operation failed with an error = " + error.getMessage());
+ }
+ });
+ }
+
+ boolean allDone = false;
+ long nextReportTimeStampInMs = System.currentTimeMillis() + 30000L;
+ boolean dumpPeriodicStatReport = false;
+
+ while (!allDone) {
+ int totalFinished = 0;
+
+ for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+ if (dumpPeriodicStatReport) {
+ if (synchronizer.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) {
+ String statReport = synchronizer.getStatReport(false);
+
+ if (statReport != null) {
+ LOG.info(AaiUiMsgs.INFO_GENERIC, statReport);
+ }
+ }
+ }
+
+ if (synchronizer.getState() == SynchronizerState.IDLE
+ || synchronizer.getState() == SynchronizerState.ABORTED) {
+ totalFinished++;
+ }
+ }
+
+ if ( System.currentTimeMillis() > nextReportTimeStampInMs) {
+ dumpPeriodicStatReport = true;
+ nextReportTimeStampInMs = System.currentTimeMillis() + 30000L;
+ } else {
+ dumpPeriodicStatReport = false;
+ }
+
+ allDone = (totalFinished == registeredSynchronizers.size());
+
+ try {
+ Thread.sleep(250);
+ } catch (InterruptedException exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "An error occurred while waiting for sync to complete. Error = " + exc.getMessage());
+ }
+
+ }
+
+ changeInternalState(InternalState.POST_SYNC, SyncActions.SYNC_COMPLETE);
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.SyncControllerInterface#getState()
+ */
+ @Override
+ public SynchronizerState getState() {
+
+ switch (currentInternalState) {
+
+ case IDLE: {
+ return SynchronizerState.IDLE;
+ }
+
+ default: {
+ return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+
+ }
+ }
+
+ }
+
+ @Override
+ public Calendar getCreationTime() {
+ return creationTime;
+ }
+
+ @Override
+ public String getNextSyncTime() {
+    // Delegate to the controller configuration, which owns the sync schedule calculation.
+    return syncControllerConfig.getNextSyncTime();
+ }
+
+ @Override
+ public boolean isPeriodicSyncEnabled() {
+ return syncControllerConfig.isPeriodicSyncEnabled();
+ }
+
+ @Override
+ public boolean isRunOnceSyncEnabled() {
+ return syncControllerConfig.isRunOnceSyncEnabled();
+ }
+
+} \ No newline at end of file
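A minimal usage sketch of SyncControllerImpl, assuming concrete IndexSynchronizer, IndexValidator and IndexCleaner implementations are supplied elsewhere in the module; the controller name, worker count and start timestamp are illustrative values and not part of this change:

    package org.onap.aai.sparky.sync;

    import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions;
    import org.onap.aai.sparky.sync.config.SyncControllerConfig;

    public class SyncControllerUsageSketch {

      public SyncController buildAndRunOnce(IndexSynchronizer synchronizer,
          IndexValidator validator, IndexCleaner cleaner) throws Exception {

        SyncControllerConfig config = new SyncControllerConfig();
        config.setControllerName("exampleEntitySync");
        config.setNumSyncControllerWorkers(2);
        // Must be set before constructing the controller, because the constructor reads the
        // time zone that setTargetSyncStartTimeStamp() derives from this value.
        config.setTargetSyncStartTimeStamp("05:00:00 UTC+00:00");

        SyncControllerImpl controller = new SyncControllerImpl(config);
        controller.registerEntitySynchronizer(synchronizer);
        controller.registerIndexValidator(validator);
        controller.registerIndexCleaner(cleaner);

        // Drives IDLE -> TEST_INDEX_INTEGRITY -> PRE_SYNC -> SYNC_OPERATION -> POST_SYNC
        // -> SELECTIVE_DELETE -> GENERATE_FINAL_REPORT -> IDLE.
        controller.performAction(SyncActions.SYNCHRONIZE);
        controller.shutdown();
        return controller;
      }
    }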
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java
new file mode 100644
index 0000000..f20eceb
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java
@@ -0,0 +1,29 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+public interface SyncControllerRegistrar {
+ public void registerController();
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java
new file mode 100644
index 0000000..d4013d3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java
@@ -0,0 +1,50 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class SyncControllerRegistry {
+
+ private List<SyncController> controllers;
+
+ public SyncControllerRegistry() {
+ controllers = new ArrayList<SyncController>();
+ }
+
+ public void registerSyncController(SyncController controller) {
+ controllers.add(controller);
+ }
+
+ public List<SyncController> getControllers() {
+ return controllers;
+ }
+
+ public void setControllers(List<SyncController> controllers) {
+ this.controllers = controllers;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java
new file mode 100644
index 0000000..07653b7
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java
@@ -0,0 +1,222 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import java.lang.Thread.UncaughtExceptionHandler;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.springframework.context.ApplicationListener;
+import org.springframework.context.event.ApplicationContextEvent;
+
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+public class SyncControllerService implements ApplicationListener<ApplicationContextEvent> {
+
+ private SyncControllerRegistry syncControllerRegistry;
+ private ExecutorService runonceSyncExecutor;
+ private ScheduledExecutorService periodicSyncExecutor;
+ private boolean syncStarted;
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(SyncControllerService.class);
+
+ private class SyncControllerTask implements Runnable {
+
+ private SyncController controller;
+
+ public SyncControllerTask(SyncController controller) {
+ this.controller = controller;
+ }
+
+ @Override
+ public void run() {
+
+ try {
+
+ if (controller.getState() == SynchronizerState.IDLE) {
+
+ /*
+           * This is a blocking call. It would be nicer if it were asynchronous internally within
+           * the controller, but at the moment that's not the way it works.
+ */
+
+ if (controller.performAction(SyncActions.SYNCHRONIZE) != OperationState.OK) {
+
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+                controller.getControllerName() + " sync attempt was skipped or failed (result was not OK).");
+ }
+ } else {
+
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+ controller.getControllerName() + " is not idle, sync attempt has been skipped.");
+ }
+
+ } catch (Exception exception) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "Error while attempting synchronization. Error = " + exception.getMessage());
+ }
+
+ }
+
+ }
+
+ public SyncControllerService(SyncControllerRegistry syncControllerRegistry, int numRunOnceWorkers,
+ int numPeriodicWorkers) {
+ this.syncControllerRegistry = syncControllerRegistry;
+ this.syncStarted = false;
+
+ UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() {
+
+ @Override
+ public void uncaughtException(Thread thread, Throwable exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc);
+ }
+ };
+
+ runonceSyncExecutor = Executors.newFixedThreadPool(numRunOnceWorkers,
+ new ThreadFactoryBuilder().setNameFormat("RunonceSyncWorker-%d")
+ .setUncaughtExceptionHandler(uncaughtExceptionHandler).build());
+
+
+ periodicSyncExecutor = Executors.newScheduledThreadPool(numPeriodicWorkers,
+ new ThreadFactoryBuilder().setNameFormat("PeriodicSyncWorker-%d")
+ .setUncaughtExceptionHandler(uncaughtExceptionHandler).build());
+
+ }
+
+ public SyncControllerRegistry getSyncControllerRegistry() {
+ return syncControllerRegistry;
+ }
+
+ public void startSync() {
+
+ long syncInitialDelayInMs = 0;
+
+ for (SyncController controller : syncControllerRegistry.getControllers()) {
+
+ syncInitialDelayInMs = controller.getDelayInMs();
+
+ if (!controller.isPeriodicSyncEnabled()) {
+
+ if (controller.isRunOnceSyncEnabled()) {
+ LOG.info(AaiUiMsgs.INFO_GENERIC, controller.getControllerName() + " is enabled.");
+ runonceSyncExecutor.submit(new SyncControllerTask(controller));
+ } else {
+ LOG.info(AaiUiMsgs.INFO_GENERIC, controller.getControllerName() + " is disabled.");
+ }
+
+ } else {
+
+ /**
+         * Do both. We'll take one instance of the SyncController and wrap that object instance in
+         * two SyncControllerTasks. The responsibility for preventing a conflicting sync lives in
+         * the SyncController instance: if a sync is already underway when the periodic sync kicks
+         * in, the new request is ignored by the SyncController.
+         *
+         * The SyncController instance itself is also stateful, so it knows the last time it ran,
+         * the next time it is supposed to run, the number of times a sync has executed, etc.
+ */
+
+ if (controller.isRunOnceSyncEnabled()) {
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+ controller.getControllerName() + " run-once sync is enabled.");
+ runonceSyncExecutor.submit(new SyncControllerTask(controller));
+ } else {
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+ controller.getControllerName() + " run-once sync is disabled.");
+ }
+
+ /*
+         * The controller knows its configured frequency, so we can simply ask it for the delay and
+         * frequency rather than trying to calculate them here; the configured frequency could be
+         * different for each controller.
+ */
+
+ if (controller.isPeriodicSyncEnabled()) {
+
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+ controller.getControllerName() + " periodic sync is enabled and scheduled to start @ "
+ + controller.getNextSyncTime());
+
+ periodicSyncExecutor.scheduleAtFixedRate(new SyncControllerTask(controller),
+ controller.getDelayInMs(), controller.getSyncFrequencyInMs(), TimeUnit.MILLISECONDS);
+
+ } else {
+
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+ controller.getControllerName() + " periodic sync is disabled.");
+
+ }
+
+ }
+
+ }
+
+ }
+
+ public void shutdown() {
+
+ if (runonceSyncExecutor != null) {
+ runonceSyncExecutor.shutdown();
+ }
+
+ if (periodicSyncExecutor != null) {
+ periodicSyncExecutor.shutdown();
+ }
+
+ if (syncControllerRegistry != null) {
+ for (SyncController controller : syncControllerRegistry.getControllers()) {
+ controller.shutdown();
+ }
+ }
+
+ }
+
+ @Override
+ public synchronized void onApplicationEvent(ApplicationContextEvent arg0) {
+
+ /*
+     * Start sync service processing once Spring context initialization has finished.
+ */
+
+ if (!syncStarted) {
+ syncStarted = true;
+ startSync();
+ }
+
+ }
+
+
+}
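A minimal bootstrap sketch showing how the registry and service above fit together; the two controller arguments and the worker counts are placeholders, and in the Spring Boot wiring these objects would normally be created as beans rather than constructed directly:

    package org.onap.aai.sparky.sync;

    public class SyncServiceBootstrapSketch {

      public SyncControllerService bootstrap(SyncController aggregationController,
          SyncController suggestionController) {

        SyncControllerRegistry registry = new SyncControllerRegistry();
        registry.registerSyncController(aggregationController);
        registry.registerSyncController(suggestionController);

        // One pool for run-once syncs, one scheduler for periodic syncs.
        SyncControllerService service = new SyncControllerService(registry, 2, 2);

        // Normally triggered by the ApplicationContextEvent handler above; calling
        // startSync() directly submits/schedules each registered controller the same way.
        service.startSync();
        return service;
      }
    }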
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java
new file mode 100644
index 0000000..9201f07
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java
@@ -0,0 +1,65 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+/**
+ * The Class SynchronizerConstants.
+ */
+public final class SynchronizerConstants {
+ // Error values for invalid user input
+ public static final int DEFAULT_CONFIG_ERROR_INT_VALUE = Integer.MAX_VALUE;
+ public static final Date DEFAULT_CONFIG_ERROR_DATE_VALUE = new Date(Long.MAX_VALUE);
+ public static final SimpleDateFormat SIMPLE_DATE_FORMAT =
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
+
+ public static final String DEPTH_MODIFIER = "?depth=0";
+ public static final String DEPTH_ALL_MODIFIER = "?depth=all";
+ public static final String DEPTH_AND_NODES_ONLY_MODIFIER = "?depth=0&nodes-only";
+ public static final String NODES_ONLY_MODIFIER = "?nodes-only";
+
+ // constants for scheduling synchronizer
+ public static final int COMPONENTS_IN_TIMESTAMP = 2;
+ public static final String DEFAULT_INITIAL_DELAY_IN_MS = "0";
+ public static final String DEFAULT_TASK_FREQUENCY_IN_DAY = "0";
+ public static final String DEFAULT_START_TIMESTAMP = "05:00:00 UTC";
+ public static final long DELAY_NO_STARTUP_SYNC_IN_MS = 0;
+ public static final long DELAY_NO_PERIODIC_SYNC_IN_MS = 0;
+ public static final int IDX_TIME_IN_TIMESTAMP = 0;
+ public static final int IDX_TIMEZONE_IN_TIMESTAMP = 1;
+ public static final long MILLISEC_IN_A_MIN = 60000;
+ public static final long MILLISEC_IN_A_DAY = 24 * 60 * 60 * 1000;
+ public static final String TIME_STD = "GMT";
+ public static final String TIME_CONFIG_STD = "UTC";
+ public static final String TIMESTAMP24HOURS_PATTERN =
+ "([01]?[0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9] UTC[+|-][0-5][0-9]:[0-5][0-9]";
+
+ /**
+   * Private constructor; this constants class is not meant to be instantiated.
+ */
+ private SynchronizerConstants() {}
+}
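A small sketch of how the timestamp pattern and date constants above are used; the sample start timestamp is an illustrative value that satisfies TIMESTAMP24HOURS_PATTERN (24-hour time followed by a UTC offset):

    package org.onap.aai.sparky.sync;

    import java.util.regex.Pattern;

    public class SynchronizerConstantsSketch {

      public static void main(String[] args) {
        // 24-hour time plus an explicit UTC offset, as required by the pattern.
        String startTimeStamp = "05:00:00 UTC+05:30";
        boolean valid =
            Pattern.matches(SynchronizerConstants.TIMESTAMP24HOURS_PATTERN, startTimeStamp);
        System.out.println(startTimeStamp + " matches pattern = " + valid); // true

        // The sync classes format times with the shared SIMPLE_DATE_FORMAT and then relabel
        // any "GMT" in the rendered string as "UTC" for display.
        String display = SynchronizerConstants.SIMPLE_DATE_FORMAT.format(System.currentTimeMillis())
            .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD);
        System.out.println("now = " + display);
      }
    }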
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java
new file mode 100644
index 0000000..48625c0
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java
@@ -0,0 +1,135 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import org.onap.aai.sparky.analytics.AbstractStatistics;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+
+/**
+ * The Class TaskProcessingStats.
+ */
+public class TaskProcessingStats extends AbstractStatistics {
+
+  private static final String TASK_AGE_STATS = "taskAgeStats";
+  private static final String TASK_RESPONSE_STATS = "taskResponseStats";
+  private static final String RESPONSE_SIZE_IN_BYTES = "taskResponseSizeInBytes";
+  // private static String QUEUE_ITEM_LENGTH = "queueItemLength";
+  private static final String TPS = "transactionsPerSecond";
+
+ /**
+ * Instantiates a new task processing stats.
+ *
+ * @param config the config
+ */
+ public TaskProcessingStats(NetworkStatisticsConfig config) {
+
+ addHistogram(TASK_AGE_STATS, config.getTaskAgeHistogramLabel(),
+ config.getTaskAgeHistogramMaxYAxis(), config.getTaskAgeHistogramNumBins(),
+ config.getTaskAgeHistogramNumDecimalPoints());
+
+ addHistogram(TASK_RESPONSE_STATS, config.getResponseTimeHistogramLabel(),
+ config.getResponseTimeHistogramMaxYAxis(), config.getResponseTimeHistogramNumBins(),
+ config.getResponseTimeHistogramNumDecimalPoints());
+
+ addHistogram(RESPONSE_SIZE_IN_BYTES, config.getBytesHistogramLabel(),
+ config.getBytesHistogramMaxYAxis(), config.getBytesHistogramNumBins(),
+ config.getBytesHistogramNumDecimalPoints());
+
+ /*
+ * addHistogram(QUEUE_ITEM_LENGTH, config.getQueueLengthHistogramLabel(),
+ * config.getQueueLengthHistogramMaxYAxis(), config.getQueueLengthHistogramNumBins(),
+ * config.getQueueLengthHistogramNumDecimalPoints());
+ */
+
+ addHistogram(TPS, config.getTpsHistogramLabel(), config.getTpsHistogramMaxYAxis(),
+ config.getTpsHistogramNumBins(), config.getTpsHistogramNumDecimalPoints());
+
+ }
+
+ /*
+ * public void updateQueueItemLengthHistogram(long value) { updateHistogram(QUEUE_ITEM_LENGTH,
+ * value); }
+ */
+
+ /**
+ * Update task age stats histogram.
+ *
+ * @param value the value
+ */
+ public void updateTaskAgeStatsHistogram(long value) {
+ updateHistogram(TASK_AGE_STATS, value);
+ }
+
+ /**
+ * Update task response stats histogram.
+ *
+ * @param value the value
+ */
+ public void updateTaskResponseStatsHistogram(long value) {
+ updateHistogram(TASK_RESPONSE_STATS, value);
+ }
+
+ /**
+ * Update response size in bytes histogram.
+ *
+ * @param value the value
+ */
+ public void updateResponseSizeInBytesHistogram(long value) {
+ updateHistogram(RESPONSE_SIZE_IN_BYTES, value);
+ }
+
+ /**
+ * Update transactions per second histogram.
+ *
+ * @param value the value
+ */
+ public void updateTransactionsPerSecondHistogram(long value) {
+ updateHistogram(TPS, value);
+ }
+
+ /**
+ * Gets the statistics report.
+ *
+ * @param verboseEnabled the verbose enabled
+ * @param indentPadding the indent padding
+ * @return the statistics report
+ */
+ public String getStatisticsReport(boolean verboseEnabled, String indentPadding) {
+
+ StringBuilder sb = new StringBuilder();
+
+ sb.append("\n").append(getHistogramStats(TASK_AGE_STATS, verboseEnabled, indentPadding));
+ // sb.append("\n").append(getHistogramStats(QUEUE_ITEM_LENGTH, verboseEnabled, indentPadding));
+ sb.append("\n").append(getHistogramStats(TASK_RESPONSE_STATS, verboseEnabled, indentPadding));
+ sb.append("\n")
+ .append(getHistogramStats(RESPONSE_SIZE_IN_BYTES, verboseEnabled, indentPadding));
+ sb.append("\n").append(getHistogramStats(TPS, verboseEnabled, indentPadding));
+
+ return sb.toString();
+
+ }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java
new file mode 100644
index 0000000..e639eff
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java
@@ -0,0 +1,75 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.onap.aai.sparky.analytics.AveragingRingBuffer;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+
+public class TransactionRateMonitor {
+
+ private AtomicInteger numTransactions;
+ private AveragingRingBuffer responseTimeTracker;
+ private long startTimeInMs;
+
+ /**
+   * Instantiates a new transaction rate monitor.
+   *
+   * @param numWorkerThreads the number of worker threads contributing samples
+   * @param config the network statistics config
+ */
+ public TransactionRateMonitor(int numWorkerThreads, NetworkStatisticsConfig config) {
+
+ this.responseTimeTracker = new AveragingRingBuffer(
+ config.getNumSamplesPerThreadForRunningAverage() * numWorkerThreads);
+ this.startTimeInMs = System.currentTimeMillis();
+ this.numTransactions = new AtomicInteger(0);
+ }
+
+ /**
+ * Track response time.
+ *
+ * @param responseTimeInMs the response time in ms
+ */
+ public void trackResponseTime(long responseTimeInMs) {
+ this.numTransactions.incrementAndGet();
+ responseTimeTracker.addSample(responseTimeInMs);
+ }
+
+ public long getAvg() {
+ return responseTimeTracker.getAvg();
+ }
+
+ public double getCurrentTps() {
+ if (numTransactions.get() > 0) {
+ double timeDelta = System.currentTimeMillis() - startTimeInMs;
+ double numTxns = numTransactions.get();
+ return (numTxns / timeDelta) * 1000.0;
+ }
+
+ return 0.0;
+ }
+
+}
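A short worked example of the rate calculation above: the current TPS is the number of tracked transactions divided by the elapsed milliseconds since construction, scaled to seconds. The sample-depth value is a placeholder:

    package org.onap.aai.sparky.sync;

    import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;

    public class TransactionRateMonitorSketch {

      public static void main(String[] args) throws InterruptedException {
        NetworkStatisticsConfig config = new NetworkStatisticsConfig();
        config.setNumSamplesPerThreadForRunningAverage(64); // placeholder sample depth

        TransactionRateMonitor monitor = new TransactionRateMonitor(4, config);

        // Simulate 500 completed transactions, each with a 20 ms response time.
        for (int i = 0; i < 500; i++) {
          monitor.trackResponseTime(20);
        }

        Thread.sleep(10000); // roughly 10 seconds elapsed since construction

        // ~500 txns / ~10000 ms * 1000 => on the order of 50 transactions per second.
        System.out.println("avg response ms = " + monitor.getAvg());
        System.out.println("current tps     = " + monitor.getCurrentTps());
      }
    }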
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java
new file mode 100644
index 0000000..0197b9b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java
@@ -0,0 +1,72 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.config;
+
+public class ElasticSearchEndpointConfig {
+
+ private String esIpAddress;
+ private String esServerPort;
+ private int scrollContextTimeToLiveInMinutes;
+ private int scrollContextBatchRequestSize;
+
+ public ElasticSearchEndpointConfig() {
+
+ }
+
+ public String getEsIpAddress() {
+ return esIpAddress;
+ }
+
+ public void setEsIpAddress(String esIpAddress) {
+ this.esIpAddress = esIpAddress;
+ }
+
+ public String getEsServerPort() {
+ return esServerPort;
+ }
+
+ public void setEsServerPort(String esServerPort) {
+ this.esServerPort = esServerPort;
+ }
+
+ public int getScrollContextTimeToLiveInMinutes() {
+ return scrollContextTimeToLiveInMinutes;
+ }
+
+ public void setScrollContextTimeToLiveInMinutes(int scrollContextTimeToLiveInMinutes) {
+ this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes;
+ }
+
+ public int getScrollContextBatchRequestSize() {
+ return scrollContextBatchRequestSize;
+ }
+
+ public void setScrollContextBatchRequestSize(int scrollContextBatchRequestSize) {
+ this.scrollContextBatchRequestSize = scrollContextBatchRequestSize;
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java
new file mode 100644
index 0000000..4f134c3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java
@@ -0,0 +1,77 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.config;
+
+public class ElasticSearchSchemaConfig {
+
+ private String indexName;
+ private String indexDocType;
+ private String indexSettingsFileName;
+ private String indexMappingsFileName;
+
+ public String getIndexName() {
+ return indexName;
+ }
+
+ public void setIndexName(String indexName) {
+ this.indexName = indexName;
+ }
+
+ public String getIndexDocType() {
+ return indexDocType;
+ }
+
+ public void setIndexDocType(String indexDocType) {
+ this.indexDocType = indexDocType;
+ }
+
+ public String getIndexSettingsFileName() {
+ return indexSettingsFileName;
+ }
+
+ public void setIndexSettingsFileName(String indexSettingsFileName) {
+ this.indexSettingsFileName = indexSettingsFileName;
+ }
+
+ public String getIndexMappingsFileName() {
+ return indexMappingsFileName;
+ }
+
+ public void setIndexMappingsFileName(String indexMappingsFileName) {
+ this.indexMappingsFileName = indexMappingsFileName;
+ }
+
+ @Override
+ public String toString() {
+ return "ElasticSearchSchemaConfig ["
+ + (indexName != null ? "indexName=" + indexName + ", " : "")
+ + (indexDocType != null ? "indexDocType=" + indexDocType + ", " : "")
+ + (indexSettingsFileName != null ? "indexSettingsFileName=" + indexSettingsFileName + ", "
+ : "")
+ + (indexMappingsFileName != null ? "indexMappingsFileName=" + indexMappingsFileName : "")
+ + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java
new file mode 100644
index 0000000..0d089a4
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java
@@ -0,0 +1,239 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.config;
+
+public class NetworkStatisticsConfig {
+
+ private int numSamplesPerThreadForRunningAverage;
+
+ private String bytesHistogramLabel;
+
+ private long bytesHistogramMaxYAxis;
+
+ private int bytesHistogramNumBins;
+
+ private int bytesHistogramNumDecimalPoints;
+
+ private String queueLengthHistogramLabel;
+
+ private long queueLengthHistogramMaxYAxis;
+
+ private int queueLengthHistogramNumBins;
+
+ private int queueLengthHistogramNumDecimalPoints;
+
+ private String taskAgeHistogramLabel;
+
+ private long taskAgeHistogramMaxYAxis;
+
+ private int taskAgeHistogramNumBins;
+
+ private int taskAgeHistogramNumDecimalPoints;
+
+ private String responseTimeHistogramLabel;
+
+ private long responseTimeHistogramMaxYAxis;
+
+ private int responseTimeHistogramNumBins;
+
+ private int responseTimeHistogramNumDecimalPoints;
+
+ private String tpsHistogramLabel;
+
+ private long tpsHistogramMaxYAxis;
+
+ private int tpsHistogramNumBins;
+
+ private int tpsHistogramNumDecimalPoints;
+
+ public int getNumSamplesPerThreadForRunningAverage() {
+ return numSamplesPerThreadForRunningAverage;
+ }
+
+ public void setNumSamplesPerThreadForRunningAverage(int numSamplesPerThreadForRunningAverage) {
+ this.numSamplesPerThreadForRunningAverage = numSamplesPerThreadForRunningAverage;
+ }
+
+ public String getBytesHistogramLabel() {
+ return bytesHistogramLabel;
+ }
+
+ public void setBytesHistogramLabel(String bytesHistogramLabel) {
+ this.bytesHistogramLabel = bytesHistogramLabel;
+ }
+
+ public long getBytesHistogramMaxYAxis() {
+ return bytesHistogramMaxYAxis;
+ }
+
+ public void setBytesHistogramMaxYAxis(long bytesHistogramMaxYAxis) {
+ this.bytesHistogramMaxYAxis = bytesHistogramMaxYAxis;
+ }
+
+ public int getBytesHistogramNumBins() {
+ return bytesHistogramNumBins;
+ }
+
+ public void setBytesHistogramNumBins(int bytesHistogramNumBins) {
+ this.bytesHistogramNumBins = bytesHistogramNumBins;
+ }
+
+ public int getBytesHistogramNumDecimalPoints() {
+ return bytesHistogramNumDecimalPoints;
+ }
+
+ public void setBytesHistogramNumDecimalPoints(int bytesHistogramNumDecimalPoints) {
+ this.bytesHistogramNumDecimalPoints = bytesHistogramNumDecimalPoints;
+ }
+
+ public String getQueueLengthHistogramLabel() {
+ return queueLengthHistogramLabel;
+ }
+
+ public void setQueueLengthHistogramLabel(String queueLengthHistogramLabel) {
+ this.queueLengthHistogramLabel = queueLengthHistogramLabel;
+ }
+
+ public long getQueueLengthHistogramMaxYAxis() {
+ return queueLengthHistogramMaxYAxis;
+ }
+
+ public void setQueueLengthHistogramMaxYAxis(long queueLengthHistogramMaxYAxis) {
+ this.queueLengthHistogramMaxYAxis = queueLengthHistogramMaxYAxis;
+ }
+
+ public int getQueueLengthHistogramNumBins() {
+ return queueLengthHistogramNumBins;
+ }
+
+ public void setQueueLengthHistogramNumBins(int queueLengthHistogramNumBins) {
+ this.queueLengthHistogramNumBins = queueLengthHistogramNumBins;
+ }
+
+ public int getQueueLengthHistogramNumDecimalPoints() {
+ return queueLengthHistogramNumDecimalPoints;
+ }
+
+ public void setQueueLengthHistogramNumDecimalPoints(int queueLengthHistogramNumDecimalPoints) {
+ this.queueLengthHistogramNumDecimalPoints = queueLengthHistogramNumDecimalPoints;
+ }
+
+ public String getTaskAgeHistogramLabel() {
+ return taskAgeHistogramLabel;
+ }
+
+ public void setTaskAgeHistogramLabel(String taskAgeHistogramLabel) {
+ this.taskAgeHistogramLabel = taskAgeHistogramLabel;
+ }
+
+ public long getTaskAgeHistogramMaxYAxis() {
+ return taskAgeHistogramMaxYAxis;
+ }
+
+ public void setTaskAgeHistogramMaxYAxis(long taskAgeHistogramMaxYAxis) {
+ this.taskAgeHistogramMaxYAxis = taskAgeHistogramMaxYAxis;
+ }
+
+ public int getTaskAgeHistogramNumBins() {
+ return taskAgeHistogramNumBins;
+ }
+
+ public void setTaskAgeHistogramNumBins(int taskAgeHistogramNumBins) {
+ this.taskAgeHistogramNumBins = taskAgeHistogramNumBins;
+ }
+
+ public int getTaskAgeHistogramNumDecimalPoints() {
+ return taskAgeHistogramNumDecimalPoints;
+ }
+
+ public void setTaskAgeHistogramNumDecimalPoints(int taskAgeHistogramNumDecimalPoints) {
+ this.taskAgeHistogramNumDecimalPoints = taskAgeHistogramNumDecimalPoints;
+ }
+
+ public String getResponseTimeHistogramLabel() {
+ return responseTimeHistogramLabel;
+ }
+
+ public void setResponseTimeHistogramLabel(String responseTimeHistogramLabel) {
+ this.responseTimeHistogramLabel = responseTimeHistogramLabel;
+ }
+
+ public long getResponseTimeHistogramMaxYAxis() {
+ return responseTimeHistogramMaxYAxis;
+ }
+
+ public void setResponseTimeHistogramMaxYAxis(long responseTimeHistogramMaxYAxis) {
+ this.responseTimeHistogramMaxYAxis = responseTimeHistogramMaxYAxis;
+ }
+
+ public int getResponseTimeHistogramNumBins() {
+ return responseTimeHistogramNumBins;
+ }
+
+ public void setResponseTimeHistogramNumBins(int responseTimeHistogramNumBins) {
+ this.responseTimeHistogramNumBins = responseTimeHistogramNumBins;
+ }
+
+ public int getResponseTimeHistogramNumDecimalPoints() {
+ return responseTimeHistogramNumDecimalPoints;
+ }
+
+ public void setResponseTimeHistogramNumDecimalPoints(int responseTimeHistogramNumDecimalPoints) {
+ this.responseTimeHistogramNumDecimalPoints = responseTimeHistogramNumDecimalPoints;
+ }
+
+ public String getTpsHistogramLabel() {
+ return tpsHistogramLabel;
+ }
+
+ public void setTpsHistogramLabel(String tpsHistogramLabel) {
+ this.tpsHistogramLabel = tpsHistogramLabel;
+ }
+
+ public long getTpsHistogramMaxYAxis() {
+ return tpsHistogramMaxYAxis;
+ }
+
+ public void setTpsHistogramMaxYAxis(long tpsHistogramMaxYAxis) {
+ this.tpsHistogramMaxYAxis = tpsHistogramMaxYAxis;
+ }
+
+ public int getTpsHistogramNumBins() {
+ return tpsHistogramNumBins;
+ }
+
+ public void setTpsHistogramNumBins(int tpsHistogramNumBins) {
+ this.tpsHistogramNumBins = tpsHistogramNumBins;
+ }
+
+ public int getTpsHistogramNumDecimalPoints() {
+ return tpsHistogramNumDecimalPoints;
+ }
+
+ public void setTpsHistogramNumDecimalPoints(int tpsHistogramNumDecimalPoints) {
+ this.tpsHistogramNumDecimalPoints = tpsHistogramNumDecimalPoints;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java
new file mode 100644
index 0000000..566c249
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java
@@ -0,0 +1,305 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.config;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.TimeZone;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.onap.aai.sparky.sync.SynchronizerConstants;
+
+public class SyncControllerConfig {
+
+ private static final String UNKNOWN_CONTROLLER_NAME = "UnknownControllerName";
+
+ private String controllerName;
+ private boolean enabled;
+ private int syncTaskDelayInMs;
+ private int syncTaskFrequencyInDays;
+
+ private int numSyncControllerWorkers;
+ private boolean runOnceSyncEnabled;
+ private boolean periodicSyncEnabled;
+
+ private String targetSyncStartTimeStamp;
+
+ private int numInternalSyncWorkers;
+ private int numSyncElasticWorkers;
+ private int numSyncActiveInventoryWorkers;
+
+ /*
+ * calculated variables based on incoming config
+ */
+ private String timeZoneOfSyncStartTimeStamp;
+ private int syncTaskStartTimeHr;
+ private int syncTaskStartTimeMin;
+ private int syncTaskStartTimeSec;
+
+
+
+ public SyncControllerConfig() {
+ controllerName = UNKNOWN_CONTROLLER_NAME;
+ enabled = false;
+ syncTaskDelayInMs = 0;
+ syncTaskFrequencyInDays = 365;
+ numSyncControllerWorkers = 1;
+ runOnceSyncEnabled = false;
+ periodicSyncEnabled = false;
+ targetSyncStartTimeStamp = SynchronizerConstants.DEFAULT_START_TIMESTAMP;
+ numInternalSyncWorkers = 2;
+ numSyncElasticWorkers = 5;
+ numSyncActiveInventoryWorkers = 5;
+ }
+
+ protected void initializeSyncTimeParameters() {
+
+ if (syncTaskDelayInMs < 0) {
+ throw new IllegalArgumentException("syncTaskDelayInMs must >= 0");
+ }
+
+ Pattern pattern = Pattern.compile(SynchronizerConstants.TIMESTAMP24HOURS_PATTERN);
+ Matcher matcher = pattern.matcher(targetSyncStartTimeStamp);
+ if (!matcher.matches()) {
+ throw new IllegalArgumentException("Invalid time format for targetSyncStartTimeStamp");
+ }
+
+ List<String> timestampVal = Arrays.asList(targetSyncStartTimeStamp.split(" "));
+
+ if (timestampVal.size() == SynchronizerConstants.COMPONENTS_IN_TIMESTAMP) {
+
+ // Need both time and timezone offset
+ timeZoneOfSyncStartTimeStamp = timestampVal
+ .get(SynchronizerConstants.IDX_TIMEZONE_IN_TIMESTAMP).replaceAll("UTC", "GMT");
+
+ String time = timestampVal.get(SynchronizerConstants.IDX_TIME_IN_TIMESTAMP);
+ DateFormat format = new SimpleDateFormat("HH:mm:ss");
+
+ Date date = null;
+
+ try {
+ date = format.parse(time);
+ } catch (ParseException parseException) {
+ throw new IllegalArgumentException(parseException);
+ }
+
+ Calendar calendar = Calendar.getInstance();
+ calendar.setTime(date);
+
+ syncTaskStartTimeHr = calendar.get(Calendar.HOUR_OF_DAY);
+ syncTaskStartTimeMin = calendar.get(Calendar.MINUTE);
+ syncTaskStartTimeSec = calendar.get(Calendar.SECOND);
+ } else {
+ throw new IllegalArgumentException("Invalid timestamp format from targetSyncStartTimeStamp");
+ }
+
+ }
+
+
+ public int getNumInternalSyncWorkers() {
+ return numInternalSyncWorkers;
+ }
+
+ public void setNumInternalSyncWorkers(int numInternalSyncWorkers) {
+ this.numInternalSyncWorkers = numInternalSyncWorkers;
+ }
+
+ public int getNumSyncElasticWorkers() {
+ return numSyncElasticWorkers;
+ }
+
+ public void setNumSyncElasticWorkers(int numSyncElasticWorkers) {
+ this.numSyncElasticWorkers = numSyncElasticWorkers;
+ }
+
+ public int getNumSyncActiveInventoryWorkers() {
+ return numSyncActiveInventoryWorkers;
+ }
+
+ public void setNumSyncActiveInventoryWorkers(int numSyncActiveInventoryWorkers) {
+ this.numSyncActiveInventoryWorkers = numSyncActiveInventoryWorkers;
+ }
+
+ public String getTargetSyncStartTimeStamp() {
+ return targetSyncStartTimeStamp;
+ }
+
+ public void setTargetSyncStartTimeStamp(String targetSyncStartTimeStamp) {
+ this.targetSyncStartTimeStamp = targetSyncStartTimeStamp;
+ initializeSyncTimeParameters();
+ }
+
+ public String getControllerName() {
+ return controllerName;
+ }
+
+ public void setControllerName(String controllerName) {
+ this.controllerName = controllerName;
+ }
+
+ public boolean isEnabled() {
+ return enabled;
+ }
+
+ public void setEnabled(boolean enabled) {
+ this.enabled = enabled;
+ }
+
+ public int getSyncTaskDelayInMs() {
+ return syncTaskDelayInMs;
+ }
+
+ public void setSyncTaskDelayInMs(int syncTaskDelayInMs) {
+ this.syncTaskDelayInMs = syncTaskDelayInMs;
+ }
+
+ public int getSyncTaskFrequencyInDays() {
+ return syncTaskFrequencyInDays;
+ }
+
+ public void setSyncTaskFrequencyInDays(int syncTaskFrequencyInDays) {
+ this.syncTaskFrequencyInDays = syncTaskFrequencyInDays;
+ }
+
+ public int getNumSyncControllerWorkers() {
+ return numSyncControllerWorkers;
+ }
+
+ public void setNumSyncControllerWorkers(int numSyncControllerWorkers) {
+ this.numSyncControllerWorkers = numSyncControllerWorkers;
+ }
+
+ public boolean isRunOnceSyncEnabled() {
+ return runOnceSyncEnabled;
+ }
+
+ public void setRunOnceSyncEnabled(boolean runOnceSyncEnabled) {
+ this.runOnceSyncEnabled = runOnceSyncEnabled;
+ }
+
+ public boolean isPeriodicSyncEnabled() {
+ return periodicSyncEnabled;
+ }
+
+ public void setPeriodicSyncEnabled(boolean periodicSyncEnabled) {
+ this.periodicSyncEnabled = periodicSyncEnabled;
+ }
+
+ public long getSyncFrequencyInMs() {
+
+ return (syncTaskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY);
+
+ }
+
+ public Calendar getTargetSyncTime() {
+
+ TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp);
+ Calendar targetSyncTime = Calendar.getInstance(tz);
+
+ targetSyncTime.set(Calendar.HOUR_OF_DAY, syncTaskStartTimeHr);
+ targetSyncTime.set(Calendar.MINUTE, syncTaskStartTimeMin);
+ targetSyncTime.set(Calendar.SECOND, syncTaskStartTimeSec);
+
+ return targetSyncTime;
+
+ }
+
+
+ public String getNextSyncTime() {
+
+ int taskFrequencyInSeconds = 0;
+ if (getSyncFrequencyInMs() > 0) {
+ taskFrequencyInSeconds = (int) (getSyncFrequencyInMs() / 1000);
+ }
+
+ if (taskFrequencyInSeconds < 86400) {
+
+ TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp);
+ Calendar targetSyncTime = Calendar.getInstance(tz);
+ targetSyncTime.add(Calendar.SECOND, taskFrequencyInSeconds);
+
+ return SynchronizerConstants.SIMPLE_DATE_FORMAT.format(targetSyncTime.getTimeInMillis())
+ .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD);
+
+ } else {
+
+ return SynchronizerConstants.SIMPLE_DATE_FORMAT
+ .format(getNextSyncTime(getTargetSyncTime(), taskFrequencyInSeconds))
+ .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD);
+
+ }
+
+ }
+
+ public long getNextSyncTime(Calendar syncTime, int taskFrequencyInSeconds) {
+
+ TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp);
+ Calendar timeNow = Calendar.getInstance(tz);
+
+ return getNextSyncTime(syncTime, timeNow.getTimeInMillis(), taskFrequencyInSeconds);
+ }
+
+ /**
+ * Gets the next sync time.
+ *
+ * @param syncTime the scheduled sync start time
+ * @param timeNowInMs the current time in ms
+ * @param taskFrequencyInSeconds the task period in seconds
+ * @return the next sync time in ms
+ */
+
+ public long getNextSyncTime(Calendar syncTime, long timeNowInMs, int taskFrequencyInSeconds) {
+ if (taskFrequencyInSeconds == 0) {
+ return 0;
+ } else if (timeNowInMs > syncTime.getTimeInMillis()) {
+
+ /*
+ * If current time is after the scheduled sync start time, then we'll skip ahead to the next
+ * sync time period
+ */
+
+ syncTime.add(Calendar.SECOND, taskFrequencyInSeconds);
+ }
+
+ return syncTime.getTimeInMillis();
+ }
+
+ public String getTimeZoneOfSyncStartTimeStamp() {
+ return timeZoneOfSyncStartTimeStamp;
+ }
+
+ public void setTimeZoneOfSyncStartTimeStamp(String timeZoneOfSyncStartTimeStamp) {
+ this.timeZoneOfSyncStartTimeStamp = timeZoneOfSyncStartTimeStamp;
+ }
+
+
+
+}
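Editor's note: the scheduling arithmetic above is easier to reason about in isolation. The following standalone sketch mirrors the getNextSyncTime(Calendar, long, int) overload; the class name and sample values are hypothetical, only the arithmetic is taken from the patch.

import java.util.Calendar;
import java.util.TimeZone;

public class NextSyncTimeSketch {

  // Mirrors getNextSyncTime(Calendar, long, int): if "now" is already past the
  // scheduled start, skip ahead by one task period before returning the time.
  static long nextSyncTime(Calendar syncTime, long timeNowInMs, int taskFrequencyInSeconds) {
    if (taskFrequencyInSeconds == 0) {
      return 0;
    }
    if (timeNowInMs > syncTime.getTimeInMillis()) {
      syncTime.add(Calendar.SECOND, taskFrequencyInSeconds);
    }
    return syncTime.getTimeInMillis();
  }

  public static void main(String[] args) {
    Calendar target = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    target.set(Calendar.HOUR_OF_DAY, 5);
    target.set(Calendar.MINUTE, 0);
    target.set(Calendar.SECOND, 0);
    // With a daily frequency (86400 s) this prints today's 05:00 UTC if it has not
    // passed yet, otherwise tomorrow's 05:00 UTC.
    System.out.println(nextSyncTime(target, System.currentTimeMillis(), 86400));
  }
}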
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java
new file mode 100644
index 0000000..30d4e71
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java
@@ -0,0 +1,99 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.onap.aai.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * The Class AggregationEntity.
+ */
+public class AggregationEntity extends IndexableEntity implements IndexDocument {
+ private Map<String, String> attributes = new HashMap<String, String>();
+ protected ObjectMapper mapper = new ObjectMapper();
+
+ /**
+ * Instantiates a new aggregation entity.
+ */
+ public AggregationEntity() {
+ super();
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields()
+ */
+ @Override
+ public void deriveFields() {
+
+ /*
+ * We'll try and create a unique identity key that we can use for differencing the previously
+ * imported record sets as we won't have granular control of what is created/removed and when.
+ * The best we can hope for is identification of resources by generated Id until the
+ * Identity-Service UUID is tagged against all resources, then we can use that instead.
+ */
+ this.id =
+ NodeUtils.generateUniqueShaDigest(link);
+ }
+
+ public void copyAttributeKeyValuePair(Map<String, Object> map){
+ for(String key: map.keySet()){
+ if (!key.equalsIgnoreCase("relationship-list")){ // relationship data is not needed for aggregation
+ this.attributes.put(key, map.get(key).toString()); // attribute values are flattened to their string representation
+ }
+ }
+ }
+
+ public void addAttributeKeyValuePair(String key, String value){
+ this.attributes.put(key, value);
+ }
+
+ @Override
+ public String getAsJson() {
+ ObjectNode rootNode = mapper.createObjectNode();
+ rootNode.put("link", this.getLink());
+ rootNode.put("lastmodTimestamp", this.getEntityTimeStamp());
+ for (String key: this.attributes.keySet()){
+ rootNode.put(key, this.attributes.get(key));
+ }
+ return rootNode.toString();
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "")
+ + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", "
+ : "")
+ + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "")
+ + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + "]";
+ }
+}
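Editor's note: for orientation, a hypothetical usage sketch of AggregationEntity based only on the methods shown above; the entity type, self-link and attribute values are invented.

import java.util.HashMap;
import java.util.Map;

public class AggregationEntitySketch {
  public static void main(String[] args) throws Exception {
    AggregationEntity entity = new AggregationEntity();
    entity.setEntityType("generic-vnf");
    entity.setLink("https://aai.example.org/aai/v11/network/generic-vnfs/generic-vnf/example-vnf-id");

    Map<String, Object> aaiAttributes = new HashMap<>();
    aaiAttributes.put("vnf-id", "example-vnf-id");
    aaiAttributes.put("vnf-name", "exampleVnf");
    aaiAttributes.put("relationship-list", "ignored by copyAttributeKeyValuePair");

    entity.copyAttributeKeyValuePair(aaiAttributes); // drops relationship-list
    entity.deriveFields();                           // id = SHA digest of the link

    System.out.println(entity.getId());
    System.out.println(entity.getAsJson());
  }
}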
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java
new file mode 100644
index 0000000..06f60b3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java
@@ -0,0 +1,111 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.search.filters.config.UiFilterListItemConfig;
+import org.onap.aai.sparky.search.filters.config.UiViewListItemConfig;
+import org.onap.aai.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class AggregationSuggestionEntity extends IndexableEntity implements IndexDocument {
+
+ private static final String FILTER_ID = "filterId";
+ private static final String FILTER_LIST = "filterList";
+
+ private List<String> inputs = new ArrayList<>();
+ private final String outputString = "VNFs";
+ protected ObjectMapper mapper = new ObjectMapper();
+ List<String> filterIds = new ArrayList<>();
+
+ @JsonIgnore
+ private FiltersConfig filtersConfig;
+
+ public AggregationSuggestionEntity(FiltersConfig filtersConfig) {
+ super();
+ this.filtersConfig = filtersConfig;
+ inputs.add("VNFs");
+ inputs.add("generic-vnfs");
+ }
+
+ @Override
+ public void deriveFields() {
+ this.id = NodeUtils.generateUniqueShaDigest(this.outputString);
+ }
+
+ @Override
+ public String getAsJson() {
+ JSONArray inputArray = new JSONArray();
+ for (String input: inputs) {
+ input = input.replace(",", "");
+ input = input.replace("[", "");
+ input = input.replace("]", "");
+ inputArray.put(input);
+ }
+
+ JSONObject entitySuggest = new JSONObject();
+ entitySuggest.put("input", inputArray);
+ entitySuggest.put("output", this.outputString);
+ entitySuggest.put("weight", 100);
+
+ JSONArray payloadFilters = new JSONArray();
+
+ for (String filterId : filterIds) {
+ JSONObject filterPayload = new JSONObject();
+ filterPayload.put(FILTER_ID, filterId);
+ payloadFilters.put(filterPayload);
+ }
+
+ JSONObject payloadNode = new JSONObject();
+ payloadNode.put(FILTER_LIST, payloadFilters);
+ entitySuggest.put("payload", payloadNode);
+
+ JSONObject rootNode = new JSONObject();
+ rootNode.put("entity_suggest", entitySuggest);
+
+ return rootNode.toString();
+ }
+
+ public void initializeFilters() {
+ for (UiViewListItemConfig view : filtersConfig.getViewsConfig().getViews()) {
+ if (view.getViewName().equals("vnfSearch")) {
+ for (UiFilterListItemConfig currentViewFilter : view.getFilters()) {
+ filterIds.add(currentViewFilter.getFilterId());
+ }
+ }
+ }
+ }
+
+ public void setFilterIds(List<String> filterIds) {
+ this.filterIds = filterIds;
+ }
+}
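Editor's note: a hedged sketch of how the suggestion entity above would typically be built; the FiltersConfig instance is assumed to come from the application's filter configuration, and the helper class name is made up.

import org.onap.aai.sparky.search.filters.config.FiltersConfig;

public class AggregationSuggestionSketch {

  public static String buildSuggestionJson(FiltersConfig filtersConfig) {
    AggregationSuggestionEntity suggestion = new AggregationSuggestionEntity(filtersConfig);
    suggestion.initializeFilters(); // collect filter ids from the "vnfSearch" view
    suggestion.deriveFields();      // id = SHA digest of the fixed "VNFs" output string
    return suggestion.getAsJson();  // {"entity_suggest":{"input":[...],"output":"VNFs",...}}
  }
}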
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java
new file mode 100644
index 0000000..6ee3351
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java
@@ -0,0 +1,41 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+/**
+ * The Interface IndexDocument.
+ */
+public interface IndexDocument {
+
+ /**
+ * Derive fields.
+ */
+ public void deriveFields();
+
+ public String getId();
+
+ public String getAsJson() throws Exception;
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java
new file mode 100644
index 0000000..0c94227
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java
@@ -0,0 +1,97 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+import java.util.ArrayList;
+
+import org.onap.aai.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.core.JsonProcessingException;
+
+
+/**
+ * The Class IndexableCrossEntityReference.
+ */
+
+public class IndexableCrossEntityReference extends IndexableEntity implements IndexDocument {
+
+ protected String crossReferenceEntityValues;
+ protected ArrayList<String> crossEntityReferenceCollection = new ArrayList<String>();
+
+ /**
+ * Instantiates a new indexable cross entity reference.
+ */
+ public IndexableCrossEntityReference() {
+ super();
+ }
+
+ /**
+ * Adds the cross entity reference value.
+ *
+ * @param crossEntityReferenceValue the cross entity reference value
+ */
+ public void addCrossEntityReferenceValue(String crossEntityReferenceValue) {
+ if (!crossEntityReferenceCollection.contains(crossEntityReferenceValue)) {
+ crossEntityReferenceCollection.add(crossEntityReferenceValue);
+ }
+ }
+
+ public String getCrossReferenceEntityValues() {
+ return crossReferenceEntityValues;
+ }
+
+ public void setCrossReferenceEntityValues(String crossReferenceEntityValues) {
+ this.crossReferenceEntityValues = crossReferenceEntityValues;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields()
+ */
+ @Override
+ public void deriveFields() {
+ this.id = NodeUtils.generateUniqueShaDigest(link);
+ this.crossReferenceEntityValues = NodeUtils.concatArray(crossEntityReferenceCollection, ";");
+ }
+
+ @JsonIgnore // ignore this getter during serialization; otherwise Jackson would invoke it when converting the object to JSON
+ @Override
+ public String getAsJson() throws JsonProcessingException {
+
+ return NodeUtils.convertObjectToJson(this, false);
+
+ }
+
+ @Override
+ public String toString() {
+ return "IndexableCrossEntityReference ["
+ + (crossReferenceEntityValues != null
+ ? "crossReferenceEntityValues=" + crossReferenceEntityValues + ", " : "")
+ + (crossEntityReferenceCollection != null
+ ? "crossEntityReferenceCollection=" + crossEntityReferenceCollection + ", " : "");
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java
new file mode 100644
index 0000000..9a7f865
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java
@@ -0,0 +1,100 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * The Class IndexableEntity.
+ */
+public abstract class IndexableEntity {
+ protected String id; // generated, SHA-256 digest
+ protected String entityType;
+ protected String entityPrimaryKeyValue;
+ protected String lastmodTimestamp;
+ protected String link;
+
+ private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
+
+ /**
+ * Instantiates a new indexable entity.
+ */
+ public IndexableEntity() {
+ SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT);
+ Timestamp timestamp = new Timestamp(System.currentTimeMillis());
+ String currentFormattedTimeStamp = dateFormat.format(timestamp);
+ this.setEntityTimeStamp(currentFormattedTimeStamp);
+ }
+
+ @JsonIgnore
+ public String getId() {
+ return id;
+ }
+
+ @JsonProperty("entityType")
+ public String getEntityType() {
+ return entityType;
+ }
+
+ @JsonProperty("entityPrimaryKeyValue")
+ public String getEntityPrimaryKeyValue() {
+ return entityPrimaryKeyValue;
+ }
+
+ @JsonProperty("lastmodTimestamp")
+ public String getEntityTimeStamp() {
+ return lastmodTimestamp;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public void setEntityPrimaryKeyValue(String fieldValue) {
+ this.entityPrimaryKeyValue = fieldValue;
+ }
+
+ public void setEntityTimeStamp(String lastmodTimestamp) {
+ this.lastmodTimestamp = lastmodTimestamp;
+ }
+
+ @JsonProperty("link")
+ public String getLink() {
+ return link;
+ }
+
+ public void setLink(String link) {
+ this.link = link;
+ }
+
+}
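Editor's note: the lastmodTimestamp is stamped at construction time using TIMESTAMP_FORMAT; a quick illustrative check follows (the value in the comment is only an example).

import java.sql.Timestamp;
import java.text.SimpleDateFormat;

public class TimestampFormatSketch {
  public static void main(String[] args) {
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
    System.out.println(dateFormat.format(new Timestamp(System.currentTimeMillis())));
    // e.g. 2018-03-22T09:34:25.123-0400
  }
}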
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java
new file mode 100644
index 0000000..82c874a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java
@@ -0,0 +1,59 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+
+/**
+ * The Class MergableEntity.
+ */
+public class MergableEntity {
+ private Map<String, String> other = new HashMap<String, String>();
+
+ /**
+ * Any.
+ *
+ * @return the map
+ */
+ @JsonAnyGetter
+ public Map<String, String> any() {
+ return other;
+ }
+
+ /**
+ * Sets the.
+ *
+ * @param name the name
+ * @param value the value
+ */
+ @JsonAnySetter
+ public void set(String name, String value) {
+ other.put(name, value);
+ }
+}
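Editor's note: the @JsonAnyGetter/@JsonAnySetter pair is what lets Jackson merge a partial JSON document over an existing MergableEntity. A minimal sketch, assuming Jackson's readerForUpdating(...) is used for the merge; the payloads are invented.

import com.fasterxml.jackson.databind.ObjectMapper;

public class MergableEntitySketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();

    MergableEntity original = mapper.readValue(
        "{\"vnf-name\":\"oldName\",\"prov-status\":\"PREPROV\"}", MergableEntity.class);

    // readerForUpdating(...) applies the second document on top of the first; unknown
    // properties land in the any-setter, so both keys survive and vnf-name is replaced.
    MergableEntity merged =
        mapper.readerForUpdating(original).readValue("{\"vnf-name\":\"newName\"}");

    System.out.println(merged.any()); // contains vnf-name=newName and prov-status=PREPROV
  }
}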
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java
new file mode 100644
index 0000000..8462bc9
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * The Class ObjectIdCollection.
+ */
+public class ObjectIdCollection {
+
+ protected ConcurrentHashMap<String, String> importedObjectIds =
+ new ConcurrentHashMap<String, String>();
+
+ public Collection<String> getImportedObjectIds() {
+ return importedObjectIds.values();
+ }
+
+ /**
+ * Adds the object id.
+ *
+ * @param id the id
+ */
+ public void addObjectId(String id) {
+ importedObjectIds.putIfAbsent(id, id);
+ }
+
+ public int getSize() {
+ return importedObjectIds.values().size();
+ }
+
+ /**
+ * Adds the all.
+ *
+ * @param items the items
+ */
+ public void addAll(List<String> items) {
+ if (items == null) {
+ return;
+ }
+
+ items.stream().forEach((item) -> {
+ importedObjectIds.putIfAbsent(item, item);
+ });
+
+ }
+
+ /**
+ * Clear.
+ */
+ public void clear() {
+ importedObjectIds.clear();
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java
new file mode 100644
index 0000000..84fc093
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java
@@ -0,0 +1,142 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.aai.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * The Class SearchableEntity.
+ */
+public class SearchableEntity extends IndexableEntity implements IndexDocument {
+
+ @JsonIgnore
+ protected List<String> searchTagCollection = new ArrayList<String>();
+
+ @JsonIgnore
+ protected List<String> searchTagIdCollection = new ArrayList<String>();
+
+ @JsonIgnore
+ protected ObjectMapper mapper = new ObjectMapper();
+
+ /**
+ * Instantiates a new searchable entity.
+ */
+ public SearchableEntity() {
+ super();
+ }
+
+ /*
+ * Generated fields, leave the settings for junit overrides
+ */
+
+ protected String searchTags; // generated based on searchTagCollection values
+
+ protected String searchTagIDs;
+
+ /**
+ * Generates the sha based id.
+ */
+ public void generateId() {
+ this.id = NodeUtils.generateUniqueShaDigest(link);
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields()
+ */
+ @Override
+ public void deriveFields() {
+
+ /*
+ * We'll try and create a unique identity key that we can use for differencing the previously
+ * imported record sets as we won't have granular control of what is created/removed and when.
+ * The best we can hope for is identification of resources by generated Id until the
+ * Identity-Service UUID is tagged against all resources, then we can use that instead.
+ */
+ generateId();
+ this.searchTags = NodeUtils.concatArray(searchTagCollection, ";");
+ this.searchTagIDs = NodeUtils.concatArray(this.searchTagIdCollection, ";");
+ }
+
+ /**
+ * Adds the search tag with key.
+ *
+ * @param searchTag the search tag
+ * @param searchTagKey the key associated with the search tag (key:value)
+ */
+ public void addSearchTagWithKey(String searchTag, String searchTagKey) {
+ searchTagIdCollection.add(searchTagKey);
+ searchTagCollection.add(searchTag);
+ }
+
+ public List<String> getSearchTagCollection() {
+ return searchTagCollection;
+ }
+
+ @JsonProperty("searchTags")
+ public String getSearchTags() {
+ return searchTags;
+ }
+
+ @JsonProperty("searchTagIDs")
+ public String getSearchTagIDs() {
+ return searchTagIDs;
+ }
+
+ @JsonIgnore
+ public List<String> getSearchTagIdCollection() {
+ return searchTagIdCollection;
+ }
+
+ @Override
+ @JsonIgnore
+ public String getAsJson() throws JsonProcessingException {
+ return NodeUtils.convertObjectToJson(this, false);
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "")
+ + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", "
+ : "")
+ + (searchTagCollection != null ? "searchTagCollection=" + searchTagCollection + ", " : "")
+ + (searchTagIdCollection != null ? "searchTagIDCollection=" + searchTagIdCollection + ", "
+ : "")
+ + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "")
+ + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "")
+ + (searchTags != null ? "searchTags=" + searchTags + ", " : "")
+ + (searchTagIDs != null ? "searchTagIDs=" + searchTagIDs : "") + "]";
+ }
+}
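Editor's note: an illustrative-only sketch of how a synchronizer might populate a SearchableEntity; the entity type, link and tag values are made up.

public class SearchableEntitySketch {
  public static void main(String[] args) throws Exception {
    SearchableEntity entity = new SearchableEntity();
    entity.setEntityType("pserver");
    entity.setEntityPrimaryKeyValue("example-hostname");
    entity.setLink("https://aai.example.org/aai/v11/cloud-infrastructure/pservers/pserver/example-hostname");

    entity.addSearchTagWithKey("example-hostname", "hostname");
    entity.addSearchTagWithKey("10.0.0.1", "ipv4-oam-address");

    entity.deriveFields(); // id from the link; searchTags/searchTagIDs joined with ';'
    System.out.println(entity.getSearchTags()); // example-hostname;10.0.0.1
    System.out.println(entity.getAsJson());
  }
}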
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java
new file mode 100644
index 0000000..2b3f858
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java
@@ -0,0 +1,90 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+/**
+ * The Class SelfLinkDescriptor.
+ */
+public class SelfLinkDescriptor {
+ private String selfLink;
+ private String entityType;
+ private String depthModifier;
+
+ public String getDepthModifier() {
+ return depthModifier;
+ }
+
+ public void setDepthModifier(String depthModifier) {
+ this.depthModifier = depthModifier;
+ }
+
+ public String getSelfLink() {
+ return selfLink;
+ }
+
+ public void setSelfLink(String selfLink) {
+ this.selfLink = selfLink;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public SelfLinkDescriptor(String selfLink) {
+ this(selfLink, null, null);
+ }
+
+ /**
+ * Instantiates a new self link descriptor.
+ *
+ * @param selfLink the self link
+ * @param entityType the entity type
+ */
+ public SelfLinkDescriptor(String selfLink, String entityType) {
+ this(selfLink, null, entityType);
+ }
+
+ public SelfLinkDescriptor(String selfLink, String depthModifier, String entityType) {
+ this.selfLink = selfLink;
+ this.entityType = entityType;
+ this.depthModifier = depthModifier;
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "SelfLinkDescriptor [" + (selfLink != null ? "selfLink=" + selfLink + ", " : "")
+ + (entityType != null ? "entityType=" + entityType + ", " : "")
+ + (depthModifier != null ? "depthModifier=" + depthModifier : "") + "]";
+ }
+
+}
+
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java
new file mode 100644
index 0000000..3648b53
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java
@@ -0,0 +1,327 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup;
+import org.onap.aai.sparky.search.filters.config.FiltersConfig;
+import org.onap.aai.sparky.search.filters.config.FiltersDetailsConfig;
+import org.onap.aai.sparky.search.filters.config.UiFilterConfig;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.util.SuggestionsPermutation;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class SuggestionSearchEntity extends IndexableEntity implements IndexDocument {
+ private static final String FILTER_ID = "filterId";
+ private static final String FILTER_VALUE = "filterValue";
+ private static final String FILTER_LIST = "filterList";
+
+ private String entityType;
+ private List<String> suggestionConnectorWords = new ArrayList<String>();
+ private List<String> suggestionAttributeTypes = new ArrayList<String>();
+ private List<String> suggestionAttributeValues = new ArrayList<String>();
+ private List<String> suggestionTypeAliases = new ArrayList<String>();
+ private List<String> suggestionInputPermutations = new ArrayList<String>();
+ private List<String> suggestableAttr = new ArrayList<String>();
+
+ private Map<String, String> inputOutputData = new HashMap<String, String>();
+ Map<String, UiFilterConfig> filters = new HashMap<String, UiFilterConfig>();
+ private JSONObject payload = new JSONObject();
+ private JSONArray payloadFilters = new JSONArray();
+ private StringBuffer outputString = new StringBuffer();
+ private String aliasToUse;
+
+ private SuggestionEntityLookup entityLookup;
+
+ public JSONObject getPayload() {
+ return payload;
+ }
+
+ public void setPayload(JSONObject payload) {
+ this.payload = payload;
+ }
+
+ protected ObjectMapper mapper = new ObjectMapper();
+
+ public SuggestionSearchEntity(FiltersConfig filtersConfig) {
+ super();
+
+ FiltersDetailsConfig filterConfigList = filtersConfig.getFiltersConfig();
+ // Populate the map with keys that will match the suggestableAttr values
+ for(UiFilterConfig filter : filterConfigList.getFilters()) {
+ if(filter.getDataSource() != null) {
+ filters.put(filter.getDataSource().getFieldName(), filter);
+ }
+ }
+ }
+
+ public SuggestionSearchEntity(FiltersConfig filtersConfig, SuggestionEntityLookup entityLookup) {
+
+ this.entityLookup = entityLookup;
+
+ FiltersDetailsConfig filterConfigList = filtersConfig.getFiltersConfig();
+ // Populate the map with keys that will match the suggestableAttr values
+ for(UiFilterConfig filter : filterConfigList.getFilters()) {
+ if(filter.getDataSource() != null) {
+ filters.put(filter.getDataSource().getFieldName(), filter);
+ }
+ }
+ }
+
+ public SuggestionSearchEntity(SuggestionEntityLookup entityLookup, FiltersConfig config) {
+
+ FiltersDetailsConfig filterConfigList = config.getFiltersConfig();
+ // Populate the map with keys that will match the suggestableAttr values
+ for(UiFilterConfig filter : filterConfigList.getFilters()) {
+ if(filter.getDataSource() != null) {
+ filters.put(filter.getDataSource().getFieldName(), filter);
+ }
+ }
+ }
+
+ public void setSuggestableAttr(ArrayList<String> attributes) {
+ for (String attribute : attributes) {
+ this.suggestableAttr.add(attribute);
+ }
+ }
+
+ public void setPayloadFromResponse(JsonNode node) {
+ if (suggestableAttr != null) {
+ for (String attribute : suggestableAttr) {
+ if (node.get(attribute) != null) {
+ inputOutputData.put(attribute, node.get(attribute).asText());
+ this.payload.put(attribute, node.get(attribute).asText());
+ }
+ }
+ }
+ }
+
+ public void setFilterBasedPayloadFromResponse(JsonNode node, String entityName, ArrayList<String> uniqueList) {
+
+ HashMap<String, String> desc = entityLookup.getSuggestionSearchEntityOxmModel().get(entityName);
+
+ if ( desc == null ) {
+ return;
+ }
+
+ String attr = desc.get("suggestibleAttributes");
+
+ if ( attr == null ) {
+ return;
+ }
+
+ List<String> suggestableAttrOxm = Arrays.asList(attr.split(","));
+
+ /*
+ * Note:
+ * (1) 'uniqueList' is one item within the power set of the suggestable attributes.
+ * (2) 'inputOutputData' is used to generate permutations of strings
+ */
+ for (String selectiveAttr: uniqueList) {
+ if (node.get(selectiveAttr) != null) {
+ inputOutputData.put(selectiveAttr, node.get(selectiveAttr).asText());
+ }
+ }
+
+ if (suggestableAttrOxm != null) {
+ for (String attribute : suggestableAttrOxm) {
+ if (node.get(attribute) != null && uniqueList.contains(attribute)) {
+ UiFilterConfig filterConfig = filters.get(attribute);
+ if(filterConfig != null) {
+ JSONObject filterPayload = new JSONObject();
+ filterPayload.put(FILTER_ID, filterConfig.getFilterId());
+ filterPayload.put(FILTER_VALUE, node.get(attribute).asText());
+ this.payloadFilters.put(filterPayload);
+ } else {
+ this.payload.put(attribute, node.get(attribute).asText());
+ }
+ } else {
+ UiFilterConfig emptyValueFilterConfig = filters.get(attribute);
+ if(emptyValueFilterConfig != null) {
+ JSONObject emptyValueFilterPayload = new JSONObject();
+ emptyValueFilterPayload.put(FILTER_ID, emptyValueFilterConfig.getFilterId());
+ this.payloadFilters.put(emptyValueFilterPayload);
+ }
+ }
+ }
+ this.payload.put(FILTER_LIST, this.payloadFilters);
+ }
+ }
+
+ @Override
+ public String getEntityType() {
+ return entityType;
+ }
+
+ @Override
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public List<String> getSuggestionConnectorWords() {
+ return suggestionConnectorWords;
+ }
+
+ public void setSuggestionConnectorWords(List<String> suggestionConnectorWords) {
+ this.suggestionConnectorWords = suggestionConnectorWords;
+ }
+
+ public List<String> getSuggestionPropertyTypes() {
+ return this.suggestionAttributeTypes;
+ }
+
+ public void setSuggestionPropertyTypes(List<String> suggestionPropertyTypes) {
+ this.suggestionAttributeTypes = suggestionPropertyTypes;
+ }
+
+ public List<String> getSuggestionAttributeValues() {
+ return this.suggestionAttributeValues;
+ }
+
+ public void setSuggestionAttributeValues(List<String> suggestionAttributeValues) {
+ this.suggestionAttributeValues = suggestionAttributeValues;
+ }
+
+ public List<String> getSuggestionAliases() {
+ return this.suggestionTypeAliases;
+ }
+
+ public void setSuggestionAliases(List<String> suggestionAliases) {
+ this.suggestionTypeAliases = suggestionAliases;
+ }
+
+ public List<String> getSuggestionInputPermutations() {
+ return this.suggestionInputPermutations;
+ }
+
+ public void setSuggestionInputPermutations(List<String> permutations) {
+ this.suggestionInputPermutations = permutations;
+ }
+
+ public void generateSuggestionInputPermutations() {
+
+ List<String> entityNames = new ArrayList<>();
+ entityNames.add(entityType);
+ HashMap<String, String> desc = entityLookup.getSuggestionSearchEntityOxmModel().get(this.entityType);
+ String attr = desc.get("suggestionAliases");
+ String[] suggestionAliasesArray = attr.split(",");
+ suggestionTypeAliases = Arrays.asList(suggestionAliasesArray);
+ this.setAliasToUse(suggestionAliasesArray[suggestionAliasesArray.length - 1]);
+ for (String alias : suggestionTypeAliases) {
+ entityNames.add(alias);
+ }
+
+ ArrayList<String> listToPermutate = new ArrayList<>(inputOutputData.values());
+
+ for (String entity : entityNames){
+ listToPermutate.add(entity); // add entity-name or alias in list to permutate
+ List<List<String>> lists = SuggestionsPermutation.getListPermutations(listToPermutate);
+ for (List<String> li : lists){
+ suggestionInputPermutations.add(String.join(" ", li));
+ }
+ // prepare for the next pass: remove the entity-name or alias from the list
+ listToPermutate.remove(entity);
+ }
+ }
+
+ public boolean isSuggestableDoc() {
+ return this.getPayload().length() != 0;
+ }
+
+
+ @Override
+ public void deriveFields() {
+
+ int entryCounter = 1;
+ for (Map.Entry<String, String> outputValue : inputOutputData.entrySet()) {
+ if (outputValue.getValue() != null && outputValue.getValue().length() > 0) {
+ this.outputString.append(outputValue.getValue());
+ if (entryCounter < inputOutputData.entrySet().size()) {
+ this.outputString.append(" and ");
+ } else{
+ this.outputString.append(" ");
+ }
+ }
+ entryCounter++;
+ }
+
+ this.outputString.append(this.getAliasToUse());
+ this.id = NodeUtils.generateUniqueShaDigest(outputString.toString());
+ }
+
+ @Override
+ public String getAsJson() {
+ JSONObject rootNode = new JSONObject();
+
+ JSONArray suggestionsArray = new JSONArray();
+ for (String suggestion : suggestionInputPermutations) {
+ suggestionsArray.put(suggestion);
+ }
+
+ JSONObject entitySuggest = new JSONObject();
+
+ entitySuggest.put("input", suggestionsArray);
+ entitySuggest.put("output", this.outputString);
+ entitySuggest.put("payload", this.payload);
+ rootNode.put("entity_suggest", entitySuggest);
+
+ return rootNode.toString();
+ }
+
+ public String getAliasToUse() {
+ return aliasToUse;
+ }
+
+ public void setAliasToUse(String aliasToUse) {
+ this.aliasToUse = aliasToUse;
+ }
+
+ public Map<String, String> getInputOutputData() {
+ return inputOutputData;
+ }
+
+ public void setInputOutputData(Map<String, String> inputOutputData) {
+ this.inputOutputData = inputOutputData;
+ }
+
+ @Override
+ public String toString() {
+ return "SuggestionSearchEntity [entityType=" + entityType + ", suggestionConnectorWords="
+ + suggestionConnectorWords + ", suggestionAttributeTypes=" + suggestionAttributeTypes
+ + ", suggestionAttributeValues=" + suggestionAttributeValues + ", suggestionTypeAliases="
+ + suggestionTypeAliases + ", mapper=" + mapper + "]";
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java
new file mode 100644
index 0000000..52ea891
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java
@@ -0,0 +1,56 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.entity;
+
+/**
+ * The Enum TransactionStorageType.
+ */
+public enum TransactionStorageType {
+ EDGE_TAG_QUERY(0, "aaiOffline/edge-tag-query"), ACTIVE_INVENTORY_QUERY(1,
+ "aaiOffline/active-inventory-query");
+
+ private Integer index;
+ private String outputFolder;
+
+ /**
+ * Instantiates a new transaction storage type.
+ *
+ * @param index the index
+ * @param outputFolder the output folder
+ */
+ TransactionStorageType(Integer index, String outputFolder) {
+ this.index = index;
+ this.outputFolder = outputFolder;
+ }
+
+ public Integer getIndex() {
+ return index;
+ }
+
+ public String getOutputFolder() {
+ return outputFolder;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java
new file mode 100644
index 0000000..b8c12c9
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java
@@ -0,0 +1,32 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.enumeration;
+
+/**
+ * The Enum OperationState.
+ */
+public enum OperationState {
+ INIT, OK, ERROR, ABORT, PENDING, IGNORED_SYNC_NOT_IDLE
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java
new file mode 100644
index 0000000..01b28e0
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java
@@ -0,0 +1,32 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.enumeration;
+
+/**
+ * The Enum SynchronizerState.
+ */
+public enum SynchronizerState {
+ IDLE, PERFORMING_SYNCHRONIZATION, ABORTED
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java
new file mode 100644
index 0000000..a986dfc
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java
@@ -0,0 +1,97 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.slf4j.MDC;
+
+/*
+ * Consider abstracting these tasks into common elements, because most of them repeat a generic
+ * call-flow pattern.
+ */
+
+/**
+ * The Class PerformActiveInventoryRetrieval.
+ */
+public class PerformActiveInventoryRetrieval implements Supplier<NetworkTransaction> {
+
+ private static Logger logger = LoggerFactory.getInstance().getLogger(PerformActiveInventoryRetrieval.class);
+
+ private NetworkTransaction txn;
+ private ActiveInventoryAdapter aaiAdapter;
+ private Map<String, String> contextMap;
+
+ /**
+ * Instantiates a new perform active inventory retrieval.
+ *
+ * @param txn the txn
+ * @param aaiProvider the aai provider
+ */
+ public PerformActiveInventoryRetrieval(NetworkTransaction txn,
+ ActiveInventoryAdapter aaiAdapter) {
+ this.txn = txn;
+ this.aaiAdapter = aaiAdapter;
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
+
+ /* (non-Javadoc)
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public NetworkTransaction get() {
+
+ txn.setTaskAgeInMs();
+
+ long startTimeInMs = System.currentTimeMillis();
+ MDC.setContextMap(contextMap);
+ OperationResult result = null;
+ try {
+
+ final String absoluteSelfLink = aaiAdapter.repairSelfLink(txn.getLink(), txn.getQueryParameters());
+ result = aaiAdapter.queryActiveInventoryWithRetries(absoluteSelfLink, "application/json", 5);
+ } catch (Exception exc) {
+ logger.error(AaiUiMsgs.ERROR_GENERIC,"Failure to resolve self link from AAI. Error = " + exc.getMessage());
+ result = new OperationResult(500,
+ "Caught an exception while trying to resolve link = " + exc.getMessage());
+ } finally {
+ txn.setOperationResult(result);
+ txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+ }
+
+ return txn;
+ }
+
+ protected void setContextMap(Map<String, String> contextMap) {
+ this.contextMap = contextMap;
+ }
+}
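Editor's note: because the task implements Supplier&lt;NetworkTransaction&gt;, the synchronizers can hand it to an executor via CompletableFuture. A hedged sketch follows; the executor, transaction and result handling are placeholders, not code from this patch.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
import org.onap.aai.sparky.dal.NetworkTransaction;

public class RetrievalTaskSketch {

  public static void submit(NetworkTransaction txn, ActiveInventoryAdapter aaiAdapter) {
    ExecutorService aaiExecutor = Executors.newFixedThreadPool(4);

    CompletableFuture
        .supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor)
        .whenComplete((completedTxn, error) -> {
          if (error != null) {
            System.err.println("retrieval failed: " + error.getMessage());
          } else {
            // get() stores the OperationResult on the transaction before returning it
            System.out.println("AAI GET returned "
                + completedTxn.getOperationResult().getResultCode());
          }
        });
  }
}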
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java
new file mode 100644
index 0000000..cbc9ccb
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java
@@ -0,0 +1,90 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.slf4j.MDC;
+
+/**
+ * The Class PerformElasticSearchPut.
+ */
+public class PerformElasticSearchPut implements Supplier<NetworkTransaction> {
+
+ private ElasticSearchAdapter esAdapter;
+ private String jsonPayload;
+ private NetworkTransaction txn;
+ private Map<String, String> contextMap;
+
+ /**
+ * Instantiates a new perform elastic search put.
+ *
+ * @param jsonPayload the json payload
+ * @param txn the txn
+ * @param restDataProvider the rest data provider
+ */
+ public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn,
+ ElasticSearchAdapter esAdapter) {
+ this.jsonPayload = jsonPayload;
+ this.txn = txn;
+ this.esAdapter = esAdapter;
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
+
+ public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn,
+ ElasticSearchAdapter esAdapter, Map<String, String> contextMap) {
+ this.jsonPayload = jsonPayload;
+ this.txn = txn;
+ this.esAdapter = esAdapter;
+ this.contextMap = contextMap;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public NetworkTransaction get() {
+ txn.setTaskAgeInMs();
+ MDC.setContextMap(contextMap);
+
+ long startTimeInMs = System.currentTimeMillis();
+
+ OperationResult or =
+ esAdapter.doPut(txn.getLink(), jsonPayload, MediaType.APPLICATION_JSON_TYPE);
+
+ txn.setOperationResult(or);
+ txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+
+ return txn;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java
new file mode 100644
index 0000000..048d19e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java
@@ -0,0 +1,72 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.slf4j.MDC;
+
+/**
+ * The Class PerformElasticSearchRetrieval.
+ */
+public class PerformElasticSearchRetrieval implements Supplier<NetworkTransaction> {
+
+ private NetworkTransaction txn;
+ private ElasticSearchAdapter esAdapter;
+ private Map<String, String> contextMap;
+
+ /**
+ * Instantiates a new perform elastic search retrieval.
+ *
+ * @param elasticSearchTxn the elastic search txn
+   * @param esAdapter the Elasticsearch adapter used to perform the GET
+ */
+ public PerformElasticSearchRetrieval(NetworkTransaction elasticSearchTxn,
+ ElasticSearchAdapter esAdapter) {
+ this.txn = elasticSearchTxn;
+ this.esAdapter = esAdapter;
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
+
+ /* (non-Javadoc)
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public NetworkTransaction get() {
+ MDC.setContextMap(contextMap);
+ long startTimeInMs = System.currentTimeMillis();
+ OperationResult or = esAdapter.doGet(txn.getLink(), MediaType.APPLICATION_JSON_TYPE);
+ txn.setOperationResult(or);
+ txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+ return txn;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java
new file mode 100644
index 0000000..2e329e5
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java
@@ -0,0 +1,82 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.slf4j.MDC;
+
+/**
+ * The Class PerformElasticSearchUpdate.
+ */
+public class PerformElasticSearchUpdate implements Supplier<NetworkTransaction> {
+
+ private ElasticSearchAdapter esAdapter;
+ private NetworkTransaction operationTracker;
+ private String updatePayload;
+ private String updateUrl;
+ private Map<String, String> contextMap;
+
+ /**
+ * Instantiates a new perform elastic search update.
+ *
+ * @param updateUrl the update url
+ * @param updatePayload the update payload
+   * @param esAdapter the Elasticsearch adapter used to perform the bulk update
+ * @param transactionTracker the transaction tracker
+ */
+ public PerformElasticSearchUpdate(String updateUrl, String updatePayload,
+ ElasticSearchAdapter esAdapter, NetworkTransaction transactionTracker) {
+ this.updateUrl = updateUrl;
+ this.updatePayload = updatePayload;
+ this.esAdapter = esAdapter;
+ this.contextMap = MDC.getCopyOfContextMap();
+ this.operationTracker = new NetworkTransaction();
+ operationTracker.setEntityType(transactionTracker.getEntityType());
+ operationTracker.setDescriptor(transactionTracker.getDescriptor());
+ operationTracker.setOperationType(transactionTracker.getOperationType());
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public NetworkTransaction get() {
+ operationTracker.setTaskAgeInMs();
+ MDC.setContextMap(contextMap);
+ long startTimeInMs = System.currentTimeMillis();
+ OperationResult or = esAdapter.doBulkOperation(updateUrl, updatePayload);
+ operationTracker.setOperationResult(or);
+ operationTracker.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+ return operationTracker;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformGizmoRetrieval.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformGizmoRetrieval.java
new file mode 100644
index 0000000..972b049
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformGizmoRetrieval.java
@@ -0,0 +1,95 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.GizmoAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.slf4j.MDC;
+
+/*
+ * Consider abstracting these tasks into common elements, because most of them repeat a generic
+ * call flow pattern.
+ */
+
+/**
+ * The Class PerformGizmoRetrieval.
+ */
+public class PerformGizmoRetrieval implements Supplier<NetworkTransaction> {
+
+ private static Logger logger = LoggerFactory.getInstance().getLogger(PerformGizmoRetrieval.class);
+
+ private NetworkTransaction txn;
+ private GizmoAdapter gizmoAdapter;
+ private Map<String, String> contextMap;
+
+ /**
+   * Instantiates a new perform gizmo retrieval.
+   *
+   * @param txn the txn
+   * @param gizmoAdapter the Gizmo adapter used to perform the retrieval
+ */
+ public PerformGizmoRetrieval(NetworkTransaction txn,
+ GizmoAdapter gizmoAdapter) {
+ this.txn = txn;
+ this.gizmoAdapter = gizmoAdapter;
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
+
+ /* (non-Javadoc)
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public NetworkTransaction get() {
+
+ txn.setTaskAgeInMs();
+
+ long startTimeInMs = System.currentTimeMillis();
+ MDC.setContextMap(contextMap);
+ OperationResult result = null;
+ try {
+ result = gizmoAdapter.queryGizmoWithRetries(txn.getLink(), "application/json", 5);
+ } catch (Exception exc) {
+      logger.error(AaiUiMsgs.ERROR_GENERIC,
+          "Failure to resolve self link from Gizmo. Error = " + exc.getMessage());
+ result = new OperationResult(500,
+ "Caught an exception while trying to resolve link = " + exc.getMessage());
+ } finally {
+ txn.setOperationResult(result);
+ txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+ }
+
+ return txn;
+ }
+
+ protected void setContextMap(Map<String, String> contextMap) {
+ this.contextMap = contextMap;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java
new file mode 100644
index 0000000..f92ccd3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java
@@ -0,0 +1,90 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.sync.entity.IndexDocument;
+import org.slf4j.MDC;
+
+/**
+ * The Class StoreDocumentTask.
+ */
+public class StoreDocumentTask implements Supplier<NetworkTransaction> {
+
+ private IndexDocument doc;
+
+ private NetworkTransaction txn;
+
+ private ElasticSearchAdapter esAdapter;
+ private Map<String, String> contextMap;
+
+ /**
+ * Instantiates a new store document task.
+ *
+ * @param doc the doc
+ * @param txn the txn
+   * @param esAdapter the Elasticsearch adapter used to store the document
+ */
+ public StoreDocumentTask(IndexDocument doc, NetworkTransaction txn,
+ ElasticSearchAdapter esAdapter) {
+ this.doc = doc;
+ this.txn = txn;
+ this.esAdapter = esAdapter;
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
+
+ /* (non-Javadoc)
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public NetworkTransaction get() {
+ txn.setTaskAgeInMs();
+
+ long startTimeInMs = System.currentTimeMillis();
+ MDC.setContextMap(contextMap);
+ OperationResult operationResult = null;
+
+ try {
+
+ operationResult =
+ esAdapter.doPut(txn.getLink(), doc.getAsJson(), MediaType.APPLICATION_JSON_TYPE);
+ txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+    } catch (Exception exception) {
+      // operationResult is still null in this path, so build a new failure result rather
+      // than dereferencing it
+      operationResult = new OperationResult(500, exception.getMessage());
+    }
+
+ txn.setOperationResult(operationResult);
+
+ return txn;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java
new file mode 100644
index 0000000..92f3683
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java
@@ -0,0 +1,55 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import org.onap.aai.sparky.sync.SyncController;
+import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+
+public class SyncControllerTask implements Runnable {
+
+ private SyncController controller;
+
+ public SyncControllerTask(SyncController controller) {
+ this.controller = controller;
+ }
+
+ @Override
+ public void run() {
+
+ controller.performAction(SyncActions.SYNCHRONIZE);
+
+ while (controller.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) {
+ try {
+ Thread.sleep(1000);
+ } catch (InterruptedException e) {
+ // exit out of the sync-wait-loop
+ break;
+ }
+ }
+
+ }
+
+}
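
SyncControllerTask kicks off a synchronization and then blocks until the controller leaves PERFORMING_SYNCHRONIZATION, which makes it a natural fit for a scheduled executor. A minimal scheduling sketch follows; the scheduler, period, and wrapper class name are assumptions, not part of this change:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    import org.onap.aai.sparky.sync.SyncController;
    import org.onap.aai.sparky.sync.task.SyncControllerTask;

    public class SyncSchedulingSketch {

      public static void scheduleSync(SyncController controller) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

        // Run a full synchronization immediately and then once per hour; each run blocks
        // inside SyncControllerTask until the controller is no longer synchronizing.
        scheduler.scheduleWithFixedDelay(new SyncControllerTask(controller), 0, 1, TimeUnit.HOURS);
      }
    }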
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java
new file mode 100644
index 0000000..93b6eb4
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java
@@ -0,0 +1,101 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.topology.sync;
+
+import org.onap.aai.sparky.config.oxm.GeoEntityLookup;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner;
+import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
+import org.onap.aai.sparky.sync.IndexCleaner;
+import org.onap.aai.sparky.sync.IndexIntegrityValidator;
+import org.onap.aai.sparky.sync.SyncControllerImpl;
+import org.onap.aai.sparky.sync.SyncControllerRegistrar;
+import org.onap.aai.sparky.sync.SyncControllerRegistry;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.config.SyncControllerConfig;
+
+public class GeoSyncController extends SyncControllerImpl implements SyncControllerRegistrar {
+
+ private SyncControllerRegistry syncControllerRegistry;
+
+ public GeoSyncController(SyncControllerConfig syncControllerConfig,
+ ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
+ ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig,
+ NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig,
+ GeoEntityLookup geoEntityLookup, OxmEntityLookup oxmEntityLookup,
+ ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception {
+ super(syncControllerConfig);
+
+ // final String controllerName = "Inventory Geo Synchronizer";
+
+ IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig,
+ endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig));
+
+ registerIndexValidator(indexValidator);
+
+ GeoSynchronizer synchronizer =
+ new GeoSynchronizer(schemaConfig, syncControllerConfig.getNumInternalSyncWorkers(),
+ syncControllerConfig.getNumSyncActiveInventoryWorkers(),
+ syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig,
+ geoEntityLookup, oxmEntityLookup);
+
+ synchronizer.setAaiAdapter(aaiAdapter);
+ synchronizer.setElasticSearchAdapter(esAdapter);
+
+ registerEntitySynchronizer(synchronizer);
+
+
+ IndexCleaner indexCleaner =
+ new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig);
+
+ registerIndexCleaner(indexCleaner);
+
+ }
+
+ public SyncControllerRegistry getSyncControllerRegistry() {
+ return syncControllerRegistry;
+ }
+
+ public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) {
+ this.syncControllerRegistry = syncControllerRegistry;
+ }
+
+ @Override
+ public void registerController() {
+
+ if ( syncControllerRegistry != null ) {
+ if ( syncControllerConfig.isEnabled()) {
+ syncControllerRegistry.registerSyncController(this);
+ }
+ }
+ }
+
+
+
+}
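
With the move to Spring-Boot, a controller like this would typically be created as a bean, handed the shared SyncControllerRegistry, and asked to self-register. A hedged wiring sketch follows, assuming constructor injection of the collaborators shown above; the configuration class, bean method, and qualifier-free injection of the two NetworkStatisticsConfig instances are assumptions, not configuration shipped in this commit:

    import org.onap.aai.sparky.config.oxm.GeoEntityLookup;
    import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
    import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
    import org.onap.aai.sparky.dal.ElasticSearchAdapter;
    import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
    import org.onap.aai.sparky.sync.SyncControllerRegistry;
    import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
    import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
    import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
    import org.onap.aai.sparky.sync.config.SyncControllerConfig;
    import org.onap.aai.sparky.topology.sync.GeoSyncController;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class GeoSyncWiringSketch {

      @Bean
      public GeoSyncController geoSyncController(SyncControllerConfig syncControllerConfig,
          ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
          ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig,
          NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig,
          GeoEntityLookup geoEntityLookup, OxmEntityLookup oxmEntityLookup,
          ElasticSearchSchemaFactory schemaFactory, SyncControllerRegistry registry)
          throws Exception {

        GeoSyncController controller = new GeoSyncController(syncControllerConfig, aaiAdapter,
            esAdapter, schemaConfig, endpointConfig, aaiStatConfig, esStatConfig, geoEntityLookup,
            oxmEntityLookup, schemaFactory);

        // Hand over the shared registry and let the controller register itself when enabled.
        controller.setSyncControllerRegistry(registry);
        controller.registerController();
        return controller;
      }
    }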
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java
new file mode 100644
index 0000000..fcc8a7a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java
@@ -0,0 +1,487 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.topology.sync;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.GeoEntityLookup;
+import org.onap.aai.sparky.config.oxm.GeoOxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+import org.onap.aai.sparky.inventory.entity.GeoIndexDocument;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
+import org.onap.aai.sparky.sync.IndexSynchronizer;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval;
+import org.onap.aai.sparky.sync.task.StoreDocumentTask;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+
+/**
+ * The Class GeoSynchronizer.
+ */
+public class GeoSynchronizer extends AbstractEntitySynchronizer implements IndexSynchronizer {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(GeoSynchronizer.class);
+
+ private boolean allWorkEnumerated;
+ private Deque<SelfLinkDescriptor> selflinks;
+ private GeoEntityLookup geoEntityLookup;
+ private OxmEntityLookup oxmEntityLookup;
+
+ private Map<String, GeoOxmEntityDescriptor> geoDescriptorMap = null;
+
+ /**
+ * Instantiates a new geo synchronizer.
+ *
+ * @param indexName the index name
+ * @throws Exception the exception
+ */
+ public GeoSynchronizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers,
+ int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
+ NetworkStatisticsConfig esStatConfig, GeoEntityLookup geoEntityLookup,
+ OxmEntityLookup oxmEntityLookup) throws Exception {
+
+ super(LOG, "GEO", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(),aaiStatConfig, esStatConfig);
+ this.geoEntityLookup = geoEntityLookup;
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.allWorkEnumerated = false;
+ this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+ this.synchronizerName = "Geo Synchronizer";
+ this.geoDescriptorMap = geoEntityLookup.getGeoEntityDescriptors();
+ this.aaiEntityStats.intializeEntityCounters(geoDescriptorMap.keySet());
+ this.esEntityStats.intializeEntityCounters(geoDescriptorMap.keySet());
+ this.syncDurationInMs = -1;
+ }
+
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+ */
+ @Override
+ public OperationState doSync() {
+ this.syncDurationInMs = -1;
+ resetCounters();
+ setShouldSkipSync(false);
+ allWorkEnumerated = false;
+ syncStartedTimeStampInMs = System.currentTimeMillis();
+ String txnID = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnID, "GeoSynchronizer", "", "Sync", "");
+
+ collectAllTheWork();
+ return OperationState.OK;
+ }
+
+
+ /**
+ * Collect all the work.
+ *
+ * @return the operation state
+ */
+ public OperationState collectAllTheWork() {
+ final Map<String,String> contextMap = MDC.getCopyOfContextMap();
+
+ if (geoDescriptorMap.isEmpty()) {
+ setShouldSkipSync(true);
+ LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "geo entities");
+ return OperationState.ERROR;
+ }
+
+ Collection<String> syncTypes = geoDescriptorMap.keySet();
+
+ try {
+
+ /*
+       * launch a parallel async task for each entity type to collect its self-links (bounded by
+       * the size of the configured executor pool)
+ */
+
+ aaiWorkOnHand.set(syncTypes.size());
+
+ for (String key : syncTypes) {
+
+ supplyAsync(new Supplier<Void>() {
+
+ @Override
+ public Void get() {
+ MDC.setContextMap(contextMap);
+ OperationResult typeLinksResult = null;
+ try {
+ typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key);
+ aaiWorkOnHand.decrementAndGet();
+ processEntityTypeSelfLinks(typeLinksResult);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc);
+ }
+
+ return null;
+ }
+
+ }, aaiExecutor).whenComplete((result, error) -> {
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage());
+ }
+ });
+
+ }
+
+ while (aaiWorkOnHand.get() != 0) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+ }
+
+ Thread.sleep(1000);
+ }
+
+ aaiWorkOnHand.set(selflinks.size());
+ allWorkEnumerated = true;
+ syncEntityTypes();
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc);
+ }
+ return OperationState.OK;
+ }
+
+ /**
+ * Sync entity types.
+ */
+ private void syncEntityTypes() {
+
+ while (selflinks.peek() != null) {
+
+ SelfLinkDescriptor linkDescriptor = selflinks.poll();
+ aaiWorkOnHand.decrementAndGet();
+
+ OxmEntityDescriptor descriptor = null;
+
+ if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+ descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType());
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+ // go to next element in iterator
+ continue;
+ }
+
+ NetworkTransaction txn = new NetworkTransaction();
+ txn.setDescriptor(descriptor);
+ txn.setLink(linkDescriptor.getSelfLink());
+ txn.setOperationType(HttpMethod.GET);
+ txn.setEntityType(linkDescriptor.getEntityType());
+
+ aaiWorkOnHand.incrementAndGet();
+
+ supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor)
+ .whenComplete((result, error) -> {
+
+ aaiWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage());
+ } else {
+ if (result == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_GET_NO_RESPONSE, linkDescriptor.getSelfLink());
+ } else {
+ processEntityTypeSelfLinkResult(result);
+ }
+ }
+ });
+ }
+ }
+ }
+
+ /**
+ * Process entity type self links.
+ *
+ * @param operationResult the operation result
+ */
+ private void processEntityTypeSelfLinks(OperationResult operationResult) {
+
+ JsonNode rootNode = null;
+
+ final String jsonResult = operationResult.getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
+
+      try {
+        rootNode = mapper.readTree(jsonResult);
+      } catch (IOException exc) {
+        LOG.error(AaiUiMsgs.ERROR_GENERIC, exc);
+        // nothing further can be parsed from this payload
+        return;
+      }
+
+      JsonNode resultData = rootNode.get("result-data");
+      ArrayNode resultDataArrayNode = null;
+
+      if (resultData != null && resultData.isArray()) {
+ resultDataArrayNode = (ArrayNode) resultData;
+
+ Iterator<JsonNode> elementIterator = resultDataArrayNode.elements();
+ JsonNode element = null;
+
+ while (elementIterator.hasNext()) {
+ element = elementIterator.next();
+
+ final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type");
+ final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link");
+
+ if (resourceType != null && resourceLink != null) {
+
+ if (geoDescriptorMap.containsKey(resourceType)) {
+ selflinks.add(new SelfLinkDescriptor(resourceLink + "?nodes-only", resourceType));
+ } else {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType);
+ // go to next element in iterator
+ continue;
+ }
+
+ }
+ }
+ }
+ }
+
+ }
+
+ /**
+ * Process entity type self link result.
+ *
+ * @param txn the txn
+ */
+ private void processEntityTypeSelfLinkResult(NetworkTransaction txn) {
+
+ updateActiveInventoryCounters(txn);
+
+ if (!txn.getOperationResult().wasSuccessful()) {
+ return;
+ }
+
+ GeoOxmEntityDescriptor descriptor = geoDescriptorMap.get(txn.getEntityType());
+
+ if ( descriptor == null ) {
+ return;
+ }
+
+ try {
+ if (descriptor.hasGeoEntity()) {
+
+ GeoIndexDocument geoDoc = new GeoIndexDocument();
+
+ final String jsonResult = txn.getOperationResult().getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0) {
+
+ populateGeoDocument(geoDoc, jsonResult, txn.getDescriptor(), txn.getLink());
+
+ if (!geoDoc.isValidGeoDocument()) {
+
+ LOG.info(AaiUiMsgs.GEO_SYNC_IGNORING_ENTITY, geoDoc.getEntityType(), geoDoc.toString());
+
+ } else {
+
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), geoDoc.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc);
+ }
+
+ if (link != null) {
+
+ NetworkTransaction n2 = new NetworkTransaction();
+ n2.setLink(link);
+ n2.setEntityType(txn.getEntityType());
+ n2.setDescriptor(txn.getDescriptor());
+ n2.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+
+ supplyAsync(new StoreDocumentTask(geoDoc, n2, elasticSearchAdapter), esExecutor)
+ .whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_STORE_FAILURE, error.getMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ processStoreDocumentResult(result);
+ }
+ });
+ }
+ }
+ }
+ }
+ } catch (JsonProcessingException exc) {
+ LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc);
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc);
+ }
+
+ return;
+ }
+
+
+ /**
+ * Process store document result.
+ *
+ * @param txn the txn
+ */
+ private void processStoreDocumentResult(NetworkTransaction txn) {
+
+ OperationResult or = txn.getOperationResult();
+
+ if (!or.wasSuccessful()) {
+ LOG.error(AaiUiMsgs.ES_STORE_FAILURE, or.toString());
+ /*
+ * if(or.getResultCode() != 404 || (or.getResultCode() == 404 &&
+ * !synchronizerConfig.isResourceNotFoundErrorsSupressed())) { logger.error(
+ * "Skipping failed resource = " + "link" + " RC=[" + or.getResultCode() + "]. Message: " +
+ * or.getResult()); }
+ */
+
+ }
+
+ }
+
+
+ @Override
+ public SynchronizerState getState() {
+
+ if (!isSyncDone()) {
+ return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+ }
+
+ return SynchronizerState.IDLE;
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
+ */
+ @Override
+ public String getStatReport(boolean showFinalReport) {
+ syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
+ return this.getStatReport(syncDurationInMs, showFinalReport);
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
+ */
+ @Override
+ public void shutdown() {
+ this.shutdownExecutors();
+ }
+
+ /**
+ * Populate geo document.
+ *
+ * @param doc the doc
+ * @param result the result
+ * @param resultDescriptor the result descriptor
+ * @param entityLink the entity link
+ * @throws JsonProcessingException the json processing exception
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ protected void populateGeoDocument(GeoIndexDocument doc, String result,
+ OxmEntityDescriptor resultDescriptor, String entityLink)
+ throws JsonProcessingException, IOException {
+
+ doc.setSelfLink(entityLink);
+ doc.setEntityType(resultDescriptor.getEntityName());
+
+ JsonNode entityNode = mapper.readTree(result);
+
+ List<String> primaryKeyValues = new ArrayList<String>();
+ String pkeyValue = null;
+
+ for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) {
+ pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName);
+ if (pkeyValue != null) {
+ primaryKeyValues.add(pkeyValue);
+ } else {
+ LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName());
+ }
+ }
+
+ final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/");
+ doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue);
+
+ GeoOxmEntityDescriptor descriptor = geoDescriptorMap.get(resultDescriptor.getEntityName());
+
+ String geoLatKey = descriptor.getGeoLatName();
+ String geoLongKey = descriptor.getGeoLongName();
+
+ doc.setLatitude(NodeUtils.getNodeFieldAsText(entityNode, geoLatKey));
+ doc.setLongitude(NodeUtils.getNodeFieldAsText(entityNode, geoLongKey));
+ doc.deriveFields();
+
+ }
+
+ @Override
+ protected boolean isSyncDone() {
+ if (shouldSkipSync()) {
+ syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
+ return true;
+ }
+
+ int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
+
+ if (totalWorkOnHand > 0 || !allWorkEnumerated) {
+ return false;
+ }
+
+ return true;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java
new file mode 100644
index 0000000..cb6cc53
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java
@@ -0,0 +1,193 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+import java.util.Set;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+
+/**
+ * The Class ConfigHelper.
+ */
+public class ConfigHelper {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(ConfigHelper.class);
+
+ /**
+ * Gets the config with prefix.
+ *
+ * @param configPrefix the config prefix
+ * @param properties the properties
+ * @return the config with prefix
+ */
+ public static Properties getConfigWithPrefix(String configPrefix, Properties properties) {
+
+ /*
+ * The idea here is collect properties groups prefixed with the same origin
+ */
+
+ Set<Object> set = properties.keySet();
+ Properties newProps = new Properties();
+
+ for (Object k : set) {
+ String ks = (String) k;
+ if (ks.startsWith(configPrefix)) {
+
+ String temp = ks.replaceFirst(configPrefix + ".", "");
+ newProps.setProperty(temp, properties.getProperty(ks));
+ }
+ }
+
+ return newProps;
+ }
+
+ /**
+ * Load config.
+ *
+ * @param fileName the file name
+ * @return the properties
+ * @throws Exception the exception
+ */
+  public static Properties loadConfig(String fileName) throws Exception {
+
+    String basePath = System.getProperty("user.dir");
+    Properties props = new Properties();
+
+    try (InputStream fileInputStream = new FileInputStream(basePath + "/" + fileName)) {
+      props.load(fileInputStream);
+    }
+
+    return props;
+  }
+
+ /**
+ * Load config from explicit path.
+ *
+ * @param fileName the file name
+ * @return the properties
+ */
+ public static Properties loadConfigFromExplicitPath(String fileName) {
+
+ Properties props = new Properties();
+
+    try (InputStream fileInputStream = new FileInputStream(fileName)) {
+      props.load(fileInputStream);
+    } catch (Exception exc) {
+      LOG.warn(AaiUiMsgs.CONFIG_NOT_FOUND_VERBOSE, fileName, exc.getLocalizedMessage());
+    }
+
+ return props;
+ }
+
+ /**
+ * Property fetch.
+ *
+ * @param config the config
+ * @param propName the prop name
+ * @param defaultValue the default value
+ * @return the string
+ */
+ public static String propertyFetch(Properties config, String propName, String defaultValue) {
+ return config.getProperty(propName, defaultValue);
+ }
+
+ public static boolean isEssDevModeEnabled() {
+ return Boolean.parseBoolean(System.getProperty("isEssDevMode", "false"));
+ }
+
+ /**
+ * Gets the filepath.
+ *
+ * @param fileName the file name
+ * @param isRelativePath the is relative path
+ * @return the filepath
+ */
+ public static String getFilepath(String fileName, boolean isRelativePath) {
+
+ String filepath = null;
+
+ if (isRelativePath) {
+ filepath = System.getProperty("user.dir") + "/" + fileName;
+
+ } else {
+ filepath = fileName;
+ }
+
+ return filepath;
+
+ }
+
+ /**
+ * Gets the file contents.
+ *
+ * @param fileName the file name
+ * @return the file contents
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public static String getFileContents(String fileName) throws IOException {
+
+ LOG.debug(AaiUiMsgs.FILE_READ_IN_PROGRESS, fileName);
+
+ File file = new File(fileName);
+
+ if (!file.exists()) {
+ throw new FileNotFoundException("Failed to load file = " + fileName);
+ }
+
+    if (!file.isDirectory()) {
+      try (BufferedReader br = new BufferedReader(new FileReader(file))) {
+        StringBuilder sb = new StringBuilder();
+        String line = br.readLine();
+
+        while (line != null) {
+          sb.append(line);
+          sb.append(System.lineSeparator());
+          line = br.readLine();
+        }
+
+        return sb.toString();
+      }
+    } else {
+      LOG.warn(AaiUiMsgs.FILE_NOT_FOUND, fileName);
+    }
+
+ return null;
+
+ }
+
+}
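
getConfigWithPrefix is the main helper here: it keeps only the properties that share a common prefix and strips that prefix from each key. A small sketch of the expected transformation follows; the property names and values are made-up examples:

    import java.util.Properties;

    import org.onap.aai.sparky.util.ConfigHelper;

    public class ConfigHelperSketch {

      public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("es.host", "localhost");
        props.setProperty("es.port", "9200");
        props.setProperty("aai.host", "aai.example.org");

        // Keeps only the "es." entries and drops the prefix: {host=localhost, port=9200}
        // (key order may vary)
        Properties esProps = ConfigHelper.getConfigWithPrefix("es", props);
        System.out.println(esProps);
      }
    }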
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/EncryptConvertor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/EncryptConvertor.java
new file mode 100644
index 0000000..623ce38
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/EncryptConvertor.java
@@ -0,0 +1,149 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+/**
+ * The Class EncryptConvertor.
+ */
+public class EncryptConvertor {
+
+ private static final char[] HEX_CHARS = "0123456789abcdef".toCharArray();
+
+ /**
+ * toHexString(String) - convert a string into its hex equivalent.
+ *
+ * @param buf the buf
+ * @return the string
+ */
+ public static final String toHexString(String buf) {
+ if (buf == null) {
+ return "";
+ }
+ return toHexString(buf.getBytes());
+ }
+
+ /**
+ * toHexString(byte[]) - convert a byte-string into its hex equivalent.
+ *
+ * @param buf the buf
+ * @return the string
+ */
+ public static final String toHexString(byte[] buf) {
+
+ if (buf == null) {
+ return "";
+ }
+ char[] chars = new char[2 * buf.length];
+ for (int i = 0; i < buf.length; ++i) {
+ chars[2 * i] = HEX_CHARS[(buf[i] & 0xF0) >>> 4];
+ chars[2 * i + 1] = HEX_CHARS[buf[i] & 0x0F];
+ }
+ return new String(chars);
+ }
+
+ /**
+ * Convert a hex string to its equivalent value.
+ *
+ * @param hexString the hex string
+ * @return the string
+ * @throws Exception the exception
+ */
+ public static final String stringFromHex(String hexString) throws Exception {
+ if (hexString == null) {
+ return "";
+ }
+ return stringFromHex(hexString.toCharArray());
+ }
+
+ /**
+ * String from hex.
+ *
+ * @param hexCharArray the hex char array
+ * @return the string
+ * @throws Exception the exception
+ */
+ public static final String stringFromHex(char[] hexCharArray) throws Exception {
+ if (hexCharArray == null) {
+ return "";
+ }
+ return new String(bytesFromHex(hexCharArray));
+ }
+
+ /**
+ * Bytes from hex.
+ *
+ * @param hexString the hex string
+ * @return the byte[]
+ * @throws Exception the exception
+ */
+ public static final byte[] bytesFromHex(String hexString) throws Exception {
+ if (hexString == null) {
+ return new byte[0];
+ }
+ return bytesFromHex(hexString.toCharArray());
+ }
+
+ /**
+ * Bytes from hex.
+ *
+ * @param hexCharArray the hex char array
+ * @return the byte[]
+ * @throws Exception the exception
+ */
+ public static final byte[] bytesFromHex(char[] hexCharArray) throws Exception {
+ if (hexCharArray == null) {
+ return new byte[0];
+ }
+ int len = hexCharArray.length;
+ if ((len % 2) != 0) {
+ throw new Exception("Odd number of characters: '" + String.valueOf(hexCharArray) + "'");
+ }
+ byte[] txtInByte = new byte[len / 2];
+ int counter = 0;
+ for (int i = 0; i < len; i += 2) {
+ txtInByte[counter++] =
+ (byte) (((fromHexDigit(hexCharArray[i], i) << 4) | fromHexDigit(hexCharArray[i + 1], i))
+ & 0xFF);
+ }
+ return txtInByte;
+ }
+
+ /**
+ * From hex digit.
+ *
+ * @param ch the ch
+ * @param index the index
+ * @return the int
+ * @throws Exception the exception
+ */
+ protected static final int fromHexDigit(char ch, int index) throws Exception {
+ int digit = Character.digit(ch, 16);
+ if (digit == -1) {
+ throw new Exception("Illegal hex character '" + ch + "' at index " + index);
+ }
+ return digit;
+ }
+
+}
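
EncryptConvertor is a straightforward string/hex codec; a minimal round-trip sketch follows, using an arbitrary sample value:

    import org.onap.aai.sparky.util.EncryptConvertor;

    public class EncryptConvertorSketch {

      public static void main(String[] args) throws Exception {
        String original = "admin:secret";

        // "61646d696e3a736563726574"
        String hex = EncryptConvertor.toHexString(original);

        // back to "admin:secret"
        String restored = EncryptConvertor.stringFromHex(hex);

        System.out.println(hex + " -> " + restored);
      }
    }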
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/Encryptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/Encryptor.java
new file mode 100644
index 0000000..948df51
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/Encryptor.java
@@ -0,0 +1,155 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.eclipse.jetty.util.security.Password;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+
+/**
+ * The Class Encryptor.
+ */
+public class Encryptor {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(Encryptor.class);
+ /**
+ * Instantiates a new encryptor.
+ */
+ public Encryptor() {
+ }
+
+ /**
+ * Encrypt value.
+ *
+ * @param value to encrypt
+ * @return the encrypted string
+ */
+  public String encryptValue(String value) {
+    String encryptedValue = "";
+    try {
+      encryptedValue = Password.obfuscate(value);
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ENCRYPTION_ERROR, value, exc.toString());
+    }
+    return encryptedValue;
+  }
+
+ /**
+ * Decrypt value.
+ *
+ * @param value the value
+ * @return the string
+ */
+  public String decryptValue(String value) {
+    String decryptedValue = "";
+    try {
+      decryptedValue = Password.deobfuscate(value);
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.DECRYPTION_ERROR, value, exc.toString());
+    }
+
+    return decryptedValue;
+  }
+
+ /**
+ * Usage.
+ */
+ public static void usage() {
+ usage(null);
+ }
+
+ /**
+ * Usage.
+ *
+ * @param msg the msg
+ */
+ public static void usage(String msg) {
+ if (msg != null) {
+ System.err.println(msg);
+ }
+ System.err.println("Usage: java Encryptor -e value");
+ System.err.println("\tEncrypt the given value");
+ System.err.println("Usage: java Encryptor -d value");
+ System.err.println("\tDecrypt the given value");
+ System.exit(1);
+ }
+
+ /**
+ * The main method.
+ *
+ * @param args the arguments
+ */
+ public static void main(String[] args) {
+
+    Options options = new Options();
+    options.addOption("e", true, "value to encrypt");
+    options.addOption("d", true, "value to decrypt");
+    options.addOption("h", false, "show help");
+    options.addOption("?", false, "show help");
+
+ String value = null;
+ boolean encrypt = false;
+ boolean decrypt = false;
+
+ CommandLineParser parser = new BasicParser();
+ CommandLine cmd = null;
+
+ try {
+ cmd = parser.parse(options, args);
+
+      if (cmd.hasOption("e")) {
+        value = cmd.getOptionValue("e");
+        encrypt = true;
+      }
+
+      if (cmd.hasOption("d")) {
+        value = cmd.getOptionValue("d");
+        decrypt = true;
+      }
+
+ if (cmd.hasOption("?") || cmd.hasOption("h")) {
+ usage();
+ System.exit(0);
+ }
+
+ if ((encrypt && decrypt) || (!encrypt && !decrypt)) {
+ usage("Must specify one (and only one) of the -e or -d options");
+ }
+
+ Encryptor encryptor = new Encryptor();
+
+      if (encrypt) {
+        String out = encryptor.encryptValue(value);
+        System.out.println(out);
+      } else if (decrypt) {
+        String out = encryptor.decryptValue(value);
+        System.out.println(out);
+      }
+ } catch (ParseException exc) {
+ System.out.println("Failed to parse command line properties: " + exc.toString());
+ } catch (Exception exc) {
+ System.out.println("Failure: " + exc.toString());
+ }
+
+ System.exit(0);
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java
new file mode 100644
index 0000000..d2bea64
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java
@@ -0,0 +1,61 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+/**
+ * The Class ErrorUtil.
+ */
+public class ErrorUtil {
+
+ /**
+ * Extract stack trace elements.
+ *
+ * @param maxNumberOfElementsToCapture the max number of elements to capture
+ * @param exc the exc
+ * @return the string
+ */
+ public static String extractStackTraceElements(int maxNumberOfElementsToCapture, Exception exc) {
+ StringBuilder sb = new StringBuilder(128);
+
+ StackTraceElement[] stackTraceElements = exc.getStackTrace();
+
+ if (stackTraceElements != null) {
+
+ /*
+ * We want to avoid an index out-of-bounds error, so we will make sure to only extract the
+ * number of frames from the stack trace that actually exist.
+ */
+
+ int numFramesToExtract = Math.min(maxNumberOfElementsToCapture, stackTraceElements.length);
+
+ for (int x = 0; x < numFramesToExtract; x++) {
+ sb.append(stackTraceElements[x]).append("\n");
+ }
+
+ }
+
+ return sb.toString();
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/JsonXmlConverter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/JsonXmlConverter.java
new file mode 100644
index 0000000..af2e8ca
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/JsonXmlConverter.java
@@ -0,0 +1,79 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.XML;
+
+/**
+ * The Class JsonXmlConverter.
+ */
+public class JsonXmlConverter {
+
+ /**
+ * Checks if is valid json.
+ *
+ * @param text the text
+ * @return true, if is valid json
+ */
+ public static boolean isValidJson(String text) {
+ try {
+ new JSONObject(text);
+ } catch (JSONException ex) {
+ try {
+ new JSONArray(text);
+ } catch (JSONException ex1) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ /**
+   * Convert JSON to XML.
+ *
+ * @param jsonText the json text
+ * @return the string
+ */
+ public static String convertJsontoXml(String jsonText) {
+ JSONObject jsonObj = new JSONObject(jsonText);
+ String xmlText = XML.toString(jsonObj);
+ return xmlText;
+ }
+
+ /**
+   * Convert XML to JSON.
+ *
+ * @param xmlText the xml text
+ * @return the string
+ */
+ public static String convertXmltoJson(String xmlText) {
+ JSONObject jsonObj = XML.toJSONObject(xmlText);
+ return jsonObj.toString();
+ }
+}
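
JsonXmlConverter simply bridges org.json objects and XML text; a short usage sketch follows, using an arbitrary payload:

    import org.onap.aai.sparky.util.JsonXmlConverter;

    public class JsonXmlConverterSketch {

      public static void main(String[] args) {
        String json = "{\"vserver-name\":\"vserver-1\",\"in-maint\":false}";

        if (JsonXmlConverter.isValidJson(json)) {
          // e.g. <vserver-name>vserver-1</vserver-name><in-maint>false</in-maint>
          String xml = JsonXmlConverter.convertJsontoXml(json);

          // back to a JSON object string (key order is not guaranteed)
          String roundTripped = JsonXmlConverter.convertXmltoJson(xml);

          System.out.println(xml);
          System.out.println(roundTripped);
        }
      }
    }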
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/NodeUtils.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/NodeUtils.java
new file mode 100644
index 0000000..68645e2
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/NodeUtils.java
@@ -0,0 +1,896 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.lang.Thread.UncaughtExceptionHandler;
+import java.net.URI;
+import java.nio.ByteBuffer;
+import java.security.SecureRandom;
+import java.sql.Timestamp;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.xml.stream.XMLStreamConstants;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+import org.restlet.Request;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.databind.ser.FilterProvider;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+
+
+/**
+ * The Class NodeUtils.
+ */
+public class NodeUtils {
+ private static SecureRandom sRandom = new SecureRandom();
+
+ private static final Pattern AAI_VERSION_PREFIX = Pattern.compile("/aai/v[0-9]+/(.*)");
+ private static final Pattern GIZMO_VERSION_PREFIX = Pattern.compile("[/]*services/inventory/v[0-9]+/(.*)");
+ private static final Pattern GIZMO_RELATIONSHIP_VERSION_PREFIX = Pattern.compile("services/inventory/relationships/v[0-9]+/(.*)");
+
+
+ public static synchronized String getRandomTxnId(){
+    byte[] bytes = new byte[6];
+ sRandom.nextBytes(bytes);
+ return Integer.toUnsignedString(ByteBuffer.wrap(bytes).getInt());
+ }
+
+ /**
+ * Builds the depth padding.
+ *
+ * @param depth the depth
+ * @return the string
+ */
+ public static String buildDepthPadding(int depth) {
+ StringBuilder sb = new StringBuilder(32);
+
+ for (int x = 0; x < depth; x++) {
+ sb.append(" ");
+ }
+
+ return sb.toString();
+ }
+
+
+ public static String extractRawPathWithoutVersion(String selfLinkUri) {
+
+ try {
+
+ String rawPath = new URI(selfLinkUri).getRawPath();
+
+ Matcher m = AAI_VERSION_PREFIX.matcher(rawPath);
+
+ if (m.matches()) {
+
+ if ( m.groupCount() >= 1) {
+ return m.group(1);
+ }
+
+ }
+ } catch (Exception e) {
+ }
+
+ return null;
+
+ }
+
+ public static String extractRawGizmoPathWithoutVersion(String resourceLink) {
+
+ try {
+
+ String rawPath = new URI(resourceLink).getRawPath();
+
+ Matcher m = GIZMO_VERSION_PREFIX.matcher(rawPath);
+
+ if (m.matches()) {
+
+ if ( m.groupCount() >= 1) {
+ return m.group(1);
+ }
+
+ }
+ } catch (Exception e) {
+ }
+
+ return null;
+
+ }
+
+ public static String extractRawGizmoRelationshipPathWithoutVersion(String resourceLink) {
+
+ try {
+
+ String rawPath = new URI(resourceLink).getRawPath();
+
+ Matcher m = GIZMO_RELATIONSHIP_VERSION_PREFIX.matcher(rawPath);
+
+ if (m.matches()) {
+
+ if ( m.groupCount() >= 1) {
+ return m.group(1);
+ }
+
+ }
+ } catch (Exception e) {
+ }
+
+ return null;
+
+ }
+
+
+
+
+ /**
+ * Checks if is numeric.
+ *
+ * @param numberStr the number str
+ * @return true, if is numeric
+ */
+ public static boolean isNumeric(String numberStr) {
+
+ try {
+ Double.parseDouble(numberStr);
+ } catch (Exception exc) {
+ return false;
+ }
+
+ return true;
+
+ }
+
+ /**
+ * Creates the named executor.
+ *
+ * @param name the name
+ * @param numWorkers the num workers
+ * @param logger the logger
+ * @return the executor service
+ */
+ public static ExecutorService createNamedExecutor(String name, int numWorkers, final Logger logger) {
+ UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() {
+
+ @Override
+ public void uncaughtException(Thread thread, Throwable exc) {
+
+ logger.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc);
+
+ }
+ };
+
+ ThreadFactory namedThreadFactory = new ThreadFactoryBuilder().setNameFormat(name + "-%d")
+ .setUncaughtExceptionHandler(uncaughtExceptionHandler).build();
+
+ return Executors.newScheduledThreadPool(numWorkers + 1, namedThreadFactory);
+ }
+
+
+ public static String calculateEditAttributeUri(String link) {
+ String uri = null;
+
+ if (link != null) {
+
+ Pattern pattern = Pattern.compile(SparkyConstants.URI_VERSION_REGEX_PATTERN);
+ Matcher matcher = pattern.matcher(link);
+ if (matcher.find()) {
+ uri = link.substring(matcher.end());
+ }
+ }
+ return uri;
+ }
+
+
+ /**
+ * Generate unique sha digest.
+ *
+ * @param keys the keys
+ * @return the string
+ */
+ public static String generateUniqueShaDigest(String... keys) {
+
+ if ((keys == null) || keys.length == 0) {
+ return null;
+ }
+
+ final String keysStr = Arrays.asList(keys).toString();
+ final String hashedId = org.apache.commons.codec.digest.DigestUtils.sha256Hex(keysStr);
+
+ return hashedId;
+ }
+
+ /**
+ * Gets the node field as text.
+ *
+ * @param node the node
+ * @param fieldName the field name
+ * @return the node field as text
+ */
+ public static String getNodeFieldAsText(JsonNode node, String fieldName) {
+
+ String fieldValue = null;
+
+ JsonNode valueNode = node.get(fieldName);
+
+ if (valueNode != null) {
+ fieldValue = valueNode.asText();
+ }
+
+ return fieldValue;
+ }
+
+ private static final String ENTITY_RESOURCE_KEY_FORMAT = "%s.%s";
+
+ /** Layout used by getDurationBreakdown(long) to render a millisecond duration. */
+ private static final String TIME_BREAK_DOWN_FORMAT =
+ "[ %d days, %d hours, %d minutes, %d seconds ]";
+
+ /**
+ * Gets the duration breakdown.
+ *
+ * @param millis the millis
+ * @return the duration breakdown
+ */
+ public static String getDurationBreakdown(long millis) {
+
+ if (millis < 0) {
+ return String.format(TIME_BREAK_DOWN_FORMAT, 0, 0, 0, 0);
+ }
+
+ long days = TimeUnit.MILLISECONDS.toDays(millis);
+ millis -= TimeUnit.DAYS.toMillis(days);
+ long hours = TimeUnit.MILLISECONDS.toHours(millis);
+ millis -= TimeUnit.HOURS.toMillis(hours);
+ long minutes = TimeUnit.MILLISECONDS.toMinutes(millis);
+ millis -= TimeUnit.MINUTES.toMillis(minutes);
+ long seconds = TimeUnit.MILLISECONDS.toSeconds(millis);
+
+ return String.format(TIME_BREAK_DOWN_FORMAT, days, hours, minutes, seconds);
+
+ }
+
+ /**
+ * Checks if is equal.
+ *
+ * @param n1 the n 1
+ * @param n2 the n 2
+ * @return true, if is equal
+ */
+ public static boolean isEqual(JsonNode n1, JsonNode n2) {
+
+ /*
+ * JSON object members are unordered, so comparing serialized representations of the same keys
+ * and values in a different order is unreliable. Instead, we flatten each tree into its
+ * "path=value" strings, sort both lists, and compare the sorted lists, which is sufficient for
+ * the equality check needed here.
+ */
+
+ TreeWalker walker = new TreeWalker();
+ List<String> n1Paths = new ArrayList<String>();
+ List<String> n2Paths = new ArrayList<String>();
+
+ walker.walkTree(n1Paths, n1);
+ walker.walkTree(n2Paths, n2);
+
+ Collections.sort(n1Paths);
+ Collections.sort(n2Paths);
+
+ return n1Paths.equals(n2Paths);
+
+ }
+
+ /**
+ * Concat array.
+ *
+ * @param list the list
+ * @return the string
+ */
+ public static String concatArray(List<String> list) {
+ return concatArray(list, " ");
+ }
+
+ private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
+
+ public static String getCurrentTimeStamp() {
+ SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT);
+ Timestamp timestamp = new Timestamp(System.currentTimeMillis());
+ return dateFormat.format(timestamp);
+ }
+
+ /**
+ * Concat array.
+ *
+ * @param list the list
+ * @param delimiter the delimiter
+ * @return the string
+ */
+ public static String concatArray(List<String> list, String delimiter) {
+
+ if (list == null || list.size() == 0) {
+ return "";
+ }
+
+ StringBuilder result = new StringBuilder(64);
+
+ boolean firstValue = true;
+
+ for (String item : list) {
+
+ if (firstValue) {
+ result.append(item);
+ firstValue = false;
+ } else {
+ result.append(delimiter).append(item);
+ }
+
+ }
+
+ return result.toString();
+
+ }
+
+ /**
+ * Concat array.
+ *
+ * @param values the values
+ * @return the string
+ */
+ public static String concatArray(String[] values) {
+
+ if (values == null || values.length == 0) {
+ return "";
+ }
+
+ StringBuilder result = new StringBuilder(64);
+
+ boolean firstValue = true;
+
+ for (String item : values) {
+
+ if (firstValue) {
+ result.append(item);
+ firstValue = false;
+ } else {
+ result.append(".").append(item);
+ }
+
+ }
+
+ return result.toString();
+
+ }
+
+ /**
+ * Builds the entity resource key.
+ *
+ * @param entityType the entity type
+ * @param resourceId the resource id
+ * @return the string
+ */
+ public static String buildEntityResourceKey(String entityType, String resourceId) {
+ return String.format(ENTITY_RESOURCE_KEY_FORMAT, entityType, resourceId);
+ }
+
+ /**
+ * Extract resource id from link.
+ *
+ * @param link the link
+ * @return the string
+ */
+ public static String extractResourceIdFromLink(String link) {
+
+ if (link == null) {
+ return null;
+ }
+
+ int linkLength = link.length();
+ if (linkLength == 0) {
+ return null;
+ }
+
+ /*
+ * if the last character != / then we need to change the lastIndex position
+ */
+
+ int startIndex = 0;
+ String resourceId = null;
+ if ("/".equals(link.substring(linkLength - 1))) {
+ // Use-case:
+ // https://ext1.test.onap.com:9292/aai/v7/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription-1/
+ startIndex = link.lastIndexOf("/", linkLength - 2);
+ resourceId = link.substring(startIndex + 1, linkLength - 1);
+ } else {
+ // Use-case:
+ // https://ext1.test.onap.com:9292/aai/v7/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription-1
+ startIndex = link.lastIndexOf("/");
+ resourceId = link.substring(startIndex + 1, linkLength);
+ }
+
+ String result = null;
+
+ if (resourceId != null) {
+ try {
+ result = java.net.URLDecoder.decode(resourceId, "UTF-8");
+ } catch (Exception exc) {
+ /*
+ * if there is a failure decoding the parameter we will just return the original value.
+ */
+ result = resourceId;
+ }
+ }
+
+ return result;
+
+ }
+
+ /**
+ * Gets the xml stream constant as str.
+ *
+ * @param value the value
+ * @return the xml stream constant as str
+ */
+ public static String getXmlStreamConstantAsStr(int value) {
+ switch (value) {
+ case XMLStreamConstants.ATTRIBUTE:
+ return "ATTRIBUTE";
+ case XMLStreamConstants.CDATA:
+ return "CDATA";
+ case XMLStreamConstants.CHARACTERS:
+ return "CHARACTERS";
+ case XMLStreamConstants.COMMENT:
+ return "COMMENT";
+ case XMLStreamConstants.DTD:
+ return "DTD";
+ case XMLStreamConstants.END_DOCUMENT:
+ return "END_DOCUMENT";
+ case XMLStreamConstants.END_ELEMENT:
+ return "END_ELEMENT";
+ case XMLStreamConstants.ENTITY_DECLARATION:
+ return "ENTITY_DECLARATION";
+ case XMLStreamConstants.ENTITY_REFERENCE:
+ return "ENTITY_REFERENCE";
+ case XMLStreamConstants.NAMESPACE:
+ return "NAMESPACE";
+ case XMLStreamConstants.NOTATION_DECLARATION:
+ return "NOTATION_DECLARATION";
+ case XMLStreamConstants.PROCESSING_INSTRUCTION:
+ return "PROCESSING_INSTRUCTION";
+ case XMLStreamConstants.SPACE:
+ return "SPACE";
+ case XMLStreamConstants.START_DOCUMENT:
+ return "START_DOCUMENT";
+ case XMLStreamConstants.START_ELEMENT:
+ return "START_ELEMENT";
+
+ default:
+ return "Unknown(" + value + ")";
+ }
+ }
+
+ /**
+ * Convert object to json.
+ *
+ * @param object the object
+ * @param pretty the pretty
+ * @return the string
+ * @throws JsonProcessingException the json processing exception
+ */
+ public static String convertObjectToJson(Object object, boolean pretty)
+ throws JsonProcessingException {
+ ObjectWriter ow = null;
+
+ ObjectMapper mapper = new ObjectMapper();
+ mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
+
+ if (pretty) {
+ ow = mapper.writer().withDefaultPrettyPrinter();
+
+ } else {
+ ow = mapper.writer();
+ }
+
+ return ow.writeValueAsString(object);
+ }
+
+ /**
+ * Convert object to json, selectively including fields through the supplied filters.
+ * Example use case: based on the request type we may need to send a different serialization of
+ * the UiViewFilterEntity.
+ *
+ * @param object the object
+ * @param pretty the pretty
+ * @param filters the filter provider controlling which fields are serialized
+ * @return the string
+ * @throws JsonProcessingException the json processing exception
+ */
+ public static String convertObjectToJson(Object object, boolean pretty, FilterProvider filters)
+ throws JsonProcessingException {
+ ObjectWriter ow = null;
+
+ ObjectMapper mapper = new ObjectMapper();
+ mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
+
+ if (pretty) {
+ ow = mapper.writer(filters).withDefaultPrettyPrinter();
+
+ } else {
+ ow = mapper.writer(filters);
+ }
+
+ return ow.writeValueAsString(object);
+ }
+
+
+ /**
+ * Convert json str to json node.
+ *
+ * @param jsonStr the json str
+ * @return the json node
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public static JsonNode convertJsonStrToJsonNode(String jsonStr) throws IOException {
+ if (jsonStr == null || jsonStr.length() == 0) {
+ return null;
+ }
+
+ return new ObjectMapper().readTree(jsonStr);
+ }
+
+ /**
+ * Convert object to xml.
+ *
+ * @param object the object
+ * @return the string
+ * @throws JsonProcessingException the json processing exception
+ */
+ public static String convertObjectToXml(Object object) throws JsonProcessingException {
+ ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
+ String jsonOutput = ow.writeValueAsString(object);
+
+ if (jsonOutput == null) {
+ return null;
+ }
+
+ return JsonXmlConverter.convertJsontoXml(jsonOutput);
+
+ }
+
+ /**
+ * Extract objects by key.
+ *
+ * @param node the node
+ * @param searchKey the search key
+ * @param foundObjects the found objects
+ */
+ public static void extractObjectsByKey(JsonNode node, String searchKey,
+ Collection<JsonNode> foundObjects) {
+
+ if ( node == null ) {
+ return;
+ }
+
+ if (node.isObject()) {
+ Iterator<Map.Entry<String, JsonNode>> nodeIterator = node.fields();
+
+ while (nodeIterator.hasNext()) {
+ Map.Entry<String, JsonNode> entry = nodeIterator.next();
+ if (!entry.getValue().isValueNode()) {
+ extractObjectsByKey(entry.getValue(), searchKey, foundObjects);
+ }
+
+ String name = entry.getKey();
+ if (name.equalsIgnoreCase(searchKey)) {
+
+ JsonNode entryNode = entry.getValue();
+
+ if (entryNode.isArray()) {
+
+ Iterator<JsonNode> arrayItemsIterator = entryNode.elements();
+ while (arrayItemsIterator.hasNext()) {
+ foundObjects.add(arrayItemsIterator.next());
+ }
+
+ } else {
+ foundObjects.add(entry.getValue());
+ }
+
+
+ }
+ }
+ } else if (node.isArray()) {
+ Iterator<JsonNode> arrayItemsIterator = node.elements();
+ while (arrayItemsIterator.hasNext()) {
+ extractObjectsByKey(arrayItemsIterator.next(), searchKey, foundObjects);
+ }
+
+ }
+ }
+
+ public static String extractObjectValueByKey(JsonNode node, String searchKey) {
+
+ if (node == null) {
+ return null;
+ }
+
+ if (node.isObject()) {
+ Iterator<Map.Entry<String, JsonNode>> nodeIterator = node.fields();
+
+ while (nodeIterator.hasNext()) {
+ Map.Entry<String, JsonNode> entry = nodeIterator.next();
+ if (!entry.getValue().isValueNode()) {
+ return extractObjectValueByKey(entry.getValue(), searchKey);
+ }
+
+ String name = entry.getKey();
+ if (name.equalsIgnoreCase(searchKey)) {
+
+ JsonNode entryNode = entry.getValue();
+
+ if (entryNode.isArray()) {
+
+ Iterator<JsonNode> arrayItemsIterator = entryNode.elements();
+ while (arrayItemsIterator.hasNext()) {
+ return arrayItemsIterator.next().asText();
+ }
+
+ } else {
+ return entry.getValue().asText();
+ }
+
+
+ }
+ }
+ } else if (node.isArray()) {
+ Iterator<JsonNode> arrayItemsIterator = node.elements();
+ while (arrayItemsIterator.hasNext()) {
+ return extractObjectValueByKey(arrayItemsIterator.next(), searchKey);
+ }
+
+ }
+
+ return null;
+
+ }
+
+ /**
+ * Convert array into list.
+ *
+ * @param node the node
+ * @param instances the instances
+ */
+ public static void convertArrayIntoList(JsonNode node, Collection<JsonNode> instances) {
+
+ if (node.isArray()) {
+ Iterator<JsonNode> arrayItemsIterator = node.elements();
+ while (arrayItemsIterator.hasNext()) {
+ instances.add(arrayItemsIterator.next());
+ }
+
+ } else {
+ instances.add(node);
+ }
+
+ }
+
+ /**
+ * Extract field values from object.
+ *
+ * @param node the node
+ * @param attributesToExtract the attributes to extract
+ * @param fieldValues the field values
+ */
+ public static void extractFieldValuesFromObject(JsonNode node,
+ Collection<String> attributesToExtract, Collection<String> fieldValues) {
+
+ if (node == null) {
+ return;
+ }
+
+ if (node.isObject()) {
+
+ JsonNode valueNode = null;
+
+ for (String attrToExtract : attributesToExtract) {
+
+ valueNode = node.get(attrToExtract);
+
+ if (valueNode != null) {
+
+ if (valueNode.isValueNode()) {
+ fieldValues.add(valueNode.asText());
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Extract field value from object.
+ *
+ * @param node the node
+ * @param fieldName the field name
+ * @return the string
+ */
+ public static String extractFieldValueFromObject(JsonNode node, String fieldName) {
+
+ if (node == null) {
+ return null;
+ }
+
+ if (node.isObject()) {
+
+ JsonNode valueNode = node.get(fieldName);
+
+ if (valueNode != null) {
+
+ if (valueNode.isValueNode()) {
+ return valueNode.asText();
+ }
+ }
+
+ }
+ return null;
+
+ }
+
+ /**
+ * Format timestamp.
+ *
+ * @param timestamp the timestamp
+ * @return the string
+ */
+ public static String formatTimestamp(String timestamp) {
+ try {
+ SimpleDateFormat originalFormat = new SimpleDateFormat("yyyyMMdd'T'HHmmss'Z'");
+ originalFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ Date toDate = originalFormat.parse(timestamp);
+ SimpleDateFormat newFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
+ newFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+ return newFormat.format(toDate);
+
+ } catch (ParseException pe) {
+ return timestamp;
+ }
+ }
+
+ /**
+ * Gets the HttpRequest payload.
+ *
+ * @param request the request
+ * @return the body
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public static String getBody(HttpServletRequest request) throws IOException {
+ InputStream inputStream = request.getInputStream();
+ return getBodyFromStream(inputStream);
+ }
+
+
+
+ /**
+ * Gets the Restlet Request payload.
+ *
+ * @param request the request
+ * @return the body
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public static String getBody(Request request) throws IOException {
+ InputStream inputStream = request.getEntity().getStream();
+ return getBodyFromStream(inputStream);
+ }
+
+
+ /**
+ * Gets the payload from the input stream of a request.
+ *
+ * @param inputStream the request input stream
+ * @return the body
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public static String getBodyFromStream(InputStream inputStream) throws IOException {
+
+ if (inputStream == null) {
+ return "";
+ }
+
+ StringBuilder stringBuilder = new StringBuilder();
+
+ // read the stream in 128-character chunks; try-with-resources closes the reader
+ try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {
+ char[] charBuffer = new char[128];
+ int charsRead;
+ while ((charsRead = bufferedReader.read(charBuffer)) > 0) {
+ stringBuilder.append(charBuffer, 0, charsRead);
+ }
+ }
+
+ return stringBuilder.toString();
+ }
+
+
+ /**
+ * The main method.
+ *
+ * @param args the arguments
+ * @throws ParseException the parse exception
+ */
+ public static void main(String[] args) throws ParseException {
+ // ad-hoc smoke test of the timestamp reformatting; uses HH (24-hour) to match formatTimestamp
+ String date = "20170110T112312Z";
+ SimpleDateFormat originalFormat = new SimpleDateFormat("yyyyMMdd'T'HHmmss'Z'");
+ Date toDate = originalFormat.parse(date);
+ SimpleDateFormat newFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss'Z'");
+ System.out.println(newFormat.format(toDate));
+ }
+
+
+
+}
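For reviewers, a minimal usage sketch of the helpers above (illustrative only, not part of this change; the class name NodeUtilsSketch and the sample self-link are hypothetical):

import com.fasterxml.jackson.databind.JsonNode;
import org.onap.aai.sparky.util.NodeUtils;

public class NodeUtilsSketch {
  public static void main(String[] args) throws Exception {
    // Same keys and values in a different order compare as equal, because isEqual
    // sorts the flattened "path=value" strings produced by TreeWalker.
    JsonNode a = NodeUtils.convertJsonStrToJsonNode("{\"x\":1,\"y\":2}");
    JsonNode b = NodeUtils.convertJsonStrToJsonNode("{\"y\":2,\"x\":1}");
    System.out.println(NodeUtils.isEqual(a, b)); // true

    // Both trailing-slash and non-trailing-slash self-links yield the last path segment.
    String link = "https://aai.example.org:9292/aai/v7/cloud-infrastructure/pservers/pserver/pserver-1/";
    System.out.println(NodeUtils.extractResourceIdFromLink(link)); // pserver-1

    // 1 day + 1 hour + 1 minute + 1 second expressed in milliseconds.
    System.out.println(NodeUtils.getDurationBreakdown(90061000L)); // [ 1 days, 1 hours, 1 minutes, 1 seconds ]
  }
}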
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RawByteHelper.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RawByteHelper.java
new file mode 100644
index 0000000..99166ca
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RawByteHelper.java
@@ -0,0 +1,176 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+/**
+ * The Class RawByteHelper.
+ */
+public class RawByteHelper {
+ private static final byte[] HEX_CHAR =
+ new byte[] {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
+
+ /**
+ * Dump bytes.
+ *
+ * @param buffer the buffer
+ * @return the string
+ */
+ public static String dumpBytes(byte[] buffer) {
+ if (buffer == null) {
+ return "";
+ }
+ String newLine = System.getProperty("line.separator");
+ StringBuffer sb = new StringBuffer();
+
+ for (int i = 0; i < buffer.length; i++) {
+ if (i != 0 && i % 16 == 0) {
+ sb.append(newLine);
+ }
+ // sb.append("0x").append((char) (HEX_CHAR[(buffer[i] & 0x00F0) >> 4])).append((char)
+ // (HEX_CHAR[buffer[i] & 0x000F])).append(" ");
+ sb.append((char) (HEX_CHAR[(buffer[i] & 0x00F0) >> 4]))
+ .append((char) (HEX_CHAR[buffer[i] & 0x000F])).append(" ");
+ }
+
+ return sb.toString();
+ }
+
+ /**
+ * Bytes to int.
+ *
+ * @param one the one
+ * @param two the two
+ * @param three the three
+ * @param four the four
+ * @return the int
+ */
+ // The 0xFF masks keep each byte unsigned before shifting; for background see
+ // http://www.darksleep.com/player/JavaAndUnsignedTypes.html
+ public static int bytesToInt(byte one, byte two, byte three, byte four) {
+ return (((0xFF & one) << 24) | ((0xFF & two) << 16) | ((0xFF & three) << 8) | ((0xFF & four)));
+ }
+
+ /**
+ * Bytes to short.
+ *
+ * @param one the one
+ * @param two the two
+ * @return the short
+ */
+ public static short bytesToShort(byte one, byte two) {
+ return (short) (((0xFF & one) << 8) | (0xFF & two));
+ }
+
+ /**
+ * First byte.
+ *
+ * @param num the num
+ * @return the byte
+ */
+ // short helper functions
+ static byte firstByte(short num) {
+ return (byte) ((num >> 8) & 0xFF);
+ }
+
+ /**
+ * First byte.
+ *
+ * @param num the num
+ * @return the byte
+ */
+ // Int helper functions
+ static byte firstByte(int num) {
+ return (byte) ((num >> 24) & 0xFF);
+ }
+
+ /**
+ * Second byte.
+ *
+ * @param num the num
+ * @return the byte
+ */
+ static byte secondByte(short num) {
+ return (byte) (num & 0xFF);
+ }
+
+ /**
+ * Second byte.
+ *
+ * @param num the num
+ * @return the byte
+ */
+ static byte secondByte(int num) {
+ return (byte) ((num >> 16) & 0xFF);
+ }
+
+ /**
+ * Third byte.
+ *
+ * @param num the num
+ * @return the byte
+ */
+ static byte thirdByte(int num) {
+ return (byte) ((num >> 8) & 0xFF);
+ }
+
+ /**
+ * Fourth byte.
+ *
+ * @param num the num
+ * @return the byte
+ */
+ static byte fourthByte(int num) {
+ return (byte) (num & 0xFF);
+ }
+
+ /**
+ * Int to byte.
+ *
+ * @param value the value
+ * @return the byte
+ */
+ public static byte intToByte(int value) {
+ return fourthByte(value);
+ }
+
+ /**
+ * Int to short.
+ *
+ * @param value the value
+ * @return the short
+ */
+ public static short intToShort(int value) {
+ return (short) ((value & 0xFF00) | (value & 0xFF));
+ }
+
+}
+
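A small illustrative check of the unsigned-byte reassembly above (editorial sketch, not part of the patch; RawByteHelperSketch is a hypothetical class name):

import org.onap.aai.sparky.util.RawByteHelper;

public class RawByteHelperSketch {
  public static void main(String[] args) {
    // The 0xFF masks keep each byte unsigned, so 0x01 0x02 0x03 0x04 reassembles to 0x01020304.
    int value = RawByteHelper.bytesToInt((byte) 0x01, (byte) 0x02, (byte) 0x03, (byte) 0x04);
    System.out.println(Integer.toHexString(value)); // 1020304

    // dumpBytes renders a buffer as space-separated hex pairs, sixteen per line.
    byte[] buf = {(byte) 0xDE, (byte) 0xAD, (byte) 0xBE, (byte) 0xEF};
    System.out.println(RawByteHelper.dumpBytes(buf)); // DE AD BE EF
  }
}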
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RestletUtils.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RestletUtils.java
new file mode 100644
index 0000000..26dbf62
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RestletUtils.java
@@ -0,0 +1,119 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.search.SearchServiceAdapter;
+import org.restlet.Response;
+import org.restlet.data.MediaType;
+import org.restlet.data.Status;
+
+public class RestletUtils {
+ /**
+ * Returns an HttpServletResponse based on values from a Restlet Response
+ *
+ * @param restletResponse Restlet Response to be converted to an HttpServletResponse
+ * @return An HttpServletResponse object built from the values of a Restlet Response
+ */
+ public HttpServletResponse convertRestletResponseToHttpServletResponse(Response restletResponse) {
+ return org.restlet.ext.servlet.ServletUtils.getResponse(restletResponse);
+ }
+
+ /**
+ * Execute post query
+ *
+ * @param logger The logger
+ * @param search The searchAdapter
+ * @param response The response
+ * @param requestUrl The request URL
+ * @param requestJsonPayload The request JSON payload
+ * @return The operation result
+ */
+ public OperationResult executePostQuery(Logger logger, SearchServiceAdapter search,
+ Response response, String requestUrl, String requestJsonPayload) {
+
+ OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json");
+
+ if (opResult.getResultCode() > 300) {
+ setRestletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult());
+ } else {
+ response.setStatus(new Status(opResult.getResultCode()));
+ }
+
+ return opResult;
+ }
+
+ /**
+ * Generate JSON error response
+ *
+ * @param message The error message
+ * @return The error message formatted as a JSON string
+ */
+ public String generateJsonErrorResponse(String message) {
+ return String.format("{ \"errorMessage\" : \"%s\" }", message);
+ }
+
+ /**
+ * Log Restlet exceptions/errors & prepare Response object with exception/errors info
+ *
+ * @param logger The logger
+ * @param errorMsg The error message
+ * @param exc The exception
+ * @param response The response
+ */
+ public void handleRestletErrors(Logger logger, String errorMsg, Exception exc,
+ Response response) {
+ String errorLogMsg = (exc == null ? errorMsg : errorMsg + ". Error: " + exc.getLocalizedMessage());
+ logger.error(AaiUiMsgs.ERROR_GENERIC, errorLogMsg);
+ response.setEntity(generateJsonErrorResponse(errorMsg), MediaType.APPLICATION_JSON);
+ }
+
+ /**
+ * Sets the Restlet response
+ *
+ * @param logger The logger
+ * @param isError The error
+ * @param responseCode The response code
+ * @param response The response
+ * @param postPayload The post payload
+ */
+ public void setRestletResponse(Logger logger, boolean isError, int responseCode,
+ Response response, String postPayload) {
+
+ if (isError) {
+ logger.error(AaiUiMsgs.ERROR_GENERIC, postPayload);
+ }
+
+ response.setStatus(new Status(responseCode));
+
+ if (postPayload != null) {
+ response.setEntity(postPayload, MediaType.APPLICATION_JSON);
+ }
+ }
+}
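For clarity, a sketch of the error-payload helper above (illustrative only; assumes the message itself contains no characters that need JSON escaping, and RestletUtilsSketch is a hypothetical class name):

import org.onap.aai.sparky.util.RestletUtils;

public class RestletUtilsSketch {
  public static void main(String[] args) {
    // Produces a JSON object with the message as a quoted string value:
    // { "errorMessage" : "search backend unavailable" }
    System.out.println(new RestletUtils().generateJsonErrorResponse("search backend unavailable"));
  }
}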
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java
new file mode 100644
index 0000000..05f6996
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java
@@ -0,0 +1,100 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class SuggestionsPermutation {
+
+ /**
+ * Returns all unique combinations of the suggestions provided. Ordering within a combination is
+ * not taken into account when computing uniqueness. e.g. a list of A,B,C,D will
+ * return [[A], [A, B, C, D], [A, C, D], [A, D], [B], [B, C, D], [B, D], [C], [C, D], [D]]
+ *
+ * @param originalList the list to build the unique combinations from
+ * @return an ArrayList containing an ArrayList for each possible combination
+ */
+ public static ArrayList<ArrayList<String>> getUniqueListForSuggestions(
+ List<String> originalList) {
+ ArrayList<ArrayList<String>> lists = new ArrayList<ArrayList<String>>();
+ if (originalList.isEmpty()) {
+ lists.add(new ArrayList<String>());
+ return lists;
+ }
+ List<String> list = new ArrayList<String>(originalList);
+ String head = list.get(0);
+ ArrayList<String> rest = new ArrayList<String>(list.subList(1, list.size()));
+
+ for (ArrayList<String> activeList : getUniqueListForSuggestions(rest)) {
+ ArrayList<String> newList = new ArrayList<String>();
+ newList.add(head);
+ newList.addAll(activeList);
+ lists.add(newList);
+ lists.add(activeList);
+ }
+ return lists;
+ }
+
+ public static ArrayList<ArrayList<String>> getNonEmptyUniqueLists(List<String> list){
+ ArrayList<ArrayList<String>> lists = getUniqueListForSuggestions(list);
+ // remove empty list from the power set
+ for (ArrayList<String> emptyList : lists ){
+ if ( emptyList.isEmpty() ) {
+ lists.remove(emptyList);
+ break;
+ }
+ }
+ return lists;
+ }
+
+ public static List<List<String>> getListPermutations(List<String> list) {
+ List<String> inputList = new ArrayList<String>();
+ inputList.addAll(list);
+ if (inputList.size() == 0) {
+ List<List<String>> result = new ArrayList<List<String>>();
+ result.add(new ArrayList<String>());
+ return result;
+ }
+
+ List<List<String>> listOfLists = new ArrayList<List<String>>();
+
+ String firstElement = inputList.remove(0);
+
+ List<List<String>> recursiveReturn = getListPermutations(inputList);
+ for (List<String> li : recursiveReturn) {
+
+ for (int index = 0; index <= li.size(); index++) {
+ List<String> temp = new ArrayList<String>(li);
+ temp.add(index, firstElement);
+ listOfLists.add(temp);
+ }
+
+ }
+ return listOfLists;
+ }
+
+}
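An illustrative invocation of the combination helper above (sketch only; the printed ordering reflects the current recursion and is not a contract, and SuggestionsPermutationSketch is a hypothetical class name):

import java.util.Arrays;
import org.onap.aai.sparky.util.SuggestionsPermutation;

public class SuggestionsPermutationSketch {
  public static void main(String[] args) {
    // Every order-preserving subset of [A, B, C], including the empty list.
    System.out.println(SuggestionsPermutation.getUniqueListForSuggestions(Arrays.asList("A", "B", "C")));
    // [[A, B, C], [B, C], [A, C], [C], [A, B], [B], [A], []]

    // getNonEmptyUniqueLists drops the empty entry, leaving 2^3 - 1 = 7 combinations.
    System.out.println(SuggestionsPermutation.getNonEmptyUniqueLists(Arrays.asList("A", "B", "C")).size()); // 7
  }
}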
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/TreeWalker.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/TreeWalker.java
new file mode 100644
index 0000000..d8bb7b9
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/TreeWalker.java
@@ -0,0 +1,136 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.util;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
+
+/**
+ * The Class TreeWalker.
+ */
+public class TreeWalker {
+
+ /**
+ * Convert json to node.
+ *
+ * @param json the json
+ * @return the json node
+ * @throws JsonProcessingException the json processing exception
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ public JsonNode convertJsonToNode(String json) throws JsonProcessingException, IOException {
+ ObjectMapper mapper = new ObjectMapper();
+
+ if (json == null) {
+ return null;
+ }
+
+ return mapper.readTree(json);
+
+ }
+
+ /**
+ * Walk tree.
+ *
+ * @param paths the paths
+ * @param root the root
+ */
+ public void walkTree(List<String> paths, JsonNode root) {
+ walker(paths, null, root);
+ }
+
+ /**
+ * Walker.
+ *
+ * @param paths the paths
+ * @param nodename the nodename
+ * @param node the node
+ */
+ private void walker(List<String> paths, String nodename, JsonNode node) {
+
+ if (node == null) {
+ return;
+ }
+
+ if (node.isObject()) {
+ Iterator<Map.Entry<String, JsonNode>> iterator = node.fields();
+
+ ArrayList<Map.Entry<String, JsonNode>> nodesList = Lists.newArrayList(iterator);
+ // System.out.println("Walk Tree - root:" + node + ", elements
+ // keys:" + nodesList);
+
+ if (nodesList.isEmpty()) {
+
+ if (nodename != null) {
+ paths.add(nodename);
+ }
+
+ } else {
+
+ for (Map.Entry<String, JsonNode> nodEntry : nodesList) {
+ String name = nodEntry.getKey();
+ JsonNode newNode = nodEntry.getValue();
+
+ if (newNode.isValueNode()) {
+ if (nodename == null) {
+ paths.add(name + "=" + newNode.asText());
+ } else {
+ paths.add(nodename + "." + name + "=" + newNode.asText());
+ }
+ } else {
+
+ if (nodename == null) {
+ walker(paths, name, newNode);
+ } else {
+ walker(paths, nodename + "." + name, newNode);
+ }
+ }
+
+ }
+ }
+ } else if (node.isArray()) {
+ Iterator<JsonNode> arrayItemsIterator = node.elements();
+ ArrayList<JsonNode> arrayItemsList = Lists.newArrayList(arrayItemsIterator);
+ for (JsonNode arrayNode : arrayItemsList) {
+ walker(paths, nodename, arrayNode);
+ }
+ } else if (node.isValueNode()) {
+ paths.add(nodename + "=" + node.asText());
+ }
+ }
+}
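An editorial sketch of the flattening behaviour that NodeUtils.isEqual relies on (not part of the patch; TreeWalkerSketch is a hypothetical class name):

import java.util.ArrayList;
import java.util.List;
import org.onap.aai.sparky.util.TreeWalker;

public class TreeWalkerSketch {
  public static void main(String[] args) throws Exception {
    TreeWalker walker = new TreeWalker();
    List<String> paths = new ArrayList<String>();
    // Nested objects become dotted "path=value" entries; array elements reuse the parent name.
    walker.walkTree(paths, walker.convertJsonToNode("{\"a\":{\"b\":1},\"c\":[2,3]}"));
    System.out.println(paths); // [a.b=1, c=2, c=3]
  }
}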
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java
new file mode 100644
index 0000000..c6d4666
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java
@@ -0,0 +1,61 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect;
+
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+
+
+
+/**
+ * The Class EntityTypeAggregation.
+ */
+public class EntityTypeAggregation {
+
+ private ConcurrentHashMap<String, AtomicInteger> counters;
+
+ /**
+ * Instantiates a new entity type aggregation.
+ */
+ public EntityTypeAggregation() {
+ counters = new ConcurrentHashMap<String, AtomicInteger>();
+ }
+
+ /**
+ * Peg counter.
+ *
+ * @param counterName the counter name
+ */
+ public void pegCounter(String counterName) {
+ counters.putIfAbsent(counterName, new AtomicInteger(0));
+ counters.get(counterName).incrementAndGet();
+ }
+
+ public ConcurrentHashMap<String, AtomicInteger> getCounters() {
+ return counters;
+ }
+
+}
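A brief usage sketch of the counter map above (illustrative only; EntityTypeAggregationSketch is a hypothetical class name):

import org.onap.aai.sparky.viewandinspect.EntityTypeAggregation;

public class EntityTypeAggregationSketch {
  public static void main(String[] args) {
    EntityTypeAggregation agg = new EntityTypeAggregation();
    // pegCounter lazily creates the counter for a name, then increments it atomically.
    agg.pegCounter("generic-vnf");
    agg.pegCounter("generic-vnf");
    agg.pegCounter("pserver");
    System.out.println(agg.getCounters()); // e.g. {generic-vnf=2, pserver=1}
  }
}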
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java
new file mode 100644
index 0000000..0133c9d
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java
@@ -0,0 +1,99 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect;
+
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.camel.Exchange;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.logging.util.ServletUtils;
+import org.onap.aai.sparky.viewandinspect.entity.QueryRequest;
+import org.onap.aai.sparky.viewandinspect.services.VisualizationService;
+import org.restlet.data.Status;
+
+public class SchemaVisualizationProcessor {
+
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(SchemaVisualizationProcessor.class);
+
+ private VisualizationService visualizationService;
+
+ public SchemaVisualizationProcessor() throws Exception {}
+
+ protected String generateJsonErrorResponse(String message) {
+ return String.format("{ \"errorMessage\" : \"%s\" }", message);
+ }
+
+ public void setVisualizationService(VisualizationService visualizationService) {
+ this.visualizationService = visualizationService;
+ }
+
+ public VisualizationService getVisualizationService() {
+ return visualizationService;
+ }
+
+ public void processVisualizationRequest(Exchange exchange) {
+
+ HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class);
+ ServletUtils.setUpMdcContext(exchange, request);
+
+ QueryRequest hashId = null;
+ OperationResult operationResult = null;
+
+ String visualizationPayload = exchange.getIn().getBody(String.class);
+ hashId = this.getVisualizationService().analyzeQueryRequestBody(visualizationPayload);
+
+ if (hashId != null) {
+
+ operationResult = this.getVisualizationService().buildVisualizationUsingGenericQuery(hashId);
+
+ if (operationResult.getResultCode() != Status.SUCCESS_OK.getCode()) {
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, operationResult.getResultCode());
+ LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, String
+ .format("Failed to process Visualization Schema Payload = '%s'", visualizationPayload));
+ return;
+ }
+
+ } else {
+ operationResult = new OperationResult();
+ operationResult.setResult(String
+ .format("Failed to analyze Visualization Schema Payload = '%s'", visualizationPayload));
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, operationResult.getResultCode());
+ LOG.error(AaiUiMsgs.FAILED_TO_ANALYZE, String
+ .format("Failed to analyze Visualization Schema Payload = '%s'", visualizationPayload));
+ return;
+
+ }
+
+ exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, operationResult.getResultCode());
+ exchange.getOut().setBody(operationResult.getResult());
+
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/SparkyConstants.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/SparkyConstants.java
new file mode 100644
index 0000000..5624b20
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/SparkyConstants.java
@@ -0,0 +1,102 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.config;
+
+/**
+ * The Class SparkyConstants.
+ */
+public class SparkyConstants {
+
+ public static String APP_NAME = "AAIUI";
+
+ /** Default to unix file separator if system property file.separator is null */
+ public static final String FILESEP =
+ (System.getProperty("file.separator") == null) ? "/" : System.getProperty("file.separator");
+
+ public static String CONFIG_HOME = System.getProperty("CONFIG_HOME") + FILESEP;
+ public static String DYNAMIC_CONFIG_APP_LOCATION = CONFIG_HOME;
+
+ public static String CONFIG_OXM_LOCATION = CONFIG_HOME + "model" + FILESEP;
+ public static String CONFIG_FILTERS_BASE_LOCATION = CONFIG_HOME + FILESEP;
+ //public static String CONFIG_AUTH_LOCATION = CONFIG_HOME + "auth" + FILESEP;
+
+ public static String HOST = "host";
+ public static String IP_ADDRESS = "ipAddress";
+ public static String PORT = "port";
+ public static String HTTP_PORT = "httpPort";
+ public static String RETRIES = "numRequestRetries";
+ public static String RESOURCE_VERSION = "resource-version";
+ public static String URI = "URI";
+
+ public static String AUTHORIZED_USERS_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "authorized-users.config";
+ public static String USERS_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "users.config";
+ public static String ROLES_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "roles.config";
+ public static String PORTAL_AUTHENTICATION_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "portal" + FILESEP + "portal-authentication.properties";
+
+ // Related to data-router properties
+ public static String DR_URI_SUFFIX = "uriSuffix";
+ public static String DR_CERT_NAME = "cert-name";
+ public static String DR_KEYSTORE_PASSWORD = "keystore-password";
+ public static String DR_KEYSTORE = "keystore";
+ public static String DR_CONNECT_TIMEOUT = "connectTimeoutMs";
+ public static String DR_READ_TIMEOUT = "readTimeoutMs";
+
+ public static final String APP_JSON = "application/json";
+
+ public static final String ES_SUGGEST_API = "_suggest";
+ public static final String ES_COUNT_API = "_count";
+ public static final String ES_SEARCH_API = "_search";
+
+ public static final String UI_FILTER_VIEW_NAME_PARAMETER = "viewName";
+ public static final String UI_FILTER_ID_LIST_PARAMETER = "filterIdList";
+
+ public static final String ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT =
+ "entityautosuggestindex-localhost";
+ public static final String ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT =
+ "/etc/autoSuggestSettings.json";
+ public static final String ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT =
+ "/etc/autoSuggestMappings.json";
+ public static final String ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT =
+ "/etc/dynamicMappings.json";
+ /*public static final String FILTER_LIST_FILE_DEFAULT =
+ CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_filters.json";
+ public static final String FILTER_MAPPING_FILE_DEFAULT =
+ CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_views.json";*/
+
+ public static final String SUBSCRIPTION_OI_MAPPING =
+ CONFIG_FILTERS_BASE_LOCATION + "subscription_object_inspector_mapping.json";
+
+ public static final String SUGGESTION_TEXT_SEPARATOR = " -- ";
+
+ // Injected Attributes
+ public static String URI_ATTR_NAME = "uri";
+
+ public static final String URI_VERSION_REGEX_PATTERN = "aai/v[\\d]+/";
+
+ public static final String getConfigPath(String configFile){
+ return CONFIG_HOME + FILESEP + configFile;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java
new file mode 100644
index 0000000..e0cc9c6
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java
@@ -0,0 +1,102 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.config;
+
+/**
+ * The Class TierSupportUiConstants.
+ */
+public class TierSupportUiConstants {
+
+ public static String APP_NAME = "AAIUI";
+
+ /** Default to unix file separator if system property file.separator is null */
+ public static final String FILESEP =
+ (System.getProperty("file.separator") == null) ? "/" : System.getProperty("file.separator");
+
+ public static String CONFIG_HOME = System.getProperty("CONFIG_HOME") + FILESEP;
+ public static String AJSC_HOME = System.getProperty("AJSC_HOME") + FILESEP;
+ public static String CONFIG_ROOT_LOCATION =
+ AJSC_HOME + "bundleconfig" + FILESEP + "etc" + FILESEP;
+ public static String STATIC_CONFIG_APP_LOCATION = CONFIG_ROOT_LOCATION + "appprops" + FILESEP;
+ public static String DYNAMIC_CONFIG_APP_LOCATION = CONFIG_HOME;
+
+ public static String CONFIG_OXM_LOCATION = CONFIG_HOME + "model" + FILESEP;
+ public static String CONFIG_FILTERS_BASE_LOCATION = CONFIG_HOME + FILESEP;
+ public static String CONFIG_AUTH_LOCATION = CONFIG_HOME + "auth" + FILESEP;
+
+ public static String HOST = "host";
+ public static String IP_ADDRESS = "ipAddress";
+ public static String PORT = "port";
+ public static String HTTP_PORT = "httpPort";
+ public static String RETRIES = "numRequestRetries";
+ public static String RESOURCE_VERSION = "resource-version";
+ public static String URI = "URI";
+
+ public static String AUTHORIZED_USERS_FILE_LOCATION =
+ DYNAMIC_CONFIG_APP_LOCATION + "authorized-users.config";
+ public static String USERS_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "users.config";
+ public static String ROLES_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "roles.config";
+ public static String PORTAL_AUTHENTICATION_FILE_LOCATION =
+ DYNAMIC_CONFIG_APP_LOCATION + "portal" + FILESEP + "portal-authentication.properties";
+
+ // Related to data-router properties
+ public static String DR_URI_SUFFIX = "uriSuffix";
+ public static String DR_CERT_NAME = "cert-name";
+ public static String DR_KEYSTORE_PASSWORD = "keystore-password";
+ public static String DR_KEYSTORE = "keystore";
+ public static String DR_CONNECT_TIMEOUT = "connectTimeoutMs";
+ public static String DR_READ_TIMEOUT = "readTimeoutMs";
+
+ public static final String ES_SUGGEST_API = "_suggest";
+ public static final String ES_COUNT_API = "_count";
+ public static final String ES_SEARCH_API = "_search";
+
+ public static final String UI_FILTER_VIEW_NAME_PARAMETER = "viewName";
+ public static final String UI_FILTER_ID_LIST_PARAMETER = "filterIdList";
+
+ public static final String ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT =
+ "entityautosuggestindex-localhost";
+ public static final String ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT =
+ "/etc/autoSuggestSettings.json";
+ public static final String ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT =
+ "/etc/autoSuggestMappings.json";
+ public static final String ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT = "/etc/dynamicMappings.json";
+ public static final String FILTER_LIST_FILE_DEFAULT =
+ CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_filters.json";
+ public static final String FILTER_MAPPING_FILE_DEFAULT =
+ CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_views.json";
+
+ public static final String SUGGESTION_TEXT_SEPARATOR = " -- ";
+
+ // Injected Attributes
+ public static String URI_ATTR_NAME = "uri";
+
+ public static final String URI_VERSION_REGEX_PATTERN = "aai/v[\\d]+/";
+
+ public static final String getConfigPath(String configFile) {
+ return AJSC_HOME + FILESEP + configFile;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java
new file mode 100644
index 0000000..169dbc6
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java
@@ -0,0 +1,174 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.config;
+
+import java.util.ArrayList;
+
+import org.onap.aai.sparky.config.SparkyResourceLoader;
+
+/**
+ * The Class VisualizationConfigs.
+ */
+public class VisualizationConfigs {
+
+ private int maxSelfLinkTraversalDepth;
+
+ private boolean visualizationDebugEnabled;
+
+ private String aaiEntityNodeDescriptors;
+
+ private String generalNodeClassName;
+
+ private String searchNodeClassName;
+
+ private String selectedSearchedNodeClassName;
+
+ private int numOfThreadsToFetchNodeIntegrity;
+
+ private boolean makeAllNeighborsBidirectional;
+
+ private ArrayList<String> shallowEntities;
+
+ private boolean gizmoEnabled;
+
+ private SparkyResourceLoader resourceLoader;
+
+ /**
+ * Instantiates a new visualization config.
+ */
+ public VisualizationConfigs() {
+ this.gizmoEnabled = false;
+ }
+
+ public ArrayList<String> getShallowEntities() {
+ return shallowEntities;
+ }
+
+ public void setShallowEntities(ArrayList<String> shallowEntities) {
+ this.shallowEntities = shallowEntities;
+ }
+
+ /**
+ * Make all neighbors bidirectional.
+ *
+ * @return true, if successful
+ */
+ public boolean makeAllNeighborsBidirectional() {
+ return makeAllNeighborsBidirectional;
+ }
+
+ public void setMakeAllNeighborsBidirectional(boolean makeAllNeighborsBidirectional) {
+ this.makeAllNeighborsBidirectional = makeAllNeighborsBidirectional;
+ }
+
+ public String getSelectedSearchedNodeClassName() {
+ return selectedSearchedNodeClassName;
+ }
+
+ public void setSelectedSearchedNodeClassName(String selectedSearchedNodeClassName) {
+ this.selectedSearchedNodeClassName = selectedSearchedNodeClassName;
+ }
+
+ public String getGeneralNodeClassName() {
+ return generalNodeClassName;
+ }
+
+ public void setGeneralNodeClassName(String generalNodeClassName) {
+ this.generalNodeClassName = generalNodeClassName;
+ }
+
+ public String getSearchNodeClassName() {
+ return searchNodeClassName;
+ }
+
+ public void setSearchNodeClassName(String searchNodeClassName) {
+ this.searchNodeClassName = searchNodeClassName;
+ }
+
+ public String getAaiEntityNodeDescriptors() {
+ return aaiEntityNodeDescriptors;
+ }
+
+ public void setAaiEntityNodeDescriptors(String aaiEntityNodeDescriptors) {
+ this.aaiEntityNodeDescriptors = aaiEntityNodeDescriptors;
+ }
+
+ public boolean isVisualizationDebugEnabled() {
+ return visualizationDebugEnabled;
+ }
+
+ public void setVisualizationDebugEnabled(boolean visualizationDebugEnabled) {
+ this.visualizationDebugEnabled = visualizationDebugEnabled;
+ }
+
+ public void setMaxSelfLinkTraversalDepth(int maxSelfLinkTraversalDepth) {
+ this.maxSelfLinkTraversalDepth = maxSelfLinkTraversalDepth;
+ }
+
+ public int getMaxSelfLinkTraversalDepth() {
+ return maxSelfLinkTraversalDepth;
+ }
+
+ public int getNumOfThreadsToFetchNodeIntegrity() {
+ return numOfThreadsToFetchNodeIntegrity;
+ }
+
+ public void setNumOfThreadsToFetchNodeIntegrity(int numOfThreadsToFetchNodeIntegrity) {
+ this.numOfThreadsToFetchNodeIntegrity = numOfThreadsToFetchNodeIntegrity;
+ }
+
+ public boolean isGizmoEnabled() {
+ return gizmoEnabled;
+ }
+
+ public void setGizmoEnabled(boolean gizmoEnabled) {
+ this.gizmoEnabled = gizmoEnabled;
+ }
+
+ public SparkyResourceLoader getResourceLoader() {
+ return resourceLoader;
+ }
+
+ public void setResourceLoader(SparkyResourceLoader resourceLoader) {
+ this.resourceLoader = resourceLoader;
+ }
+
+ @Override
+ public String toString() {
+ return "VisualizationConfigs [maxSelfLinkTraversalDepth=" + maxSelfLinkTraversalDepth
+ + ", visualizationDebugEnabled=" + visualizationDebugEnabled + ", "
+ + (aaiEntityNodeDescriptors != null ? "aaiEntityNodeDescriptors=" + aaiEntityNodeDescriptors + ", "
+ : "")
+ + (generalNodeClassName != null ? "generalNodeClassName=" + generalNodeClassName + ", " : "")
+ + (searchNodeClassName != null ? "searchNodeClassName=" + searchNodeClassName + ", " : "")
+ + (selectedSearchedNodeClassName != null
+ ? "selectedSearchedNodeClassName=" + selectedSearchedNodeClassName + ", " : "")
+ + "numOfThreadsToFetchNodeIntegrity=" + numOfThreadsToFetchNodeIntegrity
+ + ", makeAllNeighborsBidirectional=" + makeAllNeighborsBidirectional + ", "
+ + (shallowEntities != null ? "shallowEntities=" + shallowEntities + ", " : "") + "gizmoEnabled="
+ + gizmoEnabled + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java
new file mode 100644
index 0000000..3981626
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java
@@ -0,0 +1,831 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.OxmModelLoader;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
+import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingAction;
+import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * The Class ActiveInventoryNode.
+ */
+public class ActiveInventoryNode {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(
+ ActiveInventoryNode.class);
+ private static final String URIRegexPattern = "aai/v[\\d]/";
+
+ public static final int DEFAULT_INIT_NODE_DEPTH = 1000;
+
+ private String nodeId;
+ private String selfLink;
+
+ private boolean isRootNode;
+ private ConcurrentLinkedDeque<String> inboundNeighbors;
+ private ConcurrentLinkedDeque<String> outboundNeighbors;
+
+ private ConcurrentLinkedDeque<String> inboundNeighborSelfLinks;
+ private ConcurrentLinkedDeque<String> outboundNeighborSelfLinks;
+
+ private List<JsonNode> complexGroups;
+ private List<RelationshipList> relationshipLists;
+ private int nodeDepth;
+ private OperationResult opResult;
+
+ private boolean processingErrorOccurred;
+ private List<String> errorCauses;
+ private boolean selflinkRetrievalFailure;
+ private NodeProcessingState state;
+
+ private boolean processedNeighbors;
+
+ private boolean selfLinkPendingResolve;
+
+ /*
+   * I think we shouldn't be using these crutch flags. If they are meant to
+   * represent the current state of the node, then they should be legitimate
+   * state transitions.
+ */
+
+ private boolean selfLinkDeterminationPending;
+
+ private AtomicBoolean selfLinkProcessed;
+ private AtomicBoolean nodeIntegrityProcessed;
+
+ private OxmModelLoader oxmModelLoader;
+ private VisualizationConfigs visualizationConfigs;
+
+ private String entityType;
+ private String primaryKeyName;
+ private String primaryKeyValue;
+
+ private boolean nodeValidated;
+ private boolean nodeIssue;
+ private boolean ignoredByFilter;
+
+ private boolean resolvedSelfLink;
+
+ private Map<String, String> properties;
+ private ArrayList<String> queryParams;
+
+ private ObjectMapper mapper;
+
+ private OxmEntityLookup oxmEntityLookup;
+
+ /**
+ * Instantiates a new active inventory node.
+ *
+   * @param visualizationConfigs the visualization configs
+   * @param oxmEntityLookup the oxm entity lookup
+ */
+ public ActiveInventoryNode(VisualizationConfigs visualizationConfigs, OxmEntityLookup oxmEntityLookup) {
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.nodeId = null;
+ this.entityType = null;
+ this.selfLink = null;
+ this.properties = new HashMap<String, String>();
+ this.processingErrorOccurred = false;
+ this.errorCauses = new ArrayList<String>();
+ this.selflinkRetrievalFailure = false;
+ this.nodeIssue = false;
+ this.nodeValidated = false;
+ this.state = NodeProcessingState.INIT;
+ this.selfLinkPendingResolve = false;
+ this.selfLinkDeterminationPending = false;
+
+ selfLinkProcessed = new AtomicBoolean(Boolean.FALSE);
+ nodeIntegrityProcessed = new AtomicBoolean(Boolean.FALSE);
+ oxmModelLoader = null;
+    this.visualizationConfigs = visualizationConfigs;
+
+ isRootNode = false;
+ inboundNeighbors = new ConcurrentLinkedDeque<String>();
+ outboundNeighbors = new ConcurrentLinkedDeque<String>();
+
+ inboundNeighborSelfLinks = new ConcurrentLinkedDeque<String>();
+ outboundNeighborSelfLinks = new ConcurrentLinkedDeque<String>();
+
+ complexGroups = new ArrayList<JsonNode>();
+ relationshipLists = new ArrayList<RelationshipList>();
+ nodeDepth = DEFAULT_INIT_NODE_DEPTH;
+ queryParams = new ArrayList<String>();
+
+ mapper = new ObjectMapper();
+
+ processedNeighbors = false;
+ resolvedSelfLink = false;
+
+
+ }
+
+ public void clearQueryParams() {
+ queryParams.clear();
+ }
+
+  public void addQueryParam(String queryParam) {
+    if (queryParam != null && !queryParams.contains(queryParam)) {
+      queryParams.add(queryParam);
+    }
+  }
+
+ public void addInboundSelfLink(String link) {
+
+ if (link == null) {
+ return;
+ }
+
+ if (!inboundNeighborSelfLinks.contains(link)) {
+ inboundNeighborSelfLinks.add(link);
+ }
+
+ }
+
+ public void addOutboundSelfLink(String link) {
+
+ if (link == null) {
+ return;
+ }
+
+ if (!outboundNeighborSelfLinks.contains(link)) {
+ outboundNeighborSelfLinks.add(link);
+ }
+
+ }
+
+ public Collection<String> getInboundNeighborSelfLinks() {
+ return inboundNeighborSelfLinks;
+ }
+
+ public Collection<String> getOutboundNeighborSelfLinks() {
+ return outboundNeighborSelfLinks;
+ }
+
+ public void addQueryParams(Collection<String> params) {
+
+    if (params != null && !params.isEmpty()) {
+
+ for (String param : params) {
+ addQueryParam(param);
+ }
+ }
+ }
+
+
+ public List<String> getQueryParams() {
+ return queryParams;
+ }
+
+ public void setSelfLinkDeterminationPending(boolean selfLinkDeterminationPending) {
+ this.selfLinkDeterminationPending = selfLinkDeterminationPending;
+ }
+
+ public boolean isSelfLinkDeterminationPending() {
+ return selfLinkDeterminationPending;
+ }
+
+ public NodeProcessingState getState() {
+ return state;
+ }
+
+ public List<JsonNode> getComplexGroups() {
+ return complexGroups;
+ }
+
+ public List<RelationshipList> getRelationshipLists() {
+ return relationshipLists;
+ }
+
+ public OperationResult getOpResult() {
+ return opResult;
+ }
+
+ public void setOpResult(OperationResult opResult) {
+ this.opResult = opResult;
+ }
+
+ public String getPrimaryKeyName() {
+ return primaryKeyName;
+ }
+
+ /**
+ * Gets the visualization config.
+ *
+ * @return the visualization config
+ */
+ public VisualizationConfigs getvisualizationConfigs() {
+ return visualizationConfigs;
+ }
+
+ public int getNodeDepth() {
+ return nodeDepth;
+ }
+
+ public void setNodeDepth(int nodeDepth) {
+ this.nodeDepth = nodeDepth;
+ }
+
+ /**
+ * Sets the visualization config.
+ *
+ * @param visualizationConfig the new visualization config
+ */
+ public void setvisualizationConfig(VisualizationConfigs visualizationConfigs) {
+ this.visualizationConfigs = visualizationConfigs;
+ }
+
+ public OxmModelLoader getOxmModelLoader() {
+ return oxmModelLoader;
+ }
+
+ public void setPrimaryKeyName(String primaryKeyName) {
+ this.primaryKeyName = primaryKeyName;
+ }
+
+ public String getPrimaryKeyValue() {
+ return primaryKeyValue;
+ }
+
+ public void setPrimaryKeyValue(String primaryKeyValue) {
+ this.primaryKeyValue = primaryKeyValue;
+ }
+
+ public boolean isNodeValidated() {
+ return nodeValidated;
+ }
+
+ public void setNodeValidated(boolean nodeValidated) {
+ this.nodeValidated = nodeValidated;
+ }
+
+ public boolean isNodeIssue() {
+ return nodeIssue;
+ }
+
+ public boolean isIgnoredByFilter() {
+ return ignoredByFilter;
+ }
+
+ public void setIgnoredByFilter(boolean ignoredByFilter) {
+ this.ignoredByFilter = ignoredByFilter;
+ }
+
+ public void setNodeIssue(boolean nodeIssue) {
+ this.nodeIssue = nodeIssue;
+ }
+
+ /**
+ * Checks for processed neighbors.
+ *
+ * @return true, if successful
+ */
+ public boolean hasProcessedNeighbors() {
+ return processedNeighbors;
+ }
+
+ public void setProcessedNeighbors(boolean processedNeighbors) {
+ this.processedNeighbors = processedNeighbors;
+ }
+
+ /**
+ * Checks for resolved self link.
+ *
+ * @return true, if successful
+ */
+ public boolean hasResolvedSelfLink() {
+ return resolvedSelfLink;
+ }
+
+ public void setResolvedSelfLink(boolean resolvedSelfLink) {
+ this.resolvedSelfLink = resolvedSelfLink;
+ }
+
+ /**
+ * Checks for neighbors.
+ *
+ * @return true, if successful
+ */
+ public boolean hasNeighbors() {
+ return (inboundNeighbors.size() > 0 || outboundNeighbors.size() > 0);
+ }
+
+ /**
+ * Adds the inbound neighbor.
+ *
+ * @param nodeId the node id
+ */
+ public void addInboundNeighbor(String nodeId) {
+
+ if (nodeId == null) {
+ return;
+ }
+
+ if (!inboundNeighbors.contains(nodeId)) {
+ inboundNeighbors.add(nodeId);
+ }
+
+ }
+
+ /**
+ * Adds the outbound neighbor.
+ *
+ * @param nodeId the node id
+ */
+ public void addOutboundNeighbor(String nodeId) {
+
+ if (nodeId == null) {
+ return;
+ }
+
+ if (!outboundNeighbors.contains(nodeId)) {
+ outboundNeighbors.add(nodeId);
+ }
+
+ }
+
+ public boolean isAtMaxDepth() {
+ return (nodeDepth >= this.visualizationConfigs.getMaxSelfLinkTraversalDepth());
+ }
+
+ public ConcurrentLinkedDeque<String> getInboundNeighbors() {
+ return inboundNeighbors;
+ }
+
+ public void setInboundNeighbors(ConcurrentLinkedDeque<String> inboundNeighbors) {
+ this.inboundNeighbors = inboundNeighbors;
+ }
+
+ public Collection<String> getOutboundNeighbors() {
+ List<String> result = new ArrayList<String>();
+
+ Iterator<String> neighborIterator = outboundNeighbors.iterator();
+
+ while (neighborIterator.hasNext()) {
+ result.add(neighborIterator.next());
+ }
+
+ return result;
+ }
+
+ /**
+ * Change depth.
+ *
+ * @param newDepth the new depth
+ * @return true, if successful
+ */
+ public boolean changeDepth(int newDepth) {
+
+ boolean nodeDepthWasChanged = false;
+
+ if (newDepth < nodeDepth) {
+ LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_DEPTH, nodeId,
+ String.valueOf(this.nodeDepth), String.valueOf(newDepth));
+ this.nodeDepth = newDepth;
+ nodeDepthWasChanged = true;
+ }
+
+ return nodeDepthWasChanged;
+
+ }
+
+ public void setOutboundNeighbors(ConcurrentLinkedDeque<String> outboundNeighbors) {
+ this.outboundNeighbors = outboundNeighbors;
+ }
+
+ public boolean isRootNode() {
+ return isRootNode;
+ }
+
+ public void setRootNode(boolean isRootNode) {
+ this.isRootNode = isRootNode;
+ }
+
+ /**
+ * Change state.
+ *
+ * @param newState the new state
+ * @param action the action
+ */
+ public void changeState(NodeProcessingState newState, NodeProcessingAction action) {
+ /*
+ * NodeId may be null depending on the current node life-cycle state
+ */
+
+ if (getNodeId() != null) {
+ LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_STATE, state.toString(), newState.toString(), action.toString());
+ } else {
+ LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_STATE_NO_NODE_ID, state.toString(), newState.toString(), action.toString());
+ }
+ this.state = newState;
+ }
+
+ public boolean isSelfLinkPendingResolve() {
+ return selfLinkPendingResolve;
+ }
+
+ public void setSelfLinkPendingResolve(boolean selfLinkPendingResolve) {
+ this.selfLinkPendingResolve = selfLinkPendingResolve;
+ }
+
+ public boolean isSelflinkRetrievalFailure() {
+ return selflinkRetrievalFailure;
+ }
+
+ public void setSelflinkRetrievalFailure(boolean selflinkRetrievalFailure) {
+ this.selflinkRetrievalFailure = selflinkRetrievalFailure;
+ }
+
+ public void setOxmModelLoader(OxmModelLoader loader) {
+ this.oxmModelLoader = loader;
+ }
+
+ public boolean getSelfLinkProcessed() {
+ return selfLinkProcessed.get();
+ }
+
+ public void setSelfLinkProcessed(boolean selfLinkProcessed) {
+ this.selfLinkProcessed.set(selfLinkProcessed);
+ }
+
+ public boolean getNodeIntegrityProcessed() {
+ return nodeIntegrityProcessed.get();
+ }
+
+ public void setNodeIntegrityProcessed(boolean nodeIntegrityProcessed) {
+ this.nodeIntegrityProcessed.set(nodeIntegrityProcessed);
+ }
+
+ public boolean isDirectSelfLink() {
+ return isDirectSelfLink(this.selfLink);
+ }
+
+ /**
+ * Checks if is direct self link.
+ *
+ * @param link the link
+ * @return true, if is direct self link
+ */
+ public static boolean isDirectSelfLink(String link) {
+
+ if (link == null) {
+ return false;
+ }
+
+ return link.contains("/resources/id/");
+
+ }
+
+ public Map<String, String> getProperties() {
+ return properties;
+ }
+
+ /**
+ * Adds the error cause.
+ *
+ * @param error the error
+ */
+ public void addErrorCause(String error) {
+ if (!errorCauses.contains(error)) {
+ errorCauses.add(error);
+ }
+ }
+
+ /**
+ * Adds the property.
+ *
+ * @param key the key
+ * @param value the value
+ */
+ public void addProperty(String key, String value) {
+ properties.put(key, value);
+ }
+
+ public boolean isProcessingErrorOccurred() {
+ return processingErrorOccurred;
+ }
+
+ public void setProcessingErrorOccurred(boolean processingErrorOccurred) {
+ this.processingErrorOccurred = processingErrorOccurred;
+ }
+
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ public void setNodeId(String nodeId) {
+ this.nodeId = nodeId;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public String getSelfLink() {
+ return selfLink;
+ }
+
+ /**
+ * Calculate edit attribute uri.
+ *
+ * @param link the link
+ * @return the string
+ */
+ public String calculateEditAttributeUri(String link) {
+ String uri = null;
+ Pattern pattern = Pattern.compile(URIRegexPattern);
+ Matcher matcher = pattern.matcher(link);
+ if (matcher.find()) {
+ uri = link.substring(matcher.end());
+ }
+ return uri;
+ }
+
+ /**
+ * Analyze self link relationship list.
+ *
+ * @param jsonResult the json result
+ * @return the relationship list
+ */
+ private RelationshipList analyzeSelfLinkRelationshipList(String jsonResult) {
+
+
+ RelationshipList relationshipList = null;
+
+ try {
+ relationshipList = mapper.readValue(jsonResult, RelationshipList.class);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.SELF_LINK_RELATIONSHIP_LIST_ERROR, exc.toString());
+ }
+
+ return relationshipList;
+ }
+
+ /**
+ * Adds the relationship list.
+ *
+ * @param relationshipList the relationship list
+ */
+ public void addRelationshipList(RelationshipList relationshipList) {
+
+ if (!relationshipLists.contains(relationshipList)) {
+ relationshipLists.add(relationshipList);
+ }
+
+ }
+
+ /**
+ * Process pathed self link response.
+ *
+ * @param selfLinkJsonResponse the self link json response
+ * @param startNodeType the start node type
+ * @param startNodeResourceKey the start node resource key
+ */
+ public void processPathedSelfLinkResponse(String selfLinkJsonResponse, String startNodeType,
+ String startNodeResourceKey) {
+
+ if (selfLinkJsonResponse == null || selfLinkJsonResponse.length() == 0) {
+ LOG.error(AaiUiMsgs.SELF_LINK_NULL_EMPTY_RESPONSE);
+ return;
+ }
+
+ try {
+ JsonNode jsonNode = mapper.readValue(selfLinkJsonResponse, JsonNode.class);
+
+ Iterator<Entry<String, JsonNode>> fieldNames = jsonNode.fields();
+ Entry<String, JsonNode> field = null;
+
+ while (fieldNames.hasNext()) {
+
+ field = fieldNames.next();
+
+ /*
+ * Is there a way to tell if the field is an aggregate or an atomic value? This is where our
+ * flattening code needs to live
+ */
+
+ String fieldName = field.getKey();
+
+ if ("relationship-list".equals(fieldName)) {
+
+ /*
+           * Parse the relationship list as before, so we can determine whether to keep it or
+           * traverse it after the evaluative node-depth logic has run.
+ */
+ RelationshipList relationshipList =
+ analyzeSelfLinkRelationshipList(field.getValue().toString());
+
+ if (relationshipList != null) {
+ this.relationshipLists.add(relationshipList);
+ } else {
+ LOG.info(AaiUiMsgs.NO_RELATIONSHIP_DISCOVERED, nodeId);
+ }
+ } else {
+ JsonNode nodeValue = field.getValue();
+
+ if (nodeValue != null && nodeValue.isValueNode()) {
+
+ /*
+             * Before we blindly add the fieldName and value to our property set, do one more
+             * check to see whether the field name is an entity type. If it is, the complex
+             * attribute processing code will pick it up and process it instead; in practice
+             * this mostly applies to array node types.
+ */
+
+ if (oxmEntityLookup.getEntityDescriptors().get(fieldName) == null) {
+ /*
+               * This is not an entity type as far as we can tell, so we can add it to our
+               * property set.
+ */
+
+ addProperty(fieldName, nodeValue.asText());
+
+ }
+
+        } else if (nodeValue != null) {
+
+ if (nodeValue.isArray()) {
+
+ /*
+             * Make sure the array entity-type collection is not an entityType before adding it
+             * to the property set. The expectation is that it will be added to the visualization
+             * through a complex group or relationship.
+ */
+
+ if (oxmEntityLookup.getEntityDescriptors().get(field.getKey()) == null) {
+ /*
+               * This is not an entity type as far as we can tell, so we can add it to our
+               * property set.
+ */
+
+ addProperty(field.getKey(), nodeValue.toString());
+
+ }
+
+ } else {
+
+ complexGroups.add(nodeValue);
+
+ }
+
+ }
+
+ }
+
+ }
+
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, "POJO", exc.getLocalizedMessage());
+ this.setProcessingErrorOccurred(true);
+ this.addErrorCause(
+ "An error occurred while converting JSON into POJO = " + exc.getLocalizedMessage());
+ }
+
+ }
+
+ public void setSelfLink(String selfLink) {
+ this.selfLink = selfLink;
+ }
+
+ /**
+ * Adds the complex group.
+ *
+ * @param complexGroup the complex group
+ */
+ public void addComplexGroup(JsonNode complexGroup) {
+
+ if (!complexGroups.contains(complexGroup)) {
+ complexGroups.add(complexGroup);
+ }
+
+ }
+
+ /**
+ * Gets the padding.
+ *
+ * @param level the level
+ * @param paddingString the padding string
+ * @return the padding
+ */
+ private static String getPadding(int level, String paddingString) {
+ StringBuilder sb = new StringBuilder(32);
+ for (int x = 0; x < level; x++) {
+ sb.append(paddingString);
+ }
+ return sb.toString();
+ }
+
+ /**
+ * Dump node tree.
+ *
+ * @param showProperties the show properties
+ * @return the string
+ */
+ public String dumpNodeTree(boolean showProperties) {
+ return dumpNodeTree(0, showProperties);
+ }
+
+ /**
+ * Dump node tree.
+ *
+ * @param level the level
+ * @param showProperties the show properties
+ * @return the string
+ */
+ private String dumpNodeTree(int level, boolean showProperties) {
+ StringBuilder sb = new StringBuilder(128);
+ String padding = getPadding(level, " ");
+
+ sb.append(padding + " -> " + getNodeId() + "]").append("\n");
+ sb.append(padding + " -> primaryKeyName = " + primaryKeyName + "]").append("\n");
+ sb.append(padding + " -> primaryKeyValue = " + primaryKeyValue + "]").append("\n");
+ sb.append(padding + " -> entityType = " + entityType + "]").append("\n");
+
+ if (showProperties) {
+ Set<Entry<String, String>> entries = properties.entrySet();
+ for (Entry<String, String> entry : entries) {
+ sb.append(
+ padding + " ----> " + String.format("[ %s => %s ]", entry.getKey(), entry.getValue()))
+ .append("\n");
+ }
+ }
+
+ sb.append(padding + " ----> " + String.format("[ selfLink => %s ]", getSelfLink()))
+ .append("\n");
+
+ sb.append("\n").append(padding + " ----> Inbound Neighbors:").append("\n");
+
+ for (String inboundNeighbor : inboundNeighbors) {
+ sb.append("\n").append(inboundNeighbor.toString());
+ }
+
+ sb.append(padding + " ----> Outbound Neighbors:").append("\n");
+ sb.append("\n").append(padding + " ----> Outbound Neighbors:").append("\n");
+
+ for (String outboundNeighbor : outboundNeighbors) {
+ sb.append("\n").append(outboundNeighbor.toString());
+ }
+
+ return sb.toString();
+
+ }
+
+ public String getProcessingErrorCauses() {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ for (String c : this.errorCauses) {
+ sb.append(c).append("\n");
+ }
+
+ return sb.toString();
+ }
+}
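Note (illustrative, not part of this change): a minimal sketch of driving ActiveInventoryNode in isolation. It assumes VisualizationConfigs and OxmEntityLookup expose no-arg constructors (in the running service they are Spring-managed beans), that the entity lookup starts with an empty, non-null descriptor map, and that the node id, entity type, self-link, and JSON fragment below are all made up.

    import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
    import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
    import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode;

    public class ActiveInventoryNodeSketch {

      public static void main(String[] args) {
        // Assumed no-arg construction; in Sparky these collaborators come from Spring wiring.
        VisualizationConfigs configs = new VisualizationConfigs();
        OxmEntityLookup entityLookup = new OxmEntityLookup();

        ActiveInventoryNode node = new ActiveInventoryNode(configs, entityLookup);
        node.setNodeId("example-node-1");              // hypothetical id
        node.setEntityType("generic-vnf");             // hypothetical entity type
        node.setSelfLink("https://aai-host:8443/aai/v13/network/generic-vnfs/generic-vnf/example-vnf-1");

        // Made-up pathed self-link fragment; real payloads carry many more attributes
        // and usually a populated relationship-list.
        String selfLinkJson = "{ \"vnf-id\": \"example-vnf-1\", \"in-maint\": \"false\" }";
        node.processPathedSelfLinkResponse(selfLinkJson, "generic-vnf", "example-vnf-1");

        node.addInboundNeighbor("example-node-0");
        node.addOutboundNeighbor("example-node-2");

        System.out.println(node.dumpNodeTree(true));
        if (node.isProcessingErrorOccurred()) {
          System.err.println(node.getProcessingErrorCauses());
        }
      }
    }

Simple value fields from the fragment end up in getProperties(), while anything matching an OXM entity descriptor is instead left for complex-group or relationship processing.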
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java
new file mode 100644
index 0000000..5da9c20
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java
@@ -0,0 +1,93 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * The Class D3VisualizationOutput.
+ */
+public class D3VisualizationOutput {
+
+ public GraphMeta graphMeta;
+ public List<SparkyGraphNode> nodes;
+ public List<SparkyGraphLink> links;
+ public InlineMessage inlineMessage;
+
+ /**
+   * Instantiates a new D3 visualization output.
+ */
+ public D3VisualizationOutput() {
+ nodes = new ArrayList<SparkyGraphNode>();
+ links = new ArrayList<SparkyGraphLink>();
+ inlineMessage = null;
+ }
+
+ public GraphMeta getGraphMeta() {
+ return graphMeta;
+ }
+
+ /**
+ * Peg counter.
+ *
+ * @param counterName the counter name
+ */
+ public void pegCounter(String counterName) {
+ graphMeta.pegCounter(counterName);
+ }
+
+ public void setGraphMeta(GraphMeta graphMeta) {
+ this.graphMeta = graphMeta;
+ }
+
+ /**
+ * Adds the nodes.
+ *
+ * @param nodes the nodes
+ */
+ public void addNodes(List<SparkyGraphNode> nodes) {
+ this.nodes.addAll(nodes);
+ }
+
+ /**
+ * Adds the links.
+ *
+ * @param links the links
+ */
+ public void addLinks(List<SparkyGraphLink> links) {
+ this.links.addAll(links);
+ }
+
+ public InlineMessage getInlineMessage() {
+ return inlineMessage;
+ }
+
+ public void setInlineMessage(InlineMessage inlineMessage) {
+ this.inlineMessage = inlineMessage;
+ }
+
+
+}
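Note (illustrative, not part of this change): the output container is typically populated roughly as below. The node and link lists are left empty so the sketch does not depend on the SparkyGraphNode/SparkyGraphLink constructors, and it assumes EntityTypeAggregation simply accumulates counters internally.

    import java.util.Collections;

    import org.onap.aai.sparky.viewandinspect.entity.D3VisualizationOutput;
    import org.onap.aai.sparky.viewandinspect.entity.GraphMeta;
    import org.onap.aai.sparky.viewandinspect.entity.InlineMessage;

    public class D3VisualizationOutputSketch {

      public static void main(String[] args) {
        D3VisualizationOutput output = new D3VisualizationOutput();

        // graphMeta is not created by the constructor, so set it before pegging counters.
        GraphMeta meta = new GraphMeta();
        meta.setNumNodes(0);
        meta.setNumLinks(0);
        meta.setRenderTimeInMs(42L);
        output.setGraphMeta(meta);

        output.addNodes(Collections.emptyList());
        output.addLinks(Collections.emptyList());
        output.setInlineMessage(new InlineMessage("INFO", "no nodes rendered yet"));

        // Counter names are arbitrary; "generic-vnf" is just an example entity type.
        output.pegCounter("generic-vnf");

        System.out.println(output.getGraphMeta());
        System.out.println(output.getInlineMessage());
      }
    }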
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntry.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntry.java
new file mode 100644
index 0000000..91c615e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntry.java
@@ -0,0 +1,81 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+/**
+ * The Class EntityEntry.
+ */
+public class EntityEntry {
+
+ private String entityType;
+
+ private String entityPrimaryKeyValue;
+
+ private String searchTags;
+
+ private String entityId;
+
+ public String getEntityId() {
+ return entityId;
+ }
+
+ public void setEntityId(String entityId) {
+ this.entityId = entityId;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public String getEntityPrimaryKeyValue() {
+ return entityPrimaryKeyValue;
+ }
+
+ public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) {
+ this.entityPrimaryKeyValue = entityPrimaryKeyValue;
+ }
+
+ public String getSearchTags() {
+ return searchTags;
+ }
+
+ public void setSearchTags(String searchTags) {
+ this.searchTags = searchTags;
+ }
+
+ @Override
+ public String toString() {
+ return "EntityEntry [" + (entityType != null ? "entityType=" + entityType + ", " : "")
+ + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", "
+ : "")
+ + (searchTags != null ? "searchTags=" + searchTags + ", " : "")
+ + (entityId != null ? "entityId=" + entityId : "") + "]";
+ }
+
+} \ No newline at end of file
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoEntity.java
new file mode 100644
index 0000000..39106d2
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoEntity.java
@@ -0,0 +1,98 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.util.Arrays;
+import java.util.Map;
+
+public class GizmoEntity {
+
+ private String id;
+ private String type;
+ private String url;
+ private Map<String, String> properties;
+ private GizmoRelationshipHint[] in;
+ private GizmoRelationshipHint[] out;
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+ public Map<String, String> getProperties() {
+ return properties;
+ }
+
+ public void setProperties(Map<String, String> properties) {
+ this.properties = properties;
+ }
+
+ public GizmoRelationshipHint[] getIn() {
+ return in;
+ }
+
+ public void setIn(GizmoRelationshipHint[] in) {
+ this.in = in;
+ }
+
+ public GizmoRelationshipHint[] getOut() {
+ return out;
+ }
+
+ public void setOut(GizmoRelationshipHint[] out) {
+ this.out = out;
+ }
+
+ @Override
+ public String toString() {
+ return "GizmoEntity [" + (id != null ? "id=" + id + ", " : "")
+ + (type != null ? "type=" + type + ", " : "") + (url != null ? "url=" + url + ", " : "")
+ + (properties != null ? "properties=" + properties + ", " : "")
+ + (in != null ? "in=" + Arrays.toString(in) + ", " : "")
+ + (out != null ? "out=" + Arrays.toString(out) : "") + "]";
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipEntity.java
new file mode 100644
index 0000000..31ea78a
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipEntity.java
@@ -0,0 +1,103 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.util.Map;
+
+public class GizmoRelationshipEntity {
+
+ /*
+ * {"id":"oe4ur-3a0-27th-fu8","type":"has","url":
+ * "services/inventory/relationships/v8/has/oe4ur-3a0-27th-fu8","source":
+ * "services/inventory/v8/generic-vnf/4248","target":
+ * "services/inventory/v8/vserver/20528",
+ * "properties":{"is-parent":"true","multiplicity":"many","has-del-target":
+ * "true","uses-resource": "true"}}
+ */
+
+ private String id;
+ private String type;
+ private String url;
+ private String source;
+ private String target;
+ private Map<String, String> properties;
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ public void setSource(String source) {
+ this.source = source;
+ }
+
+ public String getTarget() {
+ return target;
+ }
+
+ public void setTarget(String target) {
+ this.target = target;
+ }
+
+ public Map<String, String> getProperties() {
+ return properties;
+ }
+
+ public void setProperties(Map<String, String> properties) {
+ this.properties = properties;
+ }
+
+ @Override
+ public String toString() {
+ return "GizmoRelationshipEntity [" + (id != null ? "id=" + id + ", " : "")
+ + (type != null ? "type=" + type + ", " : "") + (url != null ? "url=" + url + ", " : "")
+ + (source != null ? "source=" + source + ", " : "") + (target != null ? "target=" + target + ", " : "")
+ + (properties != null ? "properties=" + properties : "") + "]";
+ }
+
+}
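Note (illustrative, not part of this change): because the class is a plain bean whose field names match the payload keys shown in the comment above, a Gizmo relationship fragment can be bound with Jackson along these lines.

    import com.fasterxml.jackson.databind.ObjectMapper;

    import org.onap.aai.sparky.viewandinspect.entity.GizmoRelationshipEntity;

    public class GizmoRelationshipSketch {

      public static void main(String[] args) throws Exception {
        // Trimmed version of the sample payload documented in the class comment.
        String json = "{\"id\":\"oe4ur-3a0-27th-fu8\",\"type\":\"has\","
            + "\"url\":\"services/inventory/relationships/v8/has/oe4ur-3a0-27th-fu8\","
            + "\"source\":\"services/inventory/v8/generic-vnf/4248\","
            + "\"target\":\"services/inventory/v8/vserver/20528\","
            + "\"properties\":{\"is-parent\":\"true\",\"multiplicity\":\"many\"}}";

        GizmoRelationshipEntity rel = new ObjectMapper().readValue(json, GizmoRelationshipEntity.class);

        System.out.println(rel);                  // uses the toString() defined above
        System.out.println(rel.getProperties());  // {is-parent=true, multiplicity=many}
      }
    }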
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipHint.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipHint.java
new file mode 100644
index 0000000..5e22164
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipHint.java
@@ -0,0 +1,77 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+public class GizmoRelationshipHint {
+
+ private String id;
+ private String type;
+ private String url;
+ private String source;
+ private String target;
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ public void setSource(String source) {
+ this.source = source;
+ }
+
+ public String getTarget() {
+ return target;
+ }
+
+ public void setTarget(String target) {
+ this.target = target;
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphMeta.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphMeta.java
new file mode 100644
index 0000000..7e53665
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphMeta.java
@@ -0,0 +1,147 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import org.onap.aai.sparky.viewandinspect.EntityTypeAggregation;
+
+/**
+ * The Class GraphMeta.
+ */
+public class GraphMeta {
+
+ private com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDescriptors;
+
+ private int numNodes;
+
+ private int numLinks;
+
+ private long renderTimeInMs;
+
+ private int numLinksResolvedSuccessfullyFromCache;
+
+ private int numLinksResolvedSuccessfullyFromServer;
+
+ private int numLinkResolveFailed;
+
+ private EntityTypeAggregation entitySummary;
+
+ /**
+ * Instantiates a new graph meta.
+ */
+ public GraphMeta() {
+ entitySummary = new EntityTypeAggregation();
+ }
+
+ public EntityTypeAggregation getEntitySummary() {
+ return entitySummary;
+ }
+
+ public void setEntitySummary(EntityTypeAggregation entitySummary) {
+ this.entitySummary = entitySummary;
+ }
+
+ public com.fasterxml.jackson.databind.JsonNode getAaiEntityNodeDescriptors() {
+ return aaiEntityNodeDescriptors;
+ }
+
+ public void setAaiEntityNodeDescriptors(
+ com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDefinitions) {
+ this.aaiEntityNodeDescriptors = aaiEntityNodeDefinitions;
+ }
+
+ public int getNumLinksResolvedSuccessfullyFromCache() {
+ return numLinksResolvedSuccessfullyFromCache;
+ }
+
+ public void setNumLinksResolvedSuccessfullyFromCache(int numLinksResolvedSuccessfullyFromCache) {
+ this.numLinksResolvedSuccessfullyFromCache = numLinksResolvedSuccessfullyFromCache;
+ }
+
+ public int getNumLinksResolvedSuccessfullyFromServer() {
+ return numLinksResolvedSuccessfullyFromServer;
+ }
+
+ public void setNumLinksResolvedSuccessfullyFromServer(
+ int numLinksResolvedSuccessfullyFromServer) {
+ this.numLinksResolvedSuccessfullyFromServer = numLinksResolvedSuccessfullyFromServer;
+ }
+
+ public int getNumLinkResolveFailed() {
+ return numLinkResolveFailed;
+ }
+
+ public void setNumLinkResolveFailed(int numLinkResolveFailed) {
+ this.numLinkResolveFailed = numLinkResolveFailed;
+ }
+
+ public int getNumNodes() {
+ return numNodes;
+ }
+
+ public void setNumNodes(int numNodes) {
+ this.numNodes = numNodes;
+ }
+
+ public int getNumLinks() {
+ return numLinks;
+ }
+
+ public void setNumLinks(int numLinks) {
+ this.numLinks = numLinks;
+ }
+
+ public long getRenderTimeInMs() {
+ return renderTimeInMs;
+ }
+
+ public void setRenderTimeInMs(long renderTimeInMs) {
+ this.renderTimeInMs = renderTimeInMs;
+ }
+
+ /**
+ * Peg counter.
+ *
+ * @param counterName the counter name
+ */
+ public void pegCounter(String counterName) {
+ entitySummary.pegCounter(counterName);
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "GraphMeta ["
+ + (aaiEntityNodeDescriptors != null
+ ? "aaiEntityNodeDescriptors=" + aaiEntityNodeDescriptors + ", " : "")
+ + "numNodes=" + numNodes + ", numLinks=" + numLinks + ", renderTimeInMs=" + renderTimeInMs
+ + ", numLinksResolvedSuccessfullyFromCache=" + numLinksResolvedSuccessfullyFromCache
+ + ", numLinksResolvedSuccessfullyFromServer=" + numLinksResolvedSuccessfullyFromServer
+ + ", numLinkResolveFailed=" + numLinkResolveFailed + ", "
+ + (entitySummary != null ? "entitySummary=" + entitySummary : "") + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java
new file mode 100644
index 0000000..23e50a9
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java
@@ -0,0 +1,58 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+public class GraphRequest {
+
+ private String hashId;
+ private boolean includeGraphMeta;
+
+ public GraphRequest() {
+
+ }
+
+ public String getHashId() {
+ return hashId;
+ }
+
+ public void setHashId(String hashId) {
+ this.hashId = hashId;
+ }
+
+ public boolean isIncludeGraphMeta() {
+ return includeGraphMeta;
+ }
+
+ public void setIncludeGraphMeta(boolean includeGraphMeta) {
+ this.includeGraphMeta = includeGraphMeta;
+ }
+
+ @Override
+ public String toString() {
+ return "QueryRequest [" + (hashId != null ? "hashId=" + hashId + ", " : "")
+ + "includeGraphMeta=" + includeGraphMeta + "]";
+ }
+
+} \ No newline at end of file
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessage.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessage.java
new file mode 100644
index 0000000..f6f85bb
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessage.java
@@ -0,0 +1,70 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+/**
+ * The Class InlineMessage.
+ */
+public class InlineMessage {
+
+ private String level;
+ private String message;
+
+ /**
+ * Instantiates a new inline message.
+ *
+ * @param level the level
+ * @param message the message
+ */
+ public InlineMessage(String level, String message) {
+ this.level = level;
+ this.message = message;
+ }
+
+ public String getLevel() {
+ return level;
+ }
+
+ public void setLevel(String level) {
+ this.level = level;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public void setMessage(String message) {
+ this.message = message;
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return level + " : " + message;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java
new file mode 100644
index 0000000..09e5956
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java
@@ -0,0 +1,207 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+/*
+ * We can use annotations to differentiate between intermediate data we use to build the node, and
+ * the data that we actually want to appear in the exported JSON.
+ */
+
+/*
+ * This is our current (14-June-2016) working schema; it will remain organic until we get it
+ * just right.
+ *
+ * {
+ *   "item-type": "customer",
+ *   "item-name-key": "subscriber-name",
+ *   "item-name-value": "subscriber-name-123456789-aai847-data-01",
+ *   "item-properties": [
+ *     { "property-name": "subscriber-name", "property-value": "subscriber-name-123456789-aai847-data-01" },
+ *     { "property-name": "global-customer-id", "property-value": "global-customer-id-123456789-aai847-data-01" }
+ *   ],
+ *   "node-meta": {
+ *     "color": "#f2d2d2",
+ *     "isSearchTarget": false,
+ *     "nodeGroups": "1,2,3,4"
+ *   }
+ * }
+ *
+ */
+
+
+/**
+ * The Class JsonNode.
+ */
+public class JsonNode {
+
+ private String id;
+ private String itemType;
+ private String itemNameKey;
+ private String itemNameValue;
+ private Map<String, String> itemProperties;
+ private NodeMeta nodeMeta;
+
+ @JsonIgnore
+ private boolean isRootNode;
+
+
+ @JsonIgnore
+ private String resourceKey;
+ @JsonIgnore
+ private Collection<String> inboundNeighbors;
+
+ @JsonIgnore
+ private Collection<String> outboundNeighbors;
+
+
+ @JsonIgnore
+ private static final Logger LOG = Logger.getLogger(JsonNode.class);
+
+ private VisualizationConfigs visualizationConfigs;
+
+
+ /**
+ * Instantiates a new json node.
+ *
+   * @param ain the active inventory node to convert
+   * @param visualizationConfigs the visualization configs
+ */
+ public JsonNode(ActiveInventoryNode ain, VisualizationConfigs visualizationConfigs) {
+ this.resourceKey = ain.getNodeId();
+ this.itemProperties = ain.getProperties();
+ this.setItemType(ain.getEntityType());
+ this.setItemNameKey(ain.getPrimaryKeyName());
+ this.setItemNameValue(ain.getPrimaryKeyValue());
+ this.setId(ain.getNodeId());
+ this.isRootNode = ain.isRootNode();
+ this.visualizationConfigs = visualizationConfigs;
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("---");
+ LOG.debug("JsonNode constructor using AIN = " + ain.dumpNodeTree(true));
+ LOG.debug("---");
+ }
+
+ inboundNeighbors = ain.getInboundNeighbors();
+ outboundNeighbors = ain.getOutboundNeighbors();
+
+ nodeMeta = new NodeMeta(this.visualizationConfigs);
+
+ nodeMeta.setNodeIssue(ain.isNodeIssue());
+ nodeMeta.setNodeValidated(ain.isNodeValidated());
+ nodeMeta.setNodeDepth(ain.getNodeDepth());
+
+ nodeMeta.setNumInboundNeighbors(ain.getInboundNeighbors().size());
+ nodeMeta.setNumOutboundNeighbors(ain.getOutboundNeighbors().size());
+
+ nodeMeta.setAtMaxDepth(ain.isAtMaxDepth());
+ nodeMeta.setSelfLinkResolved(!ain.isSelflinkRetrievalFailure());
+ nodeMeta.setProcessingErrorOccurred(ain.isProcessingErrorOccurred());
+ nodeMeta.setHasNeighbors(
+ ain.getOutboundNeighbors().size() > 0 || ain.getInboundNeighbors().size() > 0);
+ nodeMeta.setProcessingState(ain.getState());
+
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getItemNameKey() {
+ return itemNameKey;
+ }
+
+ public String getItemNameValue() {
+ return itemNameValue;
+ }
+
+ public Map<String, String> getItemProperties() {
+ return itemProperties;
+ }
+
+ public String getItemType() {
+ return itemType;
+ }
+
+ public String getResourceKey() {
+ return resourceKey;
+ }
+
+ public void setItemNameKey(String itemNameKey) {
+ this.itemNameKey = itemNameKey;
+ }
+
+ public void setItemNameValue(String itemNameValue) {
+ this.itemNameValue = itemNameValue;
+ }
+
+ public void setItemProperties(HashMap<String, String> itemProperties) {
+ this.itemProperties = itemProperties;
+ }
+
+ public void setItemType(String itemType) {
+ this.itemType = itemType;
+ }
+
+ public void setResourceKey(String resourceKey) {
+ this.resourceKey = resourceKey;
+ }
+
+ public NodeMeta getNodeMeta() {
+ return nodeMeta;
+ }
+
+ public void setNodeMeta(NodeMeta nodeMeta) {
+ this.nodeMeta = nodeMeta;
+ }
+
+ public boolean isRootNode() {
+ return isRootNode;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "JsonNode [" + (id != null ? "id=" + id + ", " : "")
+ + (itemType != null ? "itemType=" + itemType + ", " : "")
+ + (itemNameKey != null ? "itemNameKey=" + itemNameKey + ", " : "")
+ + (itemNameValue != null ? "itemNameValue=" + itemNameValue + ", " : "")
+ + (itemProperties != null ? "itemProperties=" + itemProperties + ", " : "")
+ + (nodeMeta != null ? "nodeMeta=" + nodeMeta + ", " : "")
+ + (resourceKey != null ? "resourceKey=" + resourceKey + ", " : "")
+ + (inboundNeighbors != null ? "inboundNeighbors=" + inboundNeighbors + ", " : "")
+ + (outboundNeighbors != null ? "outboundNeighbors=" + outboundNeighbors : "") + "]";
+ }
+
+
+}
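Note (illustrative, not part of this change): this JsonNode is Sparky's export POJO, not Jackson's com.fasterxml.jackson.databind.JsonNode. A sketch of the hand-off, with hypothetical values and the same no-arg-constructor assumption as in the earlier notes:

    import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
    import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
    import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode;
    import org.onap.aai.sparky.viewandinspect.entity.JsonNode;

    public class JsonNodeExportSketch {

      public static void main(String[] args) {
        // Assumed no-arg construction; in the service these are Spring-managed beans.
        VisualizationConfigs configs = new VisualizationConfigs();
        OxmEntityLookup entityLookup = new OxmEntityLookup();

        ActiveInventoryNode ain = new ActiveInventoryNode(configs, entityLookup);
        ain.setNodeId("example-node-1");          // hypothetical values throughout
        ain.setEntityType("pserver");
        ain.setPrimaryKeyName("hostname");
        ain.setPrimaryKeyValue("example-host");
        ain.addProperty("hostname", "example-host");
        ain.addOutboundNeighbor("example-node-2");

        JsonNode exportNode = new JsonNode(ain, configs);
        System.out.println(exportNode);           // uses the toString() defined above
      }
    }

The wrapper copies the identifying fields and properties and derives a NodeMeta (depth, neighbor counts, processing state) from the ActiveInventoryNode.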
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLink.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLink.java
new file mode 100644
index 0000000..5891d51
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLink.java
@@ -0,0 +1,77 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+/*
+ * Expected JSON Output:
+ *
+ * { JsonNodeLink : { id : <value>, source : <value>, target : <value> } }
+ *
+ */
+
+/**
+ * The Class JsonNodeLink.
+ */
+public class JsonNodeLink {
+
+ protected String id;
+ protected String source;
+ protected String target;
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ public void setSource(String source) {
+ this.source = source;
+ }
+
+ public String getTarget() {
+ return target;
+ }
+
+ public void setTarget(String target) {
+ this.target = target;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "JsonNodeLink [id=" + id + ", source=" + source + ", target=" + target + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebug.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebug.java
new file mode 100644
index 0000000..0cc0746
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebug.java
@@ -0,0 +1,59 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+/**
+ * The Class NodeDebug.
+ */
+public class NodeDebug {
+ private boolean maxTraversalDepthReached;
+ private boolean processingError;
+ private String processingErrorCauses;
+
+ public boolean isMaxTraversalDepthReached() {
+ return maxTraversalDepthReached;
+ }
+
+ public void setMaxTraversalDepthReached(boolean maxTraversalDepthReached) {
+ this.maxTraversalDepthReached = maxTraversalDepthReached;
+ }
+
+ public boolean isProcessingError() {
+ return processingError;
+ }
+
+ public void setProcessingError(boolean processingError) {
+ this.processingError = processingError;
+ }
+
+ public String getProcessingErrorCauses() {
+ return processingErrorCauses;
+ }
+
+ public void setProcessingErrorCauses(String processingErrorCauses) {
+ this.processingErrorCauses = processingErrorCauses;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java
new file mode 100644
index 0000000..bc21941
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java
@@ -0,0 +1,207 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
+import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState;
+
+/**
+ * The Class NodeMeta.
+ */
+public class NodeMeta {
+
+ private String className;
+
+ private boolean isSearchTarget;
+
+ private NodeDebug nodeDebug;
+ private boolean nodeIssue;
+ private boolean nodeValidated;
+ private long selfLinkResponseTimeInMs;
+ private long numInboundNeighbors;
+ private long numOutboundNeighbors;
+
+ private boolean atMaxDepth;
+ private boolean selfLinkResolved;
+ private boolean processingErrorOccurred;
+ private boolean neighborsProcessed;
+ private int nodeDepth;
+ private boolean hasNeighbors;
+
+ private NodeProcessingState processingState;
+
+ private VisualizationConfigs visualizationConfigs;
+
+
+
+ /**
+ * Instantiates a new node meta.
+ */
+ public NodeMeta(VisualizationConfigs visualizationConfigs) {
+ this.isSearchTarget = false;
+ this.visualizationConfigs = visualizationConfigs;
+
+
+ if (this.visualizationConfigs.isVisualizationDebugEnabled()) {
+ nodeDebug = new NodeDebug();
+ }
+ this.numInboundNeighbors = 0;
+ this.numOutboundNeighbors = 0;
+
+ this.selfLinkResponseTimeInMs = 0;
+
+ this.atMaxDepth = false;
+ this.selfLinkResolved = false;
+ this.processingErrorOccurred = false;
+ this.hasNeighbors = false;
+ this.neighborsProcessed = false;
+ this.nodeDepth = ActiveInventoryNode.DEFAULT_INIT_NODE_DEPTH;
+ this.processingState = NodeProcessingState.INIT;
+
+ }
+
+ public boolean isAtMaxDepth() {
+ return atMaxDepth;
+ }
+
+ public void setAtMaxDepth(boolean atMaxDepth) {
+ this.atMaxDepth = atMaxDepth;
+ }
+
+ public boolean isSelfLinkResolved() {
+ return selfLinkResolved;
+ }
+
+
+
+ public NodeProcessingState getProcessingState() {
+ return processingState;
+ }
+
+ public void setProcessingState(NodeProcessingState processingState) {
+ this.processingState = processingState;
+ }
+
+ public void setSelfLinkResolved(boolean selfLinkResolved) {
+ this.selfLinkResolved = selfLinkResolved;
+ }
+
+ public boolean isProcessingErrorOccurred() {
+ return processingErrorOccurred;
+ }
+
+ public void setProcessingErrorOccurred(boolean processingErrorOccurred) {
+ this.processingErrorOccurred = processingErrorOccurred;
+ }
+
+ public boolean isHasNeighbors() {
+ return hasNeighbors;
+ }
+
+ public void setHasNeighbors(boolean hasNeighbors) {
+ this.hasNeighbors = hasNeighbors;
+ }
+
+ public boolean isNeighborsProcessed() {
+ return neighborsProcessed;
+ }
+
+ public void setNeighborsProcessed(boolean neighborsProcessed) {
+ this.neighborsProcessed = neighborsProcessed;
+ }
+
+ public int getNodeDepth() {
+ return nodeDepth;
+ }
+
+ public void setNodeDepth(int nodeDepth) {
+ this.nodeDepth = nodeDepth;
+ }
+
+ public void setNodeDebug(NodeDebug nodeDebug) {
+ this.nodeDebug = nodeDebug;
+ }
+
+ public String getClassName() {
+ return className;
+ }
+
+ public long getNumInboundNeighbors() {
+ return numInboundNeighbors;
+ }
+
+ public void setNumInboundNeighbors(long numInboundNeighbors) {
+ this.numInboundNeighbors = numInboundNeighbors;
+ }
+
+ public long getNumOutboundNeighbors() {
+ return numOutboundNeighbors;
+ }
+
+ public void setNumOutboundNeighbors(long numOutboundNeighbors) {
+ this.numOutboundNeighbors = numOutboundNeighbors;
+ }
+
+ public NodeDebug getNodeDebug() {
+ return nodeDebug;
+ }
+
+ public long getSelfLinkResponseTimeInMs() {
+ return selfLinkResponseTimeInMs;
+ }
+
+ public boolean isNodeIssue() {
+ return nodeIssue;
+ }
+
+ public boolean isNodeValidated() {
+ return nodeValidated;
+ }
+
+ public boolean isSearchTarget() {
+ return isSearchTarget;
+ }
+
+ public void setClassName(String className) {
+ this.className = className;
+ }
+
+ public void setNodeIssue(boolean nodeIssue) {
+ this.nodeIssue = nodeIssue;
+ }
+
+ public void setNodeValidated(boolean nodeValidated) {
+ this.nodeValidated = nodeValidated;
+ }
+
+ public void setSearchTarget(boolean isSearchTarget) {
+ this.isSearchTarget = isSearchTarget;
+ }
+
+ public void setSelfLinkResponseTimeInMs(long selfLinkResponseTimeInMs) {
+ this.selfLinkResponseTimeInMs = selfLinkResponseTimeInMs;
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java
new file mode 100644
index 0000000..8b1cb8d
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java
@@ -0,0 +1,109 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import org.onap.aai.restclient.client.OperationResult;
+
+/**
+ * The Class NodeProcessingTransaction.
+ */
+public class NodeProcessingTransaction {
+
+ private ActiveInventoryNode processingNode;
+ private OperationResult opResult;
+ private String selfLinkWithModifiers;
+ private String requestParameters;
+
+ /**
+ * Instantiates a new node processing transaction.
+ */
+ public NodeProcessingTransaction() {}
+
+ public String getRequestParameters() {
+ return requestParameters;
+ }
+
+ public void setRequestParameters(String requestParameters) {
+ this.requestParameters = requestParameters;
+ }
+
+ public String getSelfLink() {
+ if (processingNode == null) {
+ return null;
+ }
+
+ return processingNode.getSelfLink();
+ }
+
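+  /*
+   * Note: this returns the node self-link concatenated with requestParameters as-is;
+   * requestParameters is assumed to have been populated before this is called.
+   */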
+ public String getSelfLinkWithModifiers() {
+ if (processingNode == null) {
+ return null;
+ }
+
+ return processingNode.getSelfLink() + requestParameters;
+ }
+
+ public ActiveInventoryNode getProcessingNode() {
+ return processingNode;
+ }
+
+ public void setProcessingNode(ActiveInventoryNode processingNode) {
+ this.processingNode = processingNode;
+ }
+
+ public OperationResult getOpResult() {
+ return opResult;
+ }
+
+ public void setOpResult(OperationResult opResult) {
+ this.opResult = opResult;
+ }
+
+ /**
+ * Processing error occurred.
+ *
+   * @return true, if a processing error occurred (no result, or the operation was unsuccessful)
+ */
+ public boolean processingErrorOccurred() {
+ if (opResult == null) {
+ return true;
+ }
+
+ return !opResult.wasSuccessful();
+
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "NodeProcessingTransaction ["
+ + (processingNode != null ? "processingNode=" + processingNode + ", " : "")
+        + (opResult != null ? "opResult=" + opResult + ", " : "") + "processingErrorOccurred="
+ + processingErrorOccurred() + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryParams.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryParams.java
new file mode 100644
index 0000000..f1a8e4e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryParams.java
@@ -0,0 +1,57 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+/**
+ * The Class QueryParams.
+ */
+public class QueryParams {
+
+ private String searchTargetPrimaryKeyValues;
+ private String searchTargetNodeId;
+
+ /**
+ * Instantiates a new query params.
+ */
+ public QueryParams() {
+
+ }
+
+ public String getSearchTargetPrimaryKeyValues() {
+ return searchTargetPrimaryKeyValues;
+ }
+
+ public void setSearchTargetPrimaryKeyValues(String searchTargetPrimaryKeyValues) {
+ this.searchTargetPrimaryKeyValues = searchTargetPrimaryKeyValues;
+ }
+
+ public String getSearchTargetNodeId() {
+ return searchTargetNodeId;
+ }
+
+ public void setSearchTargetNodeId(String searchTargetNodeId) {
+ this.searchTargetNodeId = searchTargetNodeId;
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequest.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequest.java
new file mode 100644
index 0000000..a542efd
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequest.java
@@ -0,0 +1,47 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+/**
+ * The Class QueryRequest.
+ */
+public class QueryRequest {
+
+ private String hashId;
+
+ public String getHashId() {
+ return hashId;
+ }
+
+ public void setHashId(String hashId) {
+ this.hashId = hashId;
+ }
+
+ @Override
+ public String toString() {
+ return "QueryRequest [hashId=" + hashId + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToProperty.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToProperty.java
new file mode 100644
index 0000000..5d0f8c3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToProperty.java
@@ -0,0 +1,64 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * The Class RelatedToProperty.
+ */
+public class RelatedToProperty {
+ protected String propertyKey;
+ protected String propertyValue;
+
+ @JsonProperty("property-key")
+ public String getPropertyKey() {
+ return propertyKey;
+ }
+
+ public void setPropertyKey(String propertyKey) {
+ this.propertyKey = propertyKey;
+ }
+
+ @JsonProperty("property-value")
+ public String getPropertyValue() {
+ return propertyValue;
+ }
+
+ public void setPropertyValue(String propertyValue) {
+ this.propertyValue = propertyValue;
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "RelatedToProperty [propertyKey=" + propertyKey + ", propertyValue=" + propertyValue
+ + "]";
+ }
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java
new file mode 100644
index 0000000..813dec6
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java
@@ -0,0 +1,96 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.util.Arrays;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * The Class Relationship.
+ */
+public class Relationship {
+
+ protected String relatedTo;
+ protected String relationshipLabel;
+ protected String relatedLink;
+ protected RelationshipData[] relationshipData;
+ protected RelatedToProperty[] relatedToProperty;
+
+ public String getRelatedTo() {
+ return relatedTo;
+ }
+
+ @JsonProperty("related-to")
+ public void setRelatedTo(String relatedTo) {
+ this.relatedTo = relatedTo;
+ }
+
+ public String getRelationshipLabel() {
+ return relationshipLabel;
+ }
+
+ @JsonProperty("relationship-label")
+ public void setRelationshipLabel(String relationshipLabel) {
+ this.relationshipLabel = relationshipLabel;
+ }
+
+ public String getRelatedLink() {
+ return relatedLink;
+ }
+
+ @JsonProperty("related-link")
+ public void setRelatedLink(String relatedLink) {
+ this.relatedLink = relatedLink;
+ }
+
+ public RelationshipData[] getRelationshipData() {
+ return relationshipData;
+ }
+
+ @JsonProperty("relationship-data")
+ public void setRelationshipData(RelationshipData[] relationshipData) {
+ this.relationshipData = relationshipData;
+ }
+
+ public RelatedToProperty[] getRelatedToProperty() {
+ return relatedToProperty;
+ }
+
+ @JsonProperty("related-to-property")
+ public void setRelatedToProperty(RelatedToProperty[] relatedToProperty) {
+ this.relatedToProperty = relatedToProperty;
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "Relationship [relatedTo=" + relatedTo + ", relationshipLabel=" + relationshipLabel
+ + ", relatedLink=" + relatedLink + ", relationshipData=" + Arrays.toString(relationshipData)
+ + ", relatedToProperty=" + Arrays.toString(relatedToProperty) + "]";
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipData.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipData.java
new file mode 100644
index 0000000..c8dfefe
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipData.java
@@ -0,0 +1,63 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * The Class RelationshipData.
+ */
+public class RelationshipData {
+ protected String relationshipKey;
+ protected String relationshipValue;
+
+ @JsonProperty("relationship-key")
+ public String getRelationshipKey() {
+ return relationshipKey;
+ }
+
+ public void setRelationshipKey(String relationshipKey) {
+ this.relationshipKey = relationshipKey;
+ }
+
+ @JsonProperty("relationship-value")
+ public String getRelationshipValue() {
+ return relationshipValue;
+ }
+
+ public void setRelationshipValue(String relationshipValue) {
+ this.relationshipValue = relationshipValue;
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "RelationshipData [relationshipKey=" + relationshipKey + ", relationshipValue="
+ + relationshipValue + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDirectionality.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDirectionality.java
new file mode 100644
index 0000000..13d0537
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDirectionality.java
@@ -0,0 +1,42 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+/**
+ * This enumeration helps discriminate neighbor relationships for visualization purposes,
+ * modeling in/out relationships between ActiveInventoryNodes.
+ * Possible visualization behaviors:
+ *   - IN      ( draw a line with 1 arrow )
+ *   - OUT     ( draw a line with 1 arrow )
+ *   - BOTH    ( draw a line with 2 arrows, or 2 lines with 1 arrow each )
+ *   - UNKNOWN ( draw a line with no arrows )
+ * The UNKNOWN case is what we have at the moment: a collection of neighbors with no
+ * knowledge of relationship directionality.
+ *
+ * @author davea
+ *
+ */
+public enum RelationshipDirectionality {
+ IN, OUT, BOTH, UNKNOWN
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java
new file mode 100644
index 0000000..9c81a3d
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java
@@ -0,0 +1,57 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.util.Arrays;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * The Class RelationshipList.
+ */
+public class RelationshipList {
+
+ protected Relationship[] relationship;
+
+ public Relationship[] getRelationshipList() {
+ return relationship;
+ }
+
+ @JsonProperty("relationship")
+ public void setRelationshipList(Relationship[] relationship) {
+ this.relationship = relationship;
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return "RelationshipList [relationshipList=" + Arrays.toString(relationship) + "]";
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java
new file mode 100644
index 0000000..d853673
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java
@@ -0,0 +1,116 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.onap.aai.sparky.sync.entity.SearchableEntity;
+
+import java.util.Set;
+
+public class SearchableEntityList {
+
+ private List<SearchableEntity> entities;
+
+ public SearchableEntityList() {
+ entities = new ArrayList<SearchableEntity>();
+ }
+
+ public List<SearchableEntity> getEntities() {
+ return entities;
+ }
+
+ public void setEntities(List<SearchableEntity> entities) {
+ this.entities = entities;
+ }
+
+ public void addEntity(SearchableEntity entity) {
+
+ if ( !entities.contains(entity)) {
+ entities.add(entity);
+ }
+
+ }
+
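+  /*
+   * Illustrative usage (hypothetical values):
+   *
+   *   SearchableEntity se = buildEntity("pserver", "example-hostname",
+   *       "https://aai.example:8443/aai/v11/cloud-infrastructure/pservers/pserver/example-hostname",
+   *       getSearchTagMap("hostname", "example-hostname"));
+   *
+   * The returned entity has its entity type, primary key value, link and search tags set,
+   * and deriveFields() has been invoked to populate the derived identifiers.
+   */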
+ protected static SearchableEntity buildEntity(String entityType, String pkeyValue, String link, Map<String,String> searchTags ) {
+
+ SearchableEntity se = new SearchableEntity();
+
+ se.setEntityType(entityType);
+ se.setEntityPrimaryKeyValue(pkeyValue);
+ se.setLink(link);
+
+ if ( searchTags != null) {
+
+ Set<Entry<String, String>> entrySet = searchTags.entrySet();
+
+ for ( Entry<String, String> entry : entrySet ) {
+ se.addSearchTagWithKey(entry.getKey(), entry.getValue());
+ }
+ }
+
+ se.deriveFields();
+
+ return se;
+
+ }
+
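+  /*
+   * Illustrative usage (hypothetical values): getSearchTagMap("hostname", "host-1",
+   * "pserver-id", "id-1") yields {hostname=host-1, pserver-id=id-1}. Arguments are
+   * consumed in key/value pairs; a trailing unpaired argument is ignored.
+   */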
+ protected static Map<String,String> getSearchTagMap(String... tags) {
+
+ HashMap<String,String> dataMap = new HashMap<String,String>();
+
+ if ( tags != null && tags.length >= 2 ) {
+
+ int numTags = tags.length;
+ int index = 0;
+
+ while ( index < numTags ) {
+
+ if ( index + 1 < numTags ) {
+ // we have enough parameters for the current set
+ dataMap.put(tags[index], tags[index+1]);
+ index += 2;
+ } else {
+ break;
+ }
+ }
+
+ }
+
+ return dataMap;
+
+
+ }
+
+ @Override
+ public String toString() {
+ return "SearchableEntityList [" + (entities != null ? "entities=" + entities : "") + "]";
+ }
+
+} \ No newline at end of file
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java
new file mode 100644
index 0000000..d69994b
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java
@@ -0,0 +1,80 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import org.onap.aai.restclient.client.OperationResult;
+
+public class SelfLinkDeterminationTransaction {
+
+ private String parentNodeId;
+ private ActiveInventoryNode newNode;
+ private String queryString;
+ private String entityUrl;
+ private OperationResult opResult;
+
+
+ public String getParentNodeId() {
+ return parentNodeId;
+ }
+
+ public void setParentNodeId(String parentNodeId) {
+ this.parentNodeId = parentNodeId;
+ }
+
+ public ActiveInventoryNode getNewNode() {
+ return newNode;
+ }
+
+ public void setNewNode(ActiveInventoryNode newNode) {
+ this.newNode = newNode;
+ }
+
+ public OperationResult getOpResult() {
+ return opResult;
+ }
+
+ public void setOpResult(OperationResult opResult) {
+ this.opResult = opResult;
+ }
+
+ public String getQueryString() {
+ return queryString;
+ }
+
+ public void setQueryString(String queryString) {
+ this.queryString = queryString;
+ }
+
+ public String getEntityUrl() {
+ return entityUrl;
+ }
+
+ public void setEntityUrl(String entityUrl) {
+ this.entityUrl = entityUrl;
+ }
+
+
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLink.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLink.java
new file mode 100644
index 0000000..9b6e4e9
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLink.java
@@ -0,0 +1,75 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+/*
+ * Expected JSON Output:
+ *
+ * { JsonNodeLink : { id : <value>, source : <value>, target : <value> } }
+ *
+ */
+
+/**
+ * The Class SparkyGraphLink.
+ */
+public class SparkyGraphLink {
+
+ protected String id;
+ protected String source;
+ protected String target;
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ public void setSource(String source) {
+ this.source = source;
+ }
+
+ public String getTarget() {
+ return target;
+ }
+
+ public void setTarget(String target) {
+ this.target = target;
+ }
+
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+    return "SparkyGraphLink [id=" + id + ", source=" + source + ", target=" + target + "]";
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNode.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNode.java
new file mode 100644
index 0000000..5171eaf
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNode.java
@@ -0,0 +1,248 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.entity;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.aggregatevnf.search.AggregateSummaryProcessor;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.subscription.config.SubscriptionConfig;
+import org.onap.aai.sparky.subscription.payload.entity.ObjectInspectorPayload;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+/*
+ * We can use annotations to differentiate between intermediate data we use to build the node, and
+ * the data that we actually want to appear in the exported JSON.
+ */
+
+/*
+ * This is our current ( 14-June-2016 ) working schema that will remain organic until we get it just
+ * right.
+ *
+ * {
+ *   "item-type": "customer",
+ *   "item-name-key": "subscriber-name",
+ *   "item-name-value": "subscriber-name-123456789-aai847-data-01",
+ *   "item-properties": [
+ *     { "property-name": "subscriber-name",
+ *       "property-value": "subscriber-name-123456789-aai847-data-01" },
+ *     { "property-name": "global-customer-id",
+ *       "property-value": "global-customer-id-123456789-aai847-data-01" }
+ *   ],
+ *   "node-meta": { "color": "#f2d2d2", "isSearchTarget": false, "nodeGroups": "1,2,3,4" }
+ * }
+ *
+ */
+
+
+/**
+ * The Class SparkyGraphNode.
+ */
+public class SparkyGraphNode {
+
+ private String id;
+ private String itemType;
+ private String itemNameKey;
+ private String itemNameValue;
+ private Map<String, String> itemProperties;
+ private NodeMeta nodeMeta;
+ private ObjectInspectorPayload externalResourcePayload;
+
+ @JsonIgnore
+ private boolean isRootNode;
+
+
+ @JsonIgnore
+ private String resourceKey;
+ @JsonIgnore
+ private Collection<String> inboundNeighbors;
+
+ @JsonIgnore
+ private Collection<String> outboundNeighbors;
+
+
+ @JsonIgnore
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(SparkyGraphNode.class);
+
+ private VisualizationConfigs visualizationConfigs;
+ private SubscriptionConfig subConfig;
+
+ /**
+ * Instantiates a new SparkyGraphNode.
+ *
+ * @param ain the ain
+ */
+ public SparkyGraphNode(ActiveInventoryNode ain, VisualizationConfigs visualizationConfigs, SubscriptionConfig subConfig) {
+ this.resourceKey = ain.getNodeId();
+ this.itemProperties = ain.getProperties();
+ this.setItemType(ain.getEntityType());
+ this.setItemNameKey(ain.getPrimaryKeyName());
+ this.setItemNameValue(ain.getPrimaryKeyValue());
+ this.setId(ain.getNodeId());
+ this.isRootNode = ain.isRootNode();
+ this.visualizationConfigs = visualizationConfigs;
+ this.setSubConfig(subConfig);
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("---");
+      LOG.debug("SparkyGraphNode constructor using AIN = " + ain.dumpNodeTree(true));
+ LOG.debug("---");
+ }
+
+ inboundNeighbors = ain.getInboundNeighbors();
+ outboundNeighbors = ain.getOutboundNeighbors();
+
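+    // Copy per-node visualization metadata (issue/validation flags, depth, neighbor
+    // counts, self-link and processing status) from the backing ActiveInventoryNode.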
+ nodeMeta = new NodeMeta(this.visualizationConfigs);
+
+ nodeMeta.setNodeIssue(ain.isNodeIssue());
+ nodeMeta.setNodeValidated(ain.isNodeValidated());
+ nodeMeta.setNodeDepth(ain.getNodeDepth());
+
+ nodeMeta.setNumInboundNeighbors(ain.getInboundNeighbors().size());
+ nodeMeta.setNumOutboundNeighbors(ain.getOutboundNeighbors().size());
+
+ nodeMeta.setAtMaxDepth(ain.isAtMaxDepth());
+ nodeMeta.setSelfLinkResolved(!ain.isSelflinkRetrievalFailure());
+ nodeMeta.setProcessingErrorOccurred(ain.isProcessingErrorOccurred());
+ nodeMeta.setHasNeighbors(
+ ain.getOutboundNeighbors().size() > 0 || ain.getInboundNeighbors().size() > 0);
+
+ if (subConfig.getIsLaunchOIEnabled()) {
+ try {
+ Collection<String> entityTypes = subConfig.getAnnEntitiyTypes();
+ for (String entityType : entityTypes) {
+ if (entityType.equals(this.getItemType())) {
+ ObjectInspectorPayload lic = ObjectInspectorPayload.getOIPayload(subConfig);
+ lic.getMessage().getPayload().getParams().setObjectName(this.getItemNameValue());
+ this.setExternalResourcePayload(lic);
+ break;
+ }
+ }
+ } catch (IOException e) {
+        String message = "Could not map JSON to object. Attempted to convert: "
+ + SparkyConstants.SUBSCRIPTION_OI_MAPPING + ". Error: " + e.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message);
+ }
+ }
+ nodeMeta.setProcessingState(ain.getState());
+
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getItemNameKey() {
+ return itemNameKey;
+ }
+
+ public String getItemNameValue() {
+ return itemNameValue;
+ }
+
+ public Map<String, String> getItemProperties() {
+ return itemProperties;
+ }
+
+ public String getItemType() {
+ return itemType;
+ }
+
+ public String getResourceKey() {
+ return resourceKey;
+ }
+
+ public void setItemNameKey(String itemNameKey) {
+ this.itemNameKey = itemNameKey;
+ }
+
+ public void setItemNameValue(String itemNameValue) {
+ this.itemNameValue = itemNameValue;
+ }
+
+ public void setItemProperties(HashMap<String, String> itemProperties) {
+ this.itemProperties = itemProperties;
+ }
+
+ public void setItemType(String itemType) {
+ this.itemType = itemType;
+ }
+
+ public void setResourceKey(String resourceKey) {
+ this.resourceKey = resourceKey;
+ }
+
+ public NodeMeta getNodeMeta() {
+ return nodeMeta;
+ }
+
+ public void setNodeMeta(NodeMeta nodeMeta) {
+ this.nodeMeta = nodeMeta;
+ }
+
+ public boolean isRootNode() {
+ return isRootNode;
+ }
+
+ public ObjectInspectorPayload getExternalResourcePayload() {
+ return externalResourcePayload;
+ }
+
+ public void setExternalResourcePayload(ObjectInspectorPayload externalResourcePayload) {
+ this.externalResourcePayload = externalResourcePayload;
+ }
+
+ public SubscriptionConfig getSubConfig() {
+ return subConfig;
+ }
+
+ public void setSubConfig(SubscriptionConfig subConfig) {
+ this.subConfig = subConfig;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+    return "SparkyGraphNode [" + (id != null ? "id=" + id + ", " : "")
+ + (itemType != null ? "itemType=" + itemType + ", " : "")
+ + (itemNameKey != null ? "itemNameKey=" + itemNameKey + ", " : "")
+ + (itemNameValue != null ? "itemNameValue=" + itemNameValue + ", " : "")
+ + (itemProperties != null ? "itemProperties=" + itemProperties + ", " : "")
+ + (nodeMeta != null ? "nodeMeta=" + nodeMeta + ", " : "")
+ + (resourceKey != null ? "resourceKey=" + resourceKey + ", " : "")
+ + (inboundNeighbors != null ? "inboundNeighbors=" + inboundNeighbors + ", " : "")
+ + (outboundNeighbors != null ? "outboundNeighbors=" + outboundNeighbors : "") + "]";
+ }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java
new file mode 100644
index 0000000..5c6cdd8
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java
@@ -0,0 +1,36 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.enumeration;
+
+/**
+ * The Enum NodeProcessingAction.
+ */
+public enum NodeProcessingAction {
+ SELF_LINK_SET, NEW_NODE_PROCESSED, SELF_LINK_RESOLVE_ERROR, SELF_LINK_DETERMINATION_ERROR,
+ SELF_LINK_RESOLVE_OK, SELF_LINK_RESPONSE_PARSE_ERROR, SELF_LINK_RESPONSE_PARSE_OK,
+ NEIGHBORS_PROCESSED_ERROR, NEIGHBORS_PROCESSED_OK, COMPLEX_ATTRIBUTE_GROUP_PARSE_ERROR,
+  COMPLEX_ATTRIBUTE_GROUP_PARSE_OK, NODE_IDENTITY_ERROR, UNEXPECTED_STATE_TRANSITION
+}
+
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingState.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingState.java
new file mode 100644
index 0000000..18673ef
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingState.java
@@ -0,0 +1,31 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.enumeration;
+
+/**
+ * The Enum NodeProcessingState.
+ */
+public enum NodeProcessingState {
+  INIT, SELF_LINK_UNRESOLVED, SELF_LINK_RESPONSE_UNPROCESSED, NEIGHBORS_UNPROCESSED, READY, ERROR
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java
new file mode 100644
index 0000000..c0a7711
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java
@@ -0,0 +1,426 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.search;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.common.search.CommonSearchSuggestion;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.search.SearchServiceAdapter;
+import org.onap.aai.sparky.search.api.SearchProvider;
+import org.onap.aai.sparky.search.config.SuggestionConfig;
+import org.onap.aai.sparky.search.entity.QuerySearchEntity;
+import org.onap.aai.sparky.search.entity.SearchSuggestion;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+public class ViewInspectSearchProvider implements SearchProvider {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(ViewInspectSearchProvider.class);
+
+ private SearchServiceAdapter searchServiceAdapter = null;
+ private SuggestionConfig suggestionConfig;
+ private String additionalSearchSuggestionText;
+
+ private static final String KEY_SEARCH_RESULT = "searchResult";
+ private static final String KEY_HITS = "hits";
+ private static final String KEY_DOCUMENT = "document";
+ private static final String KEY_CONTENT = "content";
+
+ private static final String KEY_SEARCH_TAG_IDS = "searchTagIDs";
+ private static final String KEY_SEARCH_TAGS = "searchTags";
+ private static final String KEY_LINK = "link";
+ private static final String KEY_ENTITY_TYPE = "entityType";
+
+ private final String viewInspectIndexName;
+ private final String viewInspectSuggestionRoute;
+ private OxmEntityLookup oxmEntityLookup;
+
+ public ViewInspectSearchProvider(SearchServiceAdapter searchServiceAdapter,
+ SuggestionConfig suggestionConfig, String viewInspectIndexName,
+ String viewInspectSuggestionRoute, OxmEntityLookup oxmEntityLookup) throws Exception {
+
+ this.searchServiceAdapter = searchServiceAdapter;
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.suggestionConfig = suggestionConfig;
+ additionalSearchSuggestionText = null;
+ this.viewInspectIndexName = viewInspectIndexName;
+ this.viewInspectSuggestionRoute = viewInspectSuggestionRoute;
+
+ }
+
+ @Override
+ public List<SearchSuggestion> search(QuerySearchEntity queryRequest) {
+
+ List<SearchSuggestion> suggestionEntityList = new ArrayList<SearchSuggestion>();
+
+ /*
+ * Based on the configured stop words, we need to strip any matched stop-words ( case
+ * insensitively ) from the query string, before hitting elastic to prevent the words from being
+ * used against the elastic view-and-inspect index. Another alternative to this approach would
+ * be to define stop words on the elastic search index configuration for the
+     * entity-search-index, but that may be more complicated / more risky than just a simple bug
+ * fix, but it's something we should think about for the future.
+ */
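+    /*
+     * Hypothetical example (stop words are configuration-driven): if "and" is a
+     * configured stop word, the query "fw and vserver" is posted as "fw vserver".
+     */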
+
+ try {
+ final String queryStringWithoutStopWords =
+ stripStopWordsFromQuery(queryRequest.getQueryStr());
+
+ final String fullUrlStr = searchServiceAdapter.buildSearchServiceQueryUrl(viewInspectIndexName);
+
+ String postBody = String.format(VIUI_SEARCH_TEMPLATE, Integer.parseInt(queryRequest.getMaxResults()),
+ queryStringWithoutStopWords);
+
+ OperationResult opResult = searchServiceAdapter.doPost(fullUrlStr, postBody, "application/json");
+ if (opResult.getResultCode() == 200) {
+ suggestionEntityList =
+ generateSuggestionsForSearchResponse(opResult.getResult(), queryRequest.getQueryStr());
+ }
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR,
+ "View and inspect query failed with error = " + exc.getMessage());
+ }
+ return suggestionEntityList;
+
+
+ }
+
+ public String getAdditionalSearchSuggestionText() {
+ return additionalSearchSuggestionText;
+ }
+
+ public void setAdditionalSearchSuggestionText(String additionalSearchSuggestionText) {
+ this.additionalSearchSuggestionText = additionalSearchSuggestionText;
+ }
+
+
+
+
+ /**
+ * Builds the search response.
+ *
+ * @param operationResult The Elasticsearch query result
+ * @param queryStr The string the user typed into the search bar
+ * @return A list of search suggestions and corresponding UI filter values
+ */
+ private List<SearchSuggestion> generateSuggestionsForSearchResponse(String operationResult,
+ String queryStr) {
+
+
+ if (operationResult == null || operationResult.length() == 0) {
+ return null;
+ }
+
+ ObjectMapper mapper = new ObjectMapper();
+ JsonNode rootNode = null;
+ List<SearchSuggestion> suggestionEntityList = new ArrayList<SearchSuggestion>();
+ try {
+ rootNode = mapper.readTree(operationResult);
+
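+      /*
+       * Illustrative (shape inferred from the keys used below): the search service
+       * response is expected to look like
+       *   { "searchResult": { "hits": [ { "document": { "content": {
+       *       "entityType": ..., "link": ..., "searchTags": ..., "searchTagIDs": ... } } } ] } }
+       */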
+ JsonNode hitsNode = rootNode.get(KEY_SEARCH_RESULT);
+
+
+
+ // Check if there are hits that are coming back
+ if (hitsNode.has(KEY_HITS)) {
+ ArrayNode hitsArray = (ArrayNode) hitsNode.get(KEY_HITS);
+
+ /*
+ * next we iterate over the values in the hit array elements
+ */
+
+ Iterator<JsonNode> nodeIterator = hitsArray.elements();
+ JsonNode entityNode = null;
+ CommonSearchSuggestion suggestionEntity = null;
+ JsonNode sourceNode = null;
+ while (nodeIterator.hasNext()) {
+ entityNode = nodeIterator.next();
+ sourceNode = entityNode.get(KEY_DOCUMENT).get(KEY_CONTENT);
+
+ // do the point transformation as we build the response?
+ suggestionEntity = new CommonSearchSuggestion();
+ suggestionEntity.setRoute(viewInspectSuggestionRoute);
+
+ /*
+ * This is where we probably want to annotate the search tags because we also have access
+           * to the searchTagIds
+ */
+
+ String searchTagIds = getValueFromNode(sourceNode, KEY_SEARCH_TAG_IDS);
+ String searchTags = getValueFromNode(sourceNode, KEY_SEARCH_TAGS);
+ String entityType = getValueFromNode(sourceNode, KEY_ENTITY_TYPE);
+ String link = getValueFromNode(sourceNode, KEY_LINK);
+
+ if (link != null) {
+ suggestionEntity.setHashId(NodeUtils.generateUniqueShaDigest(link));
+ }
+
+ try {
+ suggestionEntity
+ .setText(annotateSearchTags(searchTags, searchTagIds, entityType, queryStr));
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(),
+ exc.getLocalizedMessage());
+ // at least send back the un-annotated search tags
+ suggestionEntity.setText(searchTags);
+ }
+
+ if ( getAdditionalSearchSuggestionText() != null ) {
+ String suggestionText = suggestionEntity.getText() ;
+ suggestionText += SparkyConstants.SUGGESTION_TEXT_SEPARATOR
+ + getAdditionalSearchSuggestionText();
+ suggestionEntity.setText(suggestionText);
+ }
+
+ if (searchTags != null) {
+ suggestionEntityList.add(suggestionEntity);
+ }
+
+ }
+ }
+ } catch (IOException exc) {
+ LOG.warn(AaiUiMsgs.SEARCH_RESPONSE_BUILDING_EXCEPTION, exc.getLocalizedMessage());
+ }
+ return suggestionEntityList;
+ }
+
+
+
+ /**
+ * The current format of an UI-dropdown-item is like: "search-terms entityType att1=attr1_val".
+ * Example, for pserver: search-terms pserver hostname=djmAG-72060,
+ * pserver-name2=example-pserver-name2-val-17254, pserver-id=example-pserver-id-val-17254,
+ * ipv4-oam-address=example-ipv4-oam-address-val-17254 SearchController.js parses the above
+ * format. So if you are modifying the parsing below, please update SearchController.js as well.
+ *
+ * @param searchTags the search tags
+ * @param searchTagIds the search tag ids
+ * @param entityType the entity type
+ * @param queryStr the query str
+ * @return the string
+ */
+
+ private String annotateSearchTags(String searchTags, String searchTagIds, String entityType,
+ String queryStr) {
+
+ if (searchTags == null || searchTagIds == null) {
+ String valueOfSearchTags = String.valueOf(searchTags);
+ String valueOfSearchTagIds = String.valueOf(searchTagIds);
+
+ LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error",
+          "Search tags = " + valueOfSearchTags + " and Search tag IDs = " + valueOfSearchTagIds);
+ return searchTags;
+ }
+
+ if (entityType == null) {
+ LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), "EntityType is null");
+ return searchTags;
+ }
+
+ if (queryStr == null) {
+ LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(),
+ "Query string is null");
+ return searchTags;
+ }
+
+ /*
+ * The ElasticSearch analyzer has already applied the lowercase filter, so we don't have to
+     * convert them again
+ */
+ String[] searchTagsArray = searchTags.split(";");
+ String[] searchTagIdsArray = searchTagIds.split(";");
+
+    // specifically apply lower case to the query terms to make matching
+ // simpler
+ String[] queryTerms = queryStr.toLowerCase().split(" ");
+
+ OxmEntityDescriptor desc = oxmEntityLookup.getEntityDescriptors().get(entityType);
+
+ if (desc == null) {
+ LOG.error(AaiUiMsgs.ENTITY_NOT_FOUND_IN_OXM, entityType.toString());
+ return searchTags;
+ }
+
+ String primaryKeyName = NodeUtils.concatArray(desc.getPrimaryKeyAttributeNames(), "/");
+ String primaryKeyValue = null;
+
+ /*
+ * For each used attribute, get the fieldName for the attribute index and transform the search
+ * tag into t1,t2,t3 => h1=t1, h2=t2, h3=t3;
+ */
+ StringBuilder searchTagsBuilder = new StringBuilder(128);
+ searchTagsBuilder.append(entityType);
+
+ String primaryKeyConjunctionValue = null;
+ boolean queryTermsMatchedSearchTags = false;
+
+ if (searchTagsArray.length == searchTagIdsArray.length) {
+ for (int i = 0; i < searchTagsArray.length; i++) {
+ String searchTagAttributeId = searchTagIdsArray[i];
+ String searchTagAttributeValue = searchTagsArray[i];
+
+ // Find the concat conjunction
+ Map<String, String> pairConjunctionList = suggestionConfig.getPairingList();
+
+ String suggConjunction = null;
+ if (pairConjunctionList.get(searchTagAttributeId) != null) {
+ suggConjunction = pairConjunctionList.get(searchTagAttributeId);
+ } else {
+ suggConjunction = suggestionConfig.getDefaultPairingValue();
+ }
+
+ if (primaryKeyName.equals(searchTagAttributeId)) {
+ primaryKeyValue = searchTagAttributeValue;
+ primaryKeyConjunctionValue = suggConjunction;
+ }
+
+ if (queryTermsMatchSearchTag(queryTerms, searchTagAttributeValue)) {
+ searchTagsBuilder.append(" " + suggConjunction + " " + searchTagAttributeValue);
+ queryTermsMatchedSearchTags = true;
+ }
+ }
+ } else {
+ String errorMessage =
+ "Search tags length did not match search tag ID length for entity type " + entityType;
+ LOG.error(AaiUiMsgs.ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED, errorMessage);
+ }
+
+
+
+ /*
+ * if none of the user query terms matched the index entity search tags then we should still tag
+ * the matched entity with a conjunction set to at least it's entity primary key value to
+ * discriminate between the entities of the same type in the search results displayed in the UI
+ * search bar results
+ */
+
+ if (!queryTermsMatchedSearchTags) {
+
+ if (primaryKeyValue != null && primaryKeyConjunctionValue != null) {
+ searchTagsBuilder.append(" " + primaryKeyConjunctionValue + " " + primaryKeyValue);
+ } else {
+ LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error",
+ "Could not annotate user query terms " + queryStr
+ + " from available entity search tags = " + searchTags);
+ return searchTags;
+ }
+
+ }
+
+ return searchTagsBuilder.toString();
+
+ }
+
+ /**
+ * Query terms match search tag.
+ *
+ * @param queryTerms the query terms
+ * @param searchTag the search tag
+   * @return true, if any query term matches the search tag
+ */
+ private boolean queryTermsMatchSearchTag(String[] queryTerms, String searchTag) {
+
+ if (queryTerms == null || queryTerms.length == 0 || searchTag == null) {
+ return false;
+ }
+
+ for (String queryTerm : queryTerms) {
+ if (searchTag.toLowerCase().contains(queryTerm.toLowerCase())) {
+ return true;
+ }
+ }
+
+ return false;
+
+ }
+
+ /**
+ * Gets the value from node.
+ *
+ * @param node the node
+ * @param fieldName the field name
+ * @return the value from node
+ */
+ private String getValueFromNode(JsonNode node, String fieldName) {
+
+ if (node == null || fieldName == null) {
+ return null;
+ }
+
+ JsonNode valueNode = node.get(fieldName);
+
+ if (valueNode != null) {
+ return valueNode.asText();
+ }
+
+ return null;
+
+ }
+
+ private static final String VIUI_SEARCH_TEMPLATE =
+ "{ " + "\"results-start\": 0," + "\"results-size\": %d," + "\"queries\": [{" + "\"must\": {"
+ + "\"match\": {" + "\"field\": \"entityType searchTags crossEntityReferenceValues\","
+ + "\"value\": \"%s\"," + "\"operator\": \"and\", "
+ + "\"analyzer\": \"whitespace_analyzer\"" + "}" + "}" + "}]" + "}";
+
+ //private SuggestionConfig suggestionConfig = null;
+
+ /**
+ * @param queryStr - space separate query search terms
+ * @return - query string with stop-words removed
+ */
+ private String stripStopWordsFromQuery(String queryStr) {
+
+ if (queryStr == null) {
+ return queryStr;
+ }
+
+ Collection<String> stopWords = suggestionConfig.getStopWords();
+ ArrayList<String> queryTerms =
+ new ArrayList<String>(Arrays.asList(queryStr.toLowerCase().split(" ")));
+
+ queryTerms.removeAll(stopWords);
+
+ return String.join(" ", queryTerms);
+ }
+
+}
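
For reference, a minimal standalone sketch (not taken from this change; the class name and hard-coded
stop-word list below are made up for illustration, standing in for SuggestionConfig) of the query
pre-processing used by the suggestion search above: lower-case the query, strip stop words, then treat
any remaining term that is a substring of a candidate search tag as a match.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class QueryTermSketch {

  // Stand-in for SuggestionConfig.getStopWords(); the real list is configuration driven.
  private static final List<String> STOP_WORDS = Arrays.asList("and", "the", "of");

  // Mirrors stripStopWordsFromQuery(): lower-case, split on spaces, drop stop words.
  static String stripStopWords(String queryStr) {
    List<String> terms = new ArrayList<>(Arrays.asList(queryStr.toLowerCase().split(" ")));
    terms.removeAll(STOP_WORDS);
    return String.join(" ", terms);
  }

  // Mirrors queryTermsMatchSearchTag(): any remaining term contained in the tag is a hit.
  static boolean termsMatchTag(String[] queryTerms, String searchTag) {
    for (String term : queryTerms) {
      if (searchTag.toLowerCase().contains(term)) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    String cleaned = stripStopWords("the hostname of djmAG");
    // prints: "hostname djmag -> matches tag 'djmAG-72060': true"
    System.out.println(cleaned + " -> matches tag 'djmAG-72060': "
        + termsMatchTag(cleaned.split(" "), "djmAG-72060"));
  }
}
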
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseGizmoVisualizationContext.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseGizmoVisualizationContext.java
new file mode 100644
index 0000000..d0cabfe
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseGizmoVisualizationContext.java
@@ -0,0 +1,990 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.services;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.dal.GizmoAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.entity.SearchableEntity;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
+import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode;
+import org.onap.aai.sparky.viewandinspect.entity.GizmoEntity;
+import org.onap.aai.sparky.viewandinspect.entity.GizmoRelationshipEntity;
+import org.onap.aai.sparky.viewandinspect.entity.GizmoRelationshipHint;
+import org.onap.aai.sparky.viewandinspect.entity.InlineMessage;
+import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction;
+import org.onap.aai.sparky.viewandinspect.entity.QueryParams;
+import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingAction;
+import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState;
+import org.onap.aai.sparky.viewandinspect.task.PerformGizmoNodeSelfLinkProcessingTask;
+
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.PropertyNamingStrategy;
+
+/**
+ * The Class BaseGizmoVisualizationContext.
+ */
+public class BaseGizmoVisualizationContext implements VisualizationContext {
+
+ private static final int MAX_DEPTH_EVALUATION_ATTEMPTS = 100;
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(BaseGizmoVisualizationContext.class);
+
+ private final GizmoAdapter gizmoAdapter;
+
+ private AtomicInteger numLinksDiscovered;
+ private AtomicInteger numSuccessfulLinkResolveFromCache;
+ private AtomicInteger numSuccessfulLinkResolveFromFromServer;
+ private AtomicInteger numFailedLinkResolve;
+ private AtomicInteger aaiWorkOnHand;
+
+ private VisualizationConfigs visualizationConfigs;
+
+ private AtomicInteger totalLinksRetrieved;
+
+ private final long contextId;
+ private final String contextIdStr;
+ private long lastProcessStatesSummaryLogInMs = -1;
+
+
+ private ObjectMapper mapper;
+ private InlineMessage inlineMessage = null;
+
+ private ExecutorService graphExecutorService;
+ private OxmEntityLookup oxmEntityLookup;
+ private boolean rootNodeFound;
+
+ /*
+ * The node cache is intended to be a flat structure indexed by a primary key to avoid needlessly
+ * re-requesting the same self-links over-and-over again, to speed up the overall render time and
+ * more importantly to reduce the network cost of determining information we already have.
+ */
+ private ConcurrentHashMap<String, ActiveInventoryNode> nodeCache;
+
+ /**
+   * Instantiates a new base gizmo visualization context.
+   *
+ * @throws Exception the exception
+ */
+ public BaseGizmoVisualizationContext(long contextId, GizmoAdapter gizmoAdapter,
+ ExecutorService graphExecutorService, VisualizationConfigs visualizationConfigs,
+ OxmEntityLookup oxmEntityLookup) throws Exception {
+
+ this.contextId = contextId;
+ this.contextIdStr = "[Context-Id=" + contextId + "]";
+ this.gizmoAdapter = gizmoAdapter;
+ this.graphExecutorService = graphExecutorService;
+ this.visualizationConfigs = visualizationConfigs;
+ this.oxmEntityLookup = oxmEntityLookup;
+
+ this.nodeCache = new ConcurrentHashMap<String, ActiveInventoryNode>();
+ this.numLinksDiscovered = new AtomicInteger(0);
+ this.totalLinksRetrieved = new AtomicInteger(0);
+ this.numSuccessfulLinkResolveFromCache = new AtomicInteger(0);
+ this.numSuccessfulLinkResolveFromFromServer = new AtomicInteger(0);
+ this.numFailedLinkResolve = new AtomicInteger(0);
+ this.aaiWorkOnHand = new AtomicInteger(0);
+
+ this.mapper = new ObjectMapper();
+ mapper.setSerializationInclusion(Include.NON_EMPTY);
+ mapper.setPropertyNamingStrategy(new PropertyNamingStrategy.KebabCaseStrategy());
+ this.rootNodeFound = false;
+ }
+
+ protected boolean isRootNodeFound() {
+ return rootNodeFound;
+ }
+
+ protected void setRootNodeFound(boolean rootNodeFound) {
+ this.rootNodeFound = rootNodeFound;
+ }
+
+ public long getContextId() {
+ return contextId;
+ }
+
+ public GizmoAdapter getGizmoAdapter() {
+ return gizmoAdapter;
+ }
+
+ /**
+ * Process self link response.
+ *
+ * @param nodeId the node id
+ */
+ private void processSelfLinkResponse(String nodeId) {
+
+ if (nodeId == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR,
+ "Cannot process self link" + " response because nodeId is null");
+ return;
+ }
+
+ ActiveInventoryNode ain = nodeCache.get(nodeId);
+
+ if (ain == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR,
+ "Cannot process self link response" + " because can't find node for id = " + nodeId);
+ return;
+ }
+
+ GizmoEntity gizmoEntity = null;
+
+ try {
+ gizmoEntity = mapper.readValue(ain.getOpResult().getResult(), GizmoEntity.class);
+ } catch (Exception exc) {
+ exc.printStackTrace();
+ LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to marshal json"
+ + " response str into JsonNode with error, " + exc.getLocalizedMessage());
+ ain.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR);
+ return;
+ }
+
+ if (gizmoEntity == null) {
+
+ LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR,
+          "Failed to parse json node str." + " Parse resulted in a null value.");
+ ain.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR);
+ return;
+ }
+
+ /*
+ * Now that we have the gizmo entity we can populate the AIN node with it, as well as the
+ * relationships
+ */
+
+ ain.setEntityType(gizmoEntity.getType());
+
+ ain.setPrimaryKeyName(getEntityTypePrimaryKeyName(gizmoEntity.getType()));
+
+    OxmEntityDescriptor descriptor =
+        oxmEntityLookup.getEntityDescriptors().get(gizmoEntity.getType());
+
+ if (descriptor != null) {
+ ain.setPrimaryKeyValue(getPrimaryKeyValues(gizmoEntity.getProperties(),
+ descriptor.getPrimaryKeyAttributeNames()));
+ } else {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, "Could not determine oxm descriptor for entity type = " + gizmoEntity.getType());
+ }
+
+ gizmoEntity.getProperties().forEach((key, value) -> {
+ ain.getProperties().put(key, value);
+ });
+
+ // add edit attributes link
+ if (ain.getSelfLink() != null) {
+ ain.addProperty(SparkyConstants.URI_ATTR_NAME, ain.getSelfLink());
+ }
+
+
+
+ /*
+ * Only discover neighbors if our depth is less than the Max-Traversal-Depth
+ */
+
+ if (ain.getNodeDepth() < this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) {
+
+ /*
+ * I think the next thing to do is:
+ *
+       * 1. Calculate the source / target node id
+       * 2. Add the nodeId to the incoming / outgoing links collection
+       * 3. Add the node to the node cache for processing
+ */
+
+ String resourceLink = null;
+ String relationshipNodeId = null;
+ ActiveInventoryNode relationshipNode = null;
+
+ for (GizmoRelationshipHint inRelationship : gizmoEntity.getIn()) {
+
+ if (inRelationship.getSource() != null) {
+
+ resourceLink = NodeUtils.extractRawGizmoPathWithoutVersion(inRelationship.getSource());
+ relationshipNodeId = NodeUtils.generateUniqueShaDigest(resourceLink);
+
+ if (!nodeCache.containsKey(relationshipNodeId)) {
+
+ relationshipNode = new ActiveInventoryNode(visualizationConfigs, oxmEntityLookup);
+ relationshipNode.setNodeId(relationshipNodeId);
+ relationshipNode.setSelfLink(resourceLink);
+ relationshipNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED,
+ NodeProcessingAction.NEW_NODE_PROCESSED);
+
+ ain.addInboundNeighbor(relationshipNodeId);
+
+ addNode(relationshipNode);
+
+ }
+ }
+
+ }
+
+ for (GizmoRelationshipHint outRelationship : gizmoEntity.getOut()) {
+
+ if (outRelationship.getTarget() != null) {
+
+ resourceLink = NodeUtils.extractRawGizmoPathWithoutVersion(outRelationship.getTarget());
+ relationshipNodeId = NodeUtils.generateUniqueShaDigest(resourceLink);
+
+ if (!nodeCache.containsKey(relationshipNodeId)) {
+
+ relationshipNode = new ActiveInventoryNode(visualizationConfigs, oxmEntityLookup);
+ relationshipNode.setNodeId(relationshipNodeId);
+ relationshipNode.setSelfLink(resourceLink);
+ relationshipNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED,
+ NodeProcessingAction.NEW_NODE_PROCESSED);
+
+ ain.addOutboundNeighbor(relationshipNodeId);
+
+ addNode(relationshipNode);
+
+ }
+ }
+
+ }
+ }
+
+ ain.changeState(NodeProcessingState.READY, NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK);
+
+ }
+
+ /**
+ * Perform self link resolve.
+ *
+ * @param nodeId the node id
+ */
+ private void performSelfLinkResolve(String nodeId) {
+
+ if (nodeId == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR,
+ "Resolve of self-link" + " has been skipped because provided nodeId is null");
+ return;
+ }
+
+ ActiveInventoryNode ain = nodeCache.get(nodeId);
+
+ if (ain == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Failed to find node with id, " + nodeId
+ + ", from node cache. Resolve self-link method has been skipped.");
+ return;
+ }
+
+ if (!ain.isSelfLinkPendingResolve()) {
+
+ ain.setSelfLinkPendingResolve(true);
+
+ // kick off async self-link resolution
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "About to process node in SELF_LINK_UNPROCESSED State, link = " + ain.getSelfLink());
+ }
+
+ numLinksDiscovered.incrementAndGet();
+
+ /*
+ * If the current node is the search target, we want to see everything the node has to offer
+ * from the self-link and not filter it to a single node.
+ */
+
+ NodeProcessingTransaction txn = new NodeProcessingTransaction();
+ txn.setProcessingNode(ain);
+ txn.setRequestParameters(null);
+ aaiWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformGizmoNodeSelfLinkProcessingTask(txn, null, gizmoAdapter),
+ graphExecutorService).whenComplete((nodeTxn, error) -> {
+
+ if (error != null) {
+
+ /*
+ * an error processing the self link should probably result in the node processing
+ * state shifting to ERROR
+ */
+
+ nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true);
+
+ nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESOLVE_ERROR);
+
+ nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false);
+
+ } else {
+
+ totalLinksRetrieved.incrementAndGet();
+
+ OperationResult opResult = nodeTxn.getOpResult();
+
+ if (opResult != null && opResult.wasSuccessful()) {
+
+ if (!opResult.wasSuccessful()) {
+ numFailedLinkResolve.incrementAndGet();
+ }
+
+ if (opResult.isFromCache()) {
+ numSuccessfulLinkResolveFromCache.incrementAndGet();
+ } else {
+ numSuccessfulLinkResolveFromFromServer.incrementAndGet();
+ }
+
+ // success path
+ nodeTxn.getProcessingNode().setOpResult(opResult);
+ nodeTxn.getProcessingNode().changeState(
+ NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED,
+ NodeProcessingAction.SELF_LINK_RESOLVE_OK);
+
+ nodeTxn.getProcessingNode().setSelfLinkProcessed(true);
+ nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false);
+
+ } else {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR,
+ "Self Link retrieval for link," + txn.getSelfLinkWithModifiers()
+ + ", failed with error code," + nodeTxn.getOpResult().getResultCode()
+ + ", and message," + nodeTxn.getOpResult().getResult());
+
+ nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true);
+ nodeTxn.getProcessingNode().setSelfLinkProcessed(true);
+
+ nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESOLVE_ERROR);
+
+ nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false);
+
+ }
+ }
+
+ aaiWorkOnHand.decrementAndGet();
+
+ });
+
+ }
+
+ }
+
+ public GizmoRelationshipEntity getGizmoRelationshipEntity(String gizmoJsonResponse) {
+
+ GizmoRelationshipEntity gizmoRelationship = null;
+ try {
+ gizmoRelationship = mapper.readValue(gizmoJsonResponse, GizmoRelationshipEntity.class);
+ } catch (IOException exc) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, "Failed to map json to GizmoRelationshipEntity. Error: " + exc.getMessage());
+ }
+
+ return gizmoRelationship;
+
+ }
+
+ public String getPrimaryKeyValues(Map<String, String> properties, List<String> pkeyNames) {
+
+ StringBuilder sb = new StringBuilder(64);
+
+ if (pkeyNames.size() > 0) {
+ String primaryKey = properties.get(pkeyNames.get(0));
+ if (primaryKey != null) {
+ sb.append(primaryKey);
+ } else {
+ // this should be a fatal error because unless we can
+ // successfully retrieve all the expected keys we'll end up
+ // with a garbage node
+ LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract" + " keyName, "
+ + pkeyNames.get(0) + ", from properties , " + properties);
+ return null;
+ }
+
+ for (int i = 1; i < pkeyNames.size(); i++) {
+
+ String kv = properties.get(pkeyNames.get(i));
+ if (kv != null) {
+ sb.append("/").append(kv);
+ } else {
+ // this should be a fatal error because unless we can
+ // successfully retrieve all the expected keys we'll end up
+ // with a garbage node
+ LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: failed to extract keyName, "
+ + pkeyNames.get(i) + ", from properties, " + properties);
+ return null;
+ }
+ }
+
+ return sb.toString();
+
+ }
+
+ return null;
+
+ }
+
+
+
+ /**
+ * Find and mark root node.
+ *
+ * @param queryParams the query params
+ */
+ private void findAndMarkRootNode(QueryParams queryParams) {
+
+ if (isRootNodeFound()) {
+ return;
+ }
+
+ for (ActiveInventoryNode cacheNode : nodeCache.values()) {
+
+ if (queryParams.getSearchTargetNodeId().equals(cacheNode.getNodeId())) {
+ cacheNode.setNodeDepth(0);
+ cacheNode.setRootNode(true);
+ LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId());
+ setRootNodeFound(true);
+ }
+ }
+
+ }
+
+ public void addNode(ActiveInventoryNode node) {
+
+ if (node == null) {
+ return;
+ }
+
+ nodeCache.putIfAbsent(node.getNodeId(), node);
+ }
+
+ public VisualizationConfigs getVisualizationConfigs() {
+ return visualizationConfigs;
+ }
+
+ public void setVisualizationConfigs(VisualizationConfigs visualizationConfigs) {
+ this.visualizationConfigs = visualizationConfigs;
+ }
+
+ public OxmEntityLookup getOxmEntityLookup() {
+ return oxmEntityLookup;
+ }
+
+ public void setOxmEntityLookup(OxmEntityLookup oxmEntityLookup) {
+ this.oxmEntityLookup = oxmEntityLookup;
+ }
+
+ public ObjectMapper getMapper() {
+ return mapper;
+ }
+
+ public void setMapper(ObjectMapper mapper) {
+ this.mapper = mapper;
+ }
+
+ private void dumpThrottledWorkOnHandLog() {
+ dumpThrottledWorkOnHandLog(false);
+ }
+
+ private void dumpThrottledWorkOnHandLog(boolean override) {
+
+ if ((lastProcessStatesSummaryLogInMs < 0)
+ || ((System.currentTimeMillis() > (lastProcessStatesSummaryLogInMs + 5000))) || override) {
+
+ lastProcessStatesSummaryLogInMs = System.currentTimeMillis();
+
+ int numInit = 0;
+ int numReady = 0;
+ int numError = 0;
+ int numSelfLinkUnresolved = 0;
+ int numSelfLinkResponseUnprocessed = 0;
+
+ for (ActiveInventoryNode cacheNode : nodeCache.values()) {
+
+ switch (cacheNode.getState()) {
+
+ case INIT: {
+ numInit++;
+ break;
+ }
+
+ case READY: {
+ numReady++;
+ break;
+ }
+ case ERROR: {
+ numError++;
+ break;
+ }
+
+ case SELF_LINK_UNRESOLVED: {
+ numSelfLinkUnresolved++;
+ break;
+ }
+
+ case SELF_LINK_RESPONSE_UNPROCESSED: {
+ numSelfLinkResponseUnprocessed++;
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ }
+
+ LOG.info(AaiUiMsgs.INFO_GENERIC,
+ String.format(
+ "ProcessCurrentStates for ContextId=%s, [PendingTxns=%d, numInit=%d, numSelfLinkUnresolved=%d, numSelfLinkResponseUnProcessed=%d, numReady=%d, numError=%d]",
+ contextIdStr, aaiWorkOnHand.get(), numInit, numSelfLinkUnresolved, numSelfLinkResponseUnprocessed,
+ numReady, numError));
+ }
+
+ }
+
+ /**
+ * Process current node states.
+ *
+   * @param queryParams the query params
+ */
+ private void processCurrentNodeStates(QueryParams queryParams) {
+ /*
+ * Force an evaluation of node depths before determining if we should limit state-based
+ * traversal or processing.
+ */
+
+ findAndMarkRootNode(queryParams);
+
+ verifyOutboundNeighbors();
+
+ for (ActiveInventoryNode cacheNode : nodeCache.values()) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "processCurrentNodeState(), nid = "
+ + cacheNode.getNodeId() + " , nodeDepth = " + cacheNode.getNodeDepth());
+ }
+
+ switch (cacheNode.getState()) {
+
+ case INIT: {
+ processInitialState(cacheNode.getNodeId());
+ break;
+ }
+
+ case READY:
+ case ERROR: {
+ break;
+ }
+
+ case SELF_LINK_UNRESOLVED: {
+ performSelfLinkResolve(cacheNode.getNodeId());
+ break;
+ }
+
+ case SELF_LINK_RESPONSE_UNPROCESSED: {
+ processSelfLinkResponse(cacheNode.getNodeId());
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ }
+
+ dumpThrottledWorkOnHandLog();
+
+ }
+
+
+
+ public int getNumSuccessfulLinkResolveFromCache() {
+ return numSuccessfulLinkResolveFromCache.get();
+ }
+
+ public int getNumSuccessfulLinkResolveFromFromServer() {
+ return numSuccessfulLinkResolveFromFromServer.get();
+ }
+
+ public int getNumFailedLinkResolve() {
+ return numFailedLinkResolve.get();
+ }
+
+ public InlineMessage getInlineMessage() {
+ return inlineMessage;
+ }
+
+ public void setInlineMessage(InlineMessage inlineMessage) {
+ this.inlineMessage = inlineMessage;
+ }
+
+ public ConcurrentHashMap<String, ActiveInventoryNode> getNodeCache() {
+ return nodeCache;
+ }
+
+
+
+ /**
+ * Process initial state.
+ *
+ * @param nodeId the node id
+ */
+ private void processInitialState(String nodeId) {
+
+ if (nodeId == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, "Node id is null");
+ return;
+ }
+
+ ActiveInventoryNode cachedNode = nodeCache.get(nodeId);
+
+ if (cachedNode == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE,
+ "Node cannot be" + " found for nodeId, " + nodeId);
+ return;
+ }
+
+ if (cachedNode.getSelfLink() == null) {
+
+ if (cachedNode.getNodeId() == null) {
+
+ /*
+       * A null self-link at the INIT state can be valid if this node is a complex attribute
+       * group that didn't originate from a self-link, but in that situation both the node id and
+       * node key should already be set.
+ */
+
+ cachedNode.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NODE_IDENTITY_ERROR);
+
+ }
+
+ if (cachedNode.getNodeId() != null) {
+
+ /*
+ * This should be the success path branch if the self-link is not set
+ */
+
+ cachedNode.changeState(NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK);
+
+ }
+
+ } else {
+
+ if (cachedNode.hasResolvedSelfLink()) {
+ LOG.error(AaiUiMsgs.INVALID_RESOLVE_STATE_DURING_INIT);
+ cachedNode.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.UNEXPECTED_STATE_TRANSITION);
+ } else {
+ cachedNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED,
+ NodeProcessingAction.SELF_LINK_SET);
+ }
+ }
+ }
+
+ /**
+   * Process searchable entity.
+   *
+   * @param searchTargetEntity the search target entity
+ * @param queryParams the query params
+ */
+ private void processSearchableEntity(SearchableEntity searchTargetEntity,
+ QueryParams queryParams) {
+
+ if (searchTargetEntity == null) {
+ return;
+ }
+
+ if (searchTargetEntity.getId() == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_SKELETON_NODE, "Failed to process skeleton"
+ + " node because nodeId is null for node, " + searchTargetEntity.getLink());
+ return;
+ }
+
+ ActiveInventoryNode newNode =
+ new ActiveInventoryNode(this.visualizationConfigs, oxmEntityLookup);
+
+ newNode.setNodeId(searchTargetEntity.getId());
+
+ newNode.setNodeDepth(0);
+ newNode.setRootNode(true);
+ LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId());
+ setRootNodeFound(true);
+
+ newNode.setSelfLink(searchTargetEntity.getLink());
+
+ nodeCache.putIfAbsent(newNode.getNodeId(), newNode);
+ }
+
+ private int getTotalWorkOnHand() {
+
+ int numNodesWithPendingStates = 0;
+
+ if (isRootNodeFound()) {
+ evaluateNodeDepths();
+ }
+
+ for (ActiveInventoryNode n : nodeCache.values()) {
+
+ switch (n.getState()) {
+
+ case READY:
+ case ERROR: {
+ // do nothing, these are our normal
+ // exit states
+ break;
+ }
+
+ default: {
+
+ /*
+ * for all other states, there is work to be done
+ */
+ numNodesWithPendingStates++;
+ }
+
+ }
+
+ }
+
+ return (aaiWorkOnHand.get() + numNodesWithPendingStates);
+
+ }
+
+ /**
+   * Processes outstanding work until none remains.
+   *
+   * @param queryParams the query params
+ */
+ private void processOutstandingWork(QueryParams queryParams) {
+
+ while (getTotalWorkOnHand() > 0) {
+
+ /*
+ * Force an evaluation of node depths before determining if we should limit state-based
+ * traversal or processing.
+ */
+
+ processCurrentNodeStates(queryParams);
+
+ try {
+ Thread.sleep(10);
+ } catch (InterruptedException exc) {
+ LOG.error(AaiUiMsgs.PROCESSING_LOOP_INTERUPTED, exc.getMessage());
+ return;
+ }
+
+ }
+
+ dumpThrottledWorkOnHandLog(true);
+
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * org.onap.aai.sparky.viewandinspect.services.VisualizationContext#processSelfLinks(org.onap.aai.
+ * sparky.sync.entity.SearchableEntity, org.onap.aai.sparky.viewandinspect.entity.QueryParams)
+ */
+ @Override
+ public void processSelfLinks(SearchableEntity searchtargetEntity, QueryParams queryParams) {
+
+ try {
+
+
+ if (searchtargetEntity == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR,
+ contextIdStr + " - Failed to" + " processSelfLinks, searchtargetEntity is null");
+ return;
+ }
+
+ long startTimeInMs = System.currentTimeMillis();
+
+ processSearchableEntity(searchtargetEntity, queryParams);
+
+ /*
+ * This method is blocking until we decouple it with a CountDownLatch await condition, and
+ * make the internal graph processing more event-y.
+ */
+
+ processOutstandingWork(queryParams);
+
+ long totalResolveTime = (System.currentTimeMillis() - startTimeInMs);
+
+ long opTime = System.currentTimeMillis() - startTimeInMs;
+
+ LOG.info(AaiUiMsgs.ALL_TRANSACTIONS_RESOLVED, String.valueOf(totalResolveTime),
+ String.valueOf(totalLinksRetrieved.get()), String.valueOf(opTime));
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.VISUALIZATION_OUTPUT_ERROR, exc.getMessage());
+ }
+
+ }
+
+ /**
+ * Verify outbound neighbors.
+ */
+ private void verifyOutboundNeighbors() {
+
+ for (ActiveInventoryNode srcNode : nodeCache.values()) {
+
+ for (String targetNodeId : srcNode.getOutboundNeighbors()) {
+
+ ActiveInventoryNode targetNode = nodeCache.get(targetNodeId);
+
+ if (targetNode != null && srcNode.getNodeId() != null) {
+
+ targetNode.addInboundNeighbor(srcNode.getNodeId());
+
+ if (this.visualizationConfigs.makeAllNeighborsBidirectional()) {
+ targetNode.addOutboundNeighbor(srcNode.getNodeId());
+ }
+
+ }
+
+ }
+
+ }
+
+ }
+
+ /**
+ * Evaluate node depths.
+ */
+ private void evaluateNodeDepths() {
+
+ int numChanged = -1;
+ int numAttempts = 0;
+
+ while (numChanged != 0) {
+
+ numChanged = 0;
+ numAttempts++;
+
+ for (ActiveInventoryNode srcNode : nodeCache.values()) {
+
+ if (srcNode.getState() == NodeProcessingState.INIT) {
+
+ /*
+ * this maybe the only state that we don't want to to process the node depth on, because
+ * typically it won't have any valid fields set, and it may remain in a partial state
+ * until we have processed the self-link.
+ */
+
+ continue;
+
+ }
+
+ for (String targetNodeId : srcNode.getOutboundNeighbors()) {
+ ActiveInventoryNode targetNode = nodeCache.get(targetNodeId);
+
+ if (targetNode != null) {
+
+ if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) {
+ numChanged++;
+ }
+ }
+ }
+
+ for (String targetNodeId : srcNode.getInboundNeighbors()) {
+ ActiveInventoryNode targetNode = nodeCache.get(targetNodeId);
+
+ if (targetNode != null) {
+
+ if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) {
+ numChanged++;
+ }
+ }
+ }
+ }
+
+ if (numAttempts >= MAX_DEPTH_EVALUATION_ATTEMPTS) {
+ LOG.info(AaiUiMsgs.MAX_EVALUATION_ATTEMPTS_EXCEEDED);
+ return;
+ }
+
+ }
+
+ if (LOG.isDebugEnabled()) {
+ if (numAttempts > 0) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Evaluate node depths completed in " + numAttempts + " attempts");
+ } else {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Evaluate node depths completed in 0 attempts because all nodes at correct depth");
+ }
+ }
+
+ }
+
+
+ /**
+ * Gets the entity type primary key name.
+ *
+ * @param entityType the entity type
+ * @return the entity type primary key name
+ */
+
+
+ private String getEntityTypePrimaryKeyName(String entityType) {
+
+ if (entityType == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE,
+ "node primary key" + " name because entity type is null");
+ return null;
+ }
+
+ OxmEntityDescriptor descriptor = oxmEntityLookup.getEntityDescriptors().get(entityType);
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE,
+ "oxm entity" + " descriptor for entityType = " + entityType);
+ return null;
+ }
+
+ List<String> pkeyNames = descriptor.getPrimaryKeyAttributeNames();
+
+ if (pkeyNames == null || pkeyNames.size() == 0) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE,
+ "node primary" + " key because descriptor primary key names is empty");
+ return null;
+ }
+
+ return NodeUtils.concatArray(pkeyNames, "/");
+
+ }
+
+}
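
For reference, a minimal standalone sketch (the Node type and tiny graph below are invented for
illustration, not part of this change) of the iterative depth-relaxation idea behind
evaluateNodeDepths(): starting from the root at depth 0, each pass lowers a neighbour's depth to
parentDepth + 1, and the loop repeats until no depth changes or an attempt cap, analogous to
MAX_DEPTH_EVALUATION_ATTEMPTS, is reached.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DepthSketch {

  static class Node {
    int depth = Integer.MAX_VALUE;         // "unknown" until a shorter path is found
    List<String> neighbors = new ArrayList<>();

    // Returns true only when the proposed depth improves on the current one.
    boolean changeDepth(int proposed) {
      if (proposed < depth) {
        depth = proposed;
        return true;
      }
      return false;
    }
  }

  public static void main(String[] args) {
    Map<String, Node> cache = new HashMap<>();
    for (String id : new String[] {"root", "a", "b", "c"}) {
      cache.put(id, new Node());
    }
    cache.get("root").depth = 0;
    cache.get("root").neighbors.add("a");
    cache.get("a").neighbors.add("b");
    cache.get("b").neighbors.add("c");

    int maxAttempts = 100;  // cap analogous to MAX_DEPTH_EVALUATION_ATTEMPTS
    int numChanged = -1;
    int attempts = 0;

    while (numChanged != 0 && attempts < maxAttempts) {
      numChanged = 0;
      attempts++;
      for (Node src : cache.values()) {
        if (src.depth == Integer.MAX_VALUE) {
          continue;  // depth not established yet, nothing to propagate
        }
        for (String targetId : src.neighbors) {
          if (cache.get(targetId).changeDepth(src.depth + 1)) {
            numChanged++;
          }
        }
      }
    }

    // c settles at depth 3 once the relaxation converges
    System.out.println("depth(c) = " + cache.get("c").depth + " after " + attempts + " passes");
  }
}
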
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContext.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContext.java
new file mode 100644
index 0000000..b2bdb43
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContext.java
@@ -0,0 +1,1631 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.services;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.http.client.utils.URIBuilder;
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.entity.SearchableEntity;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.viewandinspect.config.SparkyConstants;
+import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
+import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode;
+import org.onap.aai.sparky.viewandinspect.entity.InlineMessage;
+import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction;
+import org.onap.aai.sparky.viewandinspect.entity.QueryParams;
+import org.onap.aai.sparky.viewandinspect.entity.Relationship;
+import org.onap.aai.sparky.viewandinspect.entity.RelationshipData;
+import org.onap.aai.sparky.viewandinspect.entity.RelationshipList;
+import org.onap.aai.sparky.viewandinspect.entity.SelfLinkDeterminationTransaction;
+import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingAction;
+import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState;
+import org.onap.aai.sparky.viewandinspect.task.PerformNodeSelfLinkProcessingTask;
+import org.onap.aai.sparky.viewandinspect.task.PerformSelfLinkDeterminationTask;
+
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.PropertyNamingStrategy;
+
+/**
+ * The Class BaseVisualizationContext.
+ */
+public class BaseVisualizationContext implements VisualizationContext {
+
+ private static final int MAX_DEPTH_EVALUATION_ATTEMPTS = 100;
+ private static final String DEPTH_ALL_MODIFIER = "?depth=all";
+ private static final String NODES_ONLY_MODIFIER = "?nodes-only";
+ private static final String SERVICE_INSTANCE = "service-instance";
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(
+ BaseVisualizationContext.class);
+ private final ActiveInventoryAdapter aaiAdapter;
+
+ private int maxSelfLinkTraversalDepth;
+ private AtomicInteger numLinksDiscovered;
+ private AtomicInteger numSuccessfulLinkResolveFromCache;
+ private AtomicInteger numSuccessfulLinkResolveFromFromServer;
+ private AtomicInteger numFailedLinkResolve;
+ private AtomicInteger aaiWorkOnHand;
+
+ private VisualizationConfigs visualizationConfigs;
+
+ private AtomicInteger totalLinksRetrieved;
+
+ private final long contextId;
+ private final String contextIdStr;
+
+ private ObjectMapper mapper;
+ private InlineMessage inlineMessage = null;
+
+ private ExecutorService aaiExecutorService;
+ private OxmEntityLookup oxmEntityLookup;
+ private boolean rootNodeFound;
+
+ /*
+ * The node cache is intended to be a flat structure indexed by a primary key to avoid needlessly
+ * re-requesting the same self-links over-and-over again, to speed up the overall render time and
+ * more importantly to reduce the network cost of determining information we already have.
+ */
+ private ConcurrentHashMap<String, ActiveInventoryNode> nodeCache;
+
+ /**
+   * Instantiates a new base visualization context.
+   *
+ * @throws Exception the exception
+ */
+ public BaseVisualizationContext(long contextId, ActiveInventoryAdapter aaiAdapter,
+ ExecutorService aaiExecutorService, VisualizationConfigs visualizationConfigs,
+ OxmEntityLookup oxmEntityLookup)
+ throws Exception {
+
+ this.contextId = contextId;
+ this.contextIdStr = "[Context-Id=" + contextId + "]";
+ this.aaiAdapter = aaiAdapter;
+ this.aaiExecutorService = aaiExecutorService;
+ this.visualizationConfigs = visualizationConfigs;
+ this.oxmEntityLookup = oxmEntityLookup;
+
+ this.nodeCache = new ConcurrentHashMap<String, ActiveInventoryNode>();
+ this.numLinksDiscovered = new AtomicInteger(0);
+ this.totalLinksRetrieved = new AtomicInteger(0);
+ this.numSuccessfulLinkResolveFromCache = new AtomicInteger(0);
+ this.numSuccessfulLinkResolveFromFromServer = new AtomicInteger(0);
+ this.numFailedLinkResolve = new AtomicInteger(0);
+ this.aaiWorkOnHand = new AtomicInteger(0);
+
+ this.maxSelfLinkTraversalDepth = this.visualizationConfigs.getMaxSelfLinkTraversalDepth();
+
+ this.mapper = new ObjectMapper();
+ mapper.setSerializationInclusion(Include.NON_EMPTY);
+ mapper.setPropertyNamingStrategy(new PropertyNamingStrategy.KebabCaseStrategy());
+ this.rootNodeFound = false;
+ }
+
+ protected boolean isRootNodeFound() {
+ return rootNodeFound;
+ }
+
+ protected void setRootNodeFound(boolean rootNodeFound) {
+ this.rootNodeFound = rootNodeFound;
+ }
+
+ public long getContextId() {
+ return contextId;
+ }
+
+ /**
+   * A utility method for extracting all entity-type primary key values from a provided self-link
+   * and returning a set of generic-query API keys.
+   *
+   * @param link the self-link to extract key values from
+ * @return a list of key values that can be used for this entity with the AAI generic-query API
+ */
+ protected List<String> extractQueryParamsFromSelfLink(String link) {
+
+ List<String> queryParams = new ArrayList<String>();
+
+ if (link == null) {
+ LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR, "self link is null");
+ return queryParams;
+ }
+
+ Map<String, OxmEntityDescriptor> entityDescriptors = oxmEntityLookup.getEntityDescriptors();
+
+ try {
+
+ URIBuilder urlBuilder = new URIBuilder(link);
+ String urlPath = urlBuilder.getPath();
+
+ OxmEntityDescriptor descriptor = null;
+ String[] urlPathElements = urlPath.split("/");
+ List<String> primaryKeyNames = null;
+ int index = 0;
+ String entityType = null;
+
+ while (index < urlPathElements.length) {
+
+ descriptor = entityDescriptors.get(urlPathElements[index]);
+
+ if (descriptor != null) {
+ entityType = urlPathElements[index];
+ primaryKeyNames = descriptor.getPrimaryKeyAttributeNames();
+
+ /*
+           * Make sure that, from whatever index we matched the parent entity-type at, we can
+           * extract additional path elements for the primary key values.
+ */
+
+ if (index + primaryKeyNames.size() < urlPathElements.length) {
+
+ for (String primaryKeyName : primaryKeyNames) {
+ index++;
+ queryParams.add(entityType + "." + primaryKeyName + ":" + urlPathElements[index]);
+ }
+ } else {
+ LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR,
+                "Could not extract query parameters for entity-type = '" + entityType
+ + "' from self-link = " + link);
+ }
+ }
+
+ index++;
+ }
+
+ } catch (URISyntaxException exc) {
+
+ LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR,
+ "Error extracting query parameters from self-link = " + link + ". Error = "
+ + exc.getMessage());
+ }
+
+ return queryParams;
+
+ }
+
+ /**
+ * Decode complex attribute group.
+ *
+ * @param ain the ain
+ * @param attributeGroup the attribute group
+   * @return true if the attribute group was decoded successfully, false otherwise
+ */
+ public boolean decodeComplexAttributeGroup(ActiveInventoryNode ain, JsonNode attributeGroup) {
+
+ try {
+
+ Iterator<Entry<String, JsonNode>> entityArrays = attributeGroup.fields();
+ Entry<String, JsonNode> entityArray = null;
+
+ if (entityArrays == null) {
+ LOG.error(AaiUiMsgs.ATTRIBUTE_GROUP_FAILURE, attributeGroup.toString());
+ ain.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR);
+ return false;
+ }
+
+ while (entityArrays.hasNext()) {
+
+ entityArray = entityArrays.next();
+
+ String entityType = entityArray.getKey();
+ JsonNode entityArrayObject = entityArray.getValue();
+
+ if (entityArrayObject.isArray()) {
+
+ Iterator<JsonNode> entityCollection = entityArrayObject.elements();
+ JsonNode entity = null;
+ while (entityCollection.hasNext()) {
+ entity = entityCollection.next();
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "decodeComplexAttributeGroup(),"
+ + " entity = " + entity.toString());
+ }
+
+ /**
+ * Here's what we are going to do:
+ *
+ * <li>In the ActiveInventoryNode, on construction maintain a collection of queryParams
+ * that is added to for the purpose of discovering parent->child hierarchies.
+ *
+ * <li>When we hit this block of the code then we'll use the queryParams to feed the
+ * generic query to resolve the self-link asynchronously.
+ *
+ * <li>Upon successful link determination, then and only then will we create a new node
+ * in the nodeCache and process the child
+ *
+ */
+
+ ActiveInventoryNode newNode = new ActiveInventoryNode(this.visualizationConfigs, oxmEntityLookup);
+ newNode.setEntityType(entityType);
+
+ /*
+ * This is partially a lie because we actually don't have a self-link for complex nodes
+ * discovered in this way.
+ */
+ newNode.setSelfLinkProcessed(true);
+ newNode.changeState(NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED,
+ NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_OK);
+
+ /*
+ * copy parent query params into new child
+ */
+
+ if (SERVICE_INSTANCE.equals(entityType)) {
+
+ /*
+ * 1707 AAI has an issue being tracked with AAI-8932 where the generic-query cannot be
+ * resolved if all the service-instance path keys are provided. The query only works
+                     * if only the service-instance key and value are passed due to a historical reason.
+ * A fix is being worked on for 1707, and when it becomes available we can revert this
+ * small change.
+ */
+
+ newNode.clearQueryParams();
+
+ } else {
+
+ /*
+ * For all other entity-types we want to copy the parent query parameters into the new node
+ * query parameters.
+ */
+
+ for (String queryParam : ain.getQueryParams()) {
+ newNode.addQueryParam(queryParam);
+ }
+
+ }
+
+
+ if (!addComplexGroupToNode(newNode, entity)) {
+ LOG.error(AaiUiMsgs.ATTRIBUTE_GROUP_FAILURE, "Failed to add child to parent for child = " + entity.toString());
+ }
+
+ if (!addNodeQueryParams(newNode)) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "Error determining node id and key for node = " + newNode.dumpNodeTree(true)
+ + " skipping relationship processing");
+ newNode.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.NODE_IDENTITY_ERROR);
+ return false;
+ } else {
+
+ newNode.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED,
+ NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_OK);
+
+ }
+
+
+ /*
+ * Order matters for the query params. We need to set the parent ones before the child
+ * node
+ */
+
+ String selfLinkQuery =
+ aaiAdapter.getGenericQueryForSelfLink(entityType, newNode.getQueryParams());
+
+ /**
+ * <li>get the self-link
+ * <li>add it to the new node
+ * <li>generate node id
+ * <li>add node to node cache
+ * <li>add node id to parent outbound links list
+ * <li>process node children (should be automatic) (but don't query and resolve
+ * self-link as we already have all the data)
+ */
+
+ SelfLinkDeterminationTransaction txn = new SelfLinkDeterminationTransaction();
+
+ txn.setQueryString(selfLinkQuery);
+ txn.setNewNode(newNode);
+ txn.setParentNodeId(ain.getNodeId());
+ aaiWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformSelfLinkDeterminationTask(txn, null, aaiAdapter),
+ aaiExecutorService).whenComplete((nodeTxn, error) -> {
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_DETERMINATION_FAILED_GENERIC, selfLinkQuery);
+ } else {
+
+ OperationResult opResult = nodeTxn.getOpResult();
+
+ ActiveInventoryNode newChildNode = txn.getNewNode();
+
+ if (opResult != null && opResult.wasSuccessful()) {
+
+ if (!opResult.wasSuccessful()) {
+ numFailedLinkResolve.incrementAndGet();
+ }
+
+ if (opResult.isFromCache()) {
+ numSuccessfulLinkResolveFromCache.incrementAndGet();
+ } else {
+ numSuccessfulLinkResolveFromFromServer.incrementAndGet();
+ }
+
+ /*
+ * extract the self-link from the operational result.
+ */
+
+ Collection<JsonNode> entityLinks = new ArrayList<JsonNode>();
+ JsonNode genericQueryResult = null;
+ try {
+ genericQueryResult =
+ NodeUtils.convertJsonStrToJsonNode(nodeTxn.getOpResult().getResult());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), exc.getMessage());
+ }
+
+ NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link",
+ entityLinks);
+
+ String selfLink = null;
+
+ if (entityLinks.size() != 1) {
+
+ LOG.error(AaiUiMsgs.SELF_LINK_DETERMINATION_FAILED_UNEXPECTED_LINKS, String.valueOf(entityLinks.size()));
+
+ } else {
+ selfLink = ((JsonNode) entityLinks.toArray()[0]).asText();
+ selfLink = ActiveInventoryAdapter.extractResourcePath(selfLink);
+
+ newChildNode.setSelfLink(selfLink);
+ newChildNode.setNodeId(NodeUtils.generateUniqueShaDigest(selfLink));
+
+ String uri = NodeUtils.calculateEditAttributeUri(selfLink);
+ if (uri != null) {
+ newChildNode.addProperty(SparkyConstants.URI_ATTR_NAME, uri);
+ }
+
+ ActiveInventoryNode parent = nodeCache.get(txn.getParentNodeId());
+
+ if (parent != null) {
+ parent.addOutboundNeighbor(newChildNode.getNodeId());
+ newChildNode.addInboundNeighbor(parent.getNodeId());
+ }
+
+ newChildNode.setSelfLinkPendingResolve(false);
+ newChildNode.setSelfLinkProcessed(true);
+ newChildNode.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK);
+
+ nodeCache.putIfAbsent(newChildNode.getNodeId(), newChildNode);
+
+ }
+
+ } else {
+ LOG.error(AaiUiMsgs.SELF_LINK_RETRIEVAL_FAILED, txn.getQueryString(),
+ String.valueOf(nodeTxn.getOpResult().getResultCode()), nodeTxn.getOpResult().getResult());
+ newChildNode.setSelflinkRetrievalFailure(true);
+ newChildNode.setSelfLinkProcessed(true);
+ newChildNode.setSelfLinkPendingResolve(false);
+
+ newChildNode.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_DETERMINATION_ERROR);
+
+ }
+
+ }
+
+ aaiWorkOnHand.decrementAndGet();
+
+ });
+
+ }
+
+ return true;
+
+ } else {
+ LOG.error(AaiUiMsgs.UNHANDLED_OBJ_TYPE_FOR_ENTITY_TYPE, entityType);
+ }
+
+ }
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Exception caught while"
+ + " decoding complex attribute group - " + exc.getMessage());
+ }
+
+ return false;
+
+ }
+
+ /**
+ * Process self link response.
+ *
+ * @param nodeId the node id
+ */
+ private void processSelfLinkResponse(String nodeId) {
+
+ if (nodeId == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Cannot process self link"
+ + " response because nodeId is null");
+ return;
+ }
+
+ ActiveInventoryNode ain = nodeCache.get(nodeId);
+
+ if (ain == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Cannot process self link response"
+ + " because can't find node for id = " + nodeId);
+ return;
+ }
+
+ JsonNode jsonNode = null;
+
+ try {
+ jsonNode = mapper.readValue(ain.getOpResult().getResult(), JsonNode.class);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to marshal json"
+ + " response str into JsonNode with error, " + exc.getLocalizedMessage());
+ ain.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR);
+ return;
+ }
+
+ if (jsonNode == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse json node str."
+          + " Parse resulted in a null value.");
+ ain.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR);
+ return;
+ }
+
+ Iterator<Entry<String, JsonNode>> fieldNames = jsonNode.fields();
+ Entry<String, JsonNode> field = null;
+
+ RelationshipList relationshipList = null;
+
+ while (fieldNames.hasNext()) {
+
+ field = fieldNames.next();
+ String fieldName = field.getKey();
+
+ if ("relationship-list".equals(fieldName)) {
+
+ try {
+ relationshipList = mapper.readValue(field.getValue().toString(), RelationshipList.class);
+
+ if (relationshipList != null) {
+ ain.addRelationshipList(relationshipList);
+ }
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse relationship-list"
+ + " attribute. Parse resulted in error, " + exc.getLocalizedMessage());
+ ain.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR);
+ return;
+ }
+
+ } else {
+
+ JsonNode nodeValue = field.getValue();
+
+ if (nodeValue != null && nodeValue.isValueNode()) {
+
+ if (oxmEntityLookup.getEntityDescriptors().get(fieldName) == null) {
+
+ /*
+ * entity property name is not an entity, thus we can add this property name and value
+ * to our property set
+ */
+
+ ain.addProperty(fieldName, nodeValue.asText());
+
+ }
+
+ } else {
+
+ if (nodeValue.isArray()) {
+
+ if (oxmEntityLookup.getEntityDescriptors().get(fieldName) == null) {
+
+ /*
+ * entity property name is not an entity, thus we can add this property name and value
+ * to our property set
+ */
+
+ ain.addProperty(field.getKey(), nodeValue.toString());
+
+ }
+
+ } else {
+
+ ain.addComplexGroup(nodeValue);
+
+ }
+
+ }
+ }
+
+ }
+
+ String uri = NodeUtils.calculateEditAttributeUri(ain.getSelfLink());
+ if (uri != null) {
+ ain.addProperty(SparkyConstants.URI_ATTR_NAME, uri);
+ }
+
+ /*
+ * We need a special behavior for intermediate entities from the REST model
+ *
+     * Tenants are not top level entities, and when we want to visualize
+ * their children, we need to construct keys that include the parent entity query
+ * keys, the current entity type keys, and the child keys. We'll always have the
+ * current entity and children, but never the parent entity in the current (1707) REST
+ * data model.
+ *
+ * We have two possible solutions:
+ *
+ * 1) Try to use the custom-query approach to learn about the entity keys
+ * - this could be done, but it could be very expensive for large objects. When we do the first
+ * query to get a tenant, it will list all the in and out edges related to this entity,
+ * there is presently no way to filter this. But the approach could be made to work and it would be
+ * somewhat data-model driven, other than the fact that we have to first realize that the entity
+ * that is being searched for is not top-level entity. Once we have globally unique ids for resources
+ * this logic will not be needed and everything will be simpler. The only reason we are in this logic
+ * at all is to be able to calculate a url for the child entities so we can hash it to generate
+ * a globally unique id that can be safely used for the node.
+ *
+     * 2) Extract the keys from the pathed self-link.
+ * This is a bad solution and I don't like it but it will be fast for all resource types, as the
+ * information is already encoded in the URI. When we get to a point where we switch to a better
+ * globally unique entity identity model, then a lot of the code being used to calculate an entity url
+ * to in-turn generate a deterministic globally unique id will disappear.
+ *
+ *
+ * right now we have the following:
+ *
+ * - cloud-regions/cloud-region/{cloud-region-id}/{cloud-owner-id}/tenants/tenant/{tenant-id}
+ *
+ */
+
+ /*
+ * For all entity types use the self-link extraction method to be consistent. Once we have a
+ * globally unique identity mechanism for entities, this logic can be revisited.
+ */
+ ain.clearQueryParams();
+ ain.addQueryParams(extractQueryParamsFromSelfLink(ain.getSelfLink()));
+ ain.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK);
+
+
+ }
+
+ /**
+ * Perform self link resolve.
+ *
+ * @param nodeId the node id
+ */
+ private void performSelfLinkResolve(String nodeId) {
+
+ if (nodeId == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Resolve of self-link"
+ + " has been skipped because provided nodeId is null");
+ return;
+ }
+
+ ActiveInventoryNode ain = nodeCache.get(nodeId);
+
+ if (ain == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Failed to find node with id, " + nodeId
+ + ", from node cache. Resolve self-link method has been skipped.");
+ return;
+ }
+
+ if (!ain.isSelfLinkPendingResolve()) {
+
+ ain.setSelfLinkPendingResolve(true);
+
+ // kick off async self-link resolution
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "About to process node in SELF_LINK_UNPROCESSED State, link = " + ain.getSelfLink());
+ }
+
+ numLinksDiscovered.incrementAndGet();
+
+ String depthModifier = DEPTH_ALL_MODIFIER;
+
+ /*
+ * If the current node is the search target, we want to see everything the node has to offer
+ * from the self-link and not filter it to a single node.
+ */
+
+ if (visualizationConfigs.getShallowEntities().contains(ain.getEntityType())
+ && !ain.isRootNode()) {
+ depthModifier = NODES_ONLY_MODIFIER;
+ }
+
+ NodeProcessingTransaction txn = new NodeProcessingTransaction();
+ txn.setProcessingNode(ain);
+ txn.setRequestParameters(depthModifier);
+ aaiWorkOnHand.incrementAndGet();
+ supplyAsync(
+ new PerformNodeSelfLinkProcessingTask(txn, depthModifier, aaiAdapter),
+ aaiExecutorService).whenComplete((nodeTxn, error) -> {
+
+ if (error != null) {
+
+ /*
+ * an error processing the self link should probably result in the node processing
+ * state shifting to ERROR
+ */
+
+ nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true);
+
+ nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESOLVE_ERROR);
+
+ nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false);
+
+ } else {
+
+ totalLinksRetrieved.incrementAndGet();
+
+ OperationResult opResult = nodeTxn.getOpResult();
+
+ if (opResult != null && opResult.wasSuccessful()) {
+
+ if (!opResult.wasSuccessful()) {
+ numFailedLinkResolve.incrementAndGet();
+ }
+
+ if (opResult.isFromCache()) {
+ numSuccessfulLinkResolveFromCache.incrementAndGet();
+ } else {
+ numSuccessfulLinkResolveFromFromServer.incrementAndGet();
+ }
+
+ // success path
+ nodeTxn.getProcessingNode().setOpResult(opResult);
+ nodeTxn.getProcessingNode().changeState(
+ NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED,
+ NodeProcessingAction.SELF_LINK_RESOLVE_OK);
+
+ nodeTxn.getProcessingNode().setSelfLinkProcessed(true);
+ nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false);
+
+ } else {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Self Link retrieval for link,"
+ + txn.getSelfLinkWithModifiers() + ", failed with error code,"
+ + nodeTxn.getOpResult().getResultCode() + ", and message,"
+ + nodeTxn.getOpResult().getResult());
+
+ nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true);
+ nodeTxn.getProcessingNode().setSelfLinkProcessed(true);
+
+ nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.SELF_LINK_RESOLVE_ERROR);
+
+ nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false);
+
+ }
+ }
+
+ aaiWorkOnHand.decrementAndGet();
+
+ });
+
+ }
+
+ }
+
+
+ /**
+ * Process neighbors.
+ *
+ * @param nodeId the node id
+ */
+ private void processNeighbors(String nodeId) {
+
+ if (nodeId == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESS_NEIGHBORS_ERROR, "Failed to process"
+ + " neighbors because nodeId is null.");
+ return;
+ }
+
+ ActiveInventoryNode ain = nodeCache.get(nodeId);
+
+ if (ain == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESS_NEIGHBORS_ERROR, "Failed to process"
+ + " neighbors because node could not be found in nodeCache with id, " + nodeId);
+ return;
+ }
+
+ /*
+ * process complex attribute and relationships
+ */
+
+ boolean neighborsProcessedSuccessfully = true;
+
+ for (JsonNode n : ain.getComplexGroups()) {
+ neighborsProcessedSuccessfully &= decodeComplexAttributeGroup(ain, n);
+ }
+
+ for (RelationshipList relationshipList : ain.getRelationshipLists()) {
+ neighborsProcessedSuccessfully &= addSelfLinkRelationshipChildren(ain, relationshipList);
+ }
+
+
+ if (neighborsProcessedSuccessfully) {
+ ain.changeState(NodeProcessingState.READY, NodeProcessingAction.NEIGHBORS_PROCESSED_OK);
+ } else {
+ ain.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR);
+ }
+
+
+ /*
+ * If neighbors fail to process, there is already a call to change the state within the
+ * relationship and neighbor processing functions.
+ */
+
+ }
+
+ /**
+ * Find and mark root node.
+ *
+ * @param queryParams the query params
+ */
+ private void findAndMarkRootNode(QueryParams queryParams) {
+
+ if (isRootNodeFound()) {
+ return;
+ }
+
+ for (ActiveInventoryNode cacheNode : nodeCache.values()) {
+
+ if (queryParams.getSearchTargetNodeId().equals(cacheNode.getNodeId())) {
+ cacheNode.setNodeDepth(0);
+ cacheNode.setRootNode(true);
+ LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId());
+ setRootNodeFound(true);
+ }
+ }
+
+ }
+
+ /**
+ * Process current node states.
+ *
+   * @param queryParams the query params
+ */
+ private void processCurrentNodeStates(QueryParams queryParams) {
+ /*
+ * Force an evaluation of node depths before determining if we should limit state-based
+ * traversal or processing.
+ */
+
+ findAndMarkRootNode(queryParams);
+
+ verifyOutboundNeighbors();
+
+ for (ActiveInventoryNode cacheNode : nodeCache.values()) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "processCurrentNodeState(), nid = "
+ + cacheNode.getNodeId() + " , nodeDepth = " + cacheNode.getNodeDepth());
+ }
+
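+      /*
+       * Dispatch on the node's lifecycle state. Each pass of the outer processing loop moves a
+       * node at most one step along the INIT -> SELF_LINK_UNRESOLVED ->
+       * SELF_LINK_RESPONSE_UNPROCESSED -> NEIGHBORS_UNPROCESSED -> READY (or ERROR) progression.
+       */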
+ switch (cacheNode.getState()) {
+
+ case INIT: {
+ processInitialState(cacheNode.getNodeId());
+ break;
+ }
+
+ case READY:
+ case ERROR: {
+ break;
+ }
+
+ case SELF_LINK_UNRESOLVED: {
+ performSelfLinkResolve(cacheNode.getNodeId());
+ break;
+ }
+
+ case SELF_LINK_RESPONSE_UNPROCESSED: {
+ processSelfLinkResponse(cacheNode.getNodeId());
+ break;
+ }
+
+ case NEIGHBORS_UNPROCESSED: {
+
+ /*
+ * We use the rootNodeDiscovered flag to ignore depth retrieval thresholds until the root
+ * node is identified. Then the evaluative depth calculations should re-balance the graph
+ * around the root node.
+ */
+
+ if (!isRootNodeFound() || cacheNode.getNodeDepth() < this.visualizationConfigs
+ .getMaxSelfLinkTraversalDepth()) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "processCurrentNodeState() -- Node at max depth,"
+ + " halting processing at current state = -- "
+ + cacheNode.getState() + " nodeId = " + cacheNode.getNodeId());
+ }
+
+ processNeighbors(cacheNode.getNodeId());
+
+ }
+
+ break;
+ }
+ default:
+ break;
+ }
+
+ }
+
+ }
+
+ /**
+ * Adds the complex group to node.
+ *
+ * @param targetNode the target node
+ * @param attributeGroup the attribute group
+ * @return true, if successful
+ */
+ private boolean addComplexGroupToNode(ActiveInventoryNode targetNode, JsonNode attributeGroup) {
+
+ if (attributeGroup == null) {
+ targetNode.changeState(NodeProcessingState.ERROR,
+          NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_ERROR);
+ return false;
+ }
+
+ RelationshipList relationshipList = null;
+
+ if (attributeGroup.isObject()) {
+
+ Iterator<Entry<String, JsonNode>> fields = attributeGroup.fields();
+ Entry<String, JsonNode> field = null;
+ String fieldName;
+ JsonNode fieldValue;
+
+ while (fields.hasNext()) {
+ field = fields.next();
+ fieldName = field.getKey();
+ fieldValue = field.getValue();
+
+ if (fieldValue.isObject()) {
+
+ if (fieldName.equals("relationship-list")) {
+
+ try {
+ relationshipList =
+ mapper.readValue(field.getValue().toString(), RelationshipList.class);
+
+ if (relationshipList != null) {
+ targetNode.addRelationshipList(relationshipList);
+ }
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse"
+ + " relationship-list attribute. Parse resulted in error, "
+ + exc.getLocalizedMessage());
+ targetNode.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_ERROR);
+ return false;
+ }
+
+ } else {
+ targetNode.addComplexGroup(fieldValue);
+ }
+
+ } else if (fieldValue.isArray()) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Unexpected array type with a key = " + fieldName);
+ }
+ } else if (fieldValue.isValueNode()) {
+ if (oxmEntityLookup.getEntityDescriptors().get(field.getKey()) == null) {
+ /*
+ * property key is not an entity type, add it to our property set.
+ */
+ targetNode.addProperty(field.getKey(), fieldValue.asText());
+ }
+
+ }
+ }
+
+ } else if (attributeGroup.isArray()) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Unexpected array type for attributeGroup = " + attributeGroup);
+ }
+ } else if (attributeGroup.isValueNode()) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Unexpected value type for attributeGroup = " + attributeGroup);
+ }
+ }
+
+ return true;
+ }
+
+ public int getNumSuccessfulLinkResolveFromCache() {
+ return numSuccessfulLinkResolveFromCache.get();
+ }
+
+ public int getNumSuccessfulLinkResolveFromFromServer() {
+ return numSuccessfulLinkResolveFromFromServer.get();
+ }
+
+ public int getNumFailedLinkResolve() {
+ return numFailedLinkResolve.get();
+ }
+
+ public InlineMessage getInlineMessage() {
+ return inlineMessage;
+ }
+
+ public void setInlineMessage(InlineMessage inlineMessage) {
+ this.inlineMessage = inlineMessage;
+ }
+
+ public void setMaxSelfLinkTraversalDepth(int depth) {
+ this.maxSelfLinkTraversalDepth = depth;
+ }
+
+ public int getMaxSelfLinkTraversalDepth() {
+ return this.maxSelfLinkTraversalDepth;
+ }
+
+ public ConcurrentHashMap<String, ActiveInventoryNode> getNodeCache() {
+ return nodeCache;
+ }
+
+ /**
+ * Gets the relationship primary key values.
+ *
+   * @param r the relationship
+   * @param entityType the entity type
+   * @param pkeyNames the primary key attribute names
+ * @return the relationship primary key values
+ */
+ private String getRelationshipPrimaryKeyValues(Relationship r, String entityType,
+ List<String> pkeyNames) {
+
+ StringBuilder sb = new StringBuilder(64);
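+    // Composite primary keys are joined with "/" in declaration order, e.g. a two-part key
+    // becomes "value1/value2" (values shown here are illustrative only).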
+
+ if (pkeyNames.size() > 0) {
+ String primaryKey = extractKeyValueFromRelationData(r, entityType + "." + pkeyNames.get(0));
+ if (primaryKey != null) {
+
+ sb.append(primaryKey);
+
+ } else {
+ // this should be a fatal error because unless we can
+ // successfully retrieve all the expected keys we'll end up
+ // with a garbage node
+ LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract"
+ + " keyName, " + entityType + "." + pkeyNames.get(0)
+ + ", from relationship data, " + r.toString());
+ return null;
+ }
+
+ for (int i = 1; i < pkeyNames.size(); i++) {
+
+ String kv = extractKeyValueFromRelationData(r, entityType + "." + pkeyNames.get(i));
+ if (kv != null) {
+ sb.append("/").append(kv);
+ } else {
+ // this should be a fatal error because unless we can
+ // successfully retrieve all the expected keys we'll end up
+ // with a garbage node
+ LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: failed to extract keyName, "
+ + entityType + "." + pkeyNames.get(i)
+ + ", from relationship data, " + r.toString());
+ return null;
+ }
+ }
+
+ return sb.toString();
+
+ }
+
+ return null;
+
+ }
+
+ /**
+ * Extract key value from relation data.
+ *
+   * @param r the relationship
+   * @param keyName the fully qualified key name (entityType.keyName)
+   * @return the key value, or null if the key is not present in the relationship data
+ */
+ private String extractKeyValueFromRelationData(Relationship r, String keyName) {
+
+ RelationshipData[] rdList = r.getRelationshipData();
+
+ for (RelationshipData relData : rdList) {
+
+ if (relData.getRelationshipKey().equals(keyName)) {
+ return relData.getRelationshipValue();
+ }
+ }
+
+ return null;
+ }
+
+ /**
+   * Adds node query params derived from the node's primary key attributes.
+   *
+   * @param ain the active inventory node
+ * @return true, if successful
+ */
+ private boolean addNodeQueryParams(ActiveInventoryNode ain) {
+
+ if (ain == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "ActiveInventoryNode is null");
+ return false;
+ }
+
+ List<String> pkeyNames =
+ oxmEntityLookup.getEntityDescriptors().get(ain.getEntityType()).getPrimaryKeyAttributeNames();
+
+ if (pkeyNames == null || pkeyNames.size() == 0) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "Primary key names is empty");
+ return false;
+ }
+
+ StringBuilder sb = new StringBuilder(64);
+
+ if (pkeyNames.size() > 0) {
+ String primaryKey = ain.getProperties().get(pkeyNames.get(0));
+ if (primaryKey != null) {
+ sb.append(primaryKey);
+ } else {
+ // this should be a fatal error because unless we can
+ // successfully retrieve all the expected keys we'll end up
+ // with a garbage node
+ LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract keyName, "
+ + pkeyNames.get(0) + ", from entity properties");
+ return false;
+ }
+
+ for (int i = 1; i < pkeyNames.size(); i++) {
+
+ String kv = ain.getProperties().get(pkeyNames.get(i));
+ if (kv != null) {
+ sb.append("/").append(kv);
+ } else {
+ // this should be a fatal error because unless we can
+ // successfully retrieve all the expected keys we'll end up
+ // with a garbage node
+ LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract keyName, "
+ + pkeyNames.get(i) + ", from entity properties");
+ return false;
+ }
+ }
+
+ /*final String nodeId = NodeUtils.generateUniqueShaDigest(ain.getEntityType(),
+ NodeUtils.concatArray(pkeyNames, "/"), sb.toString());*/
+
+ //ain.setNodeId(nodeId);
+ ain.setPrimaryKeyName(NodeUtils.concatArray(pkeyNames, "/"));
+ ain.setPrimaryKeyValue(sb.toString());
+
+ if (ain.getEntityType() != null && ain.getPrimaryKeyName() != null
+ && ain.getPrimaryKeyValue() != null) {
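+        // Query param takes the form "<entity-type>.<primary-key-name>:<primary-key-value>",
+        // e.g. "generic-vnf.vnf-id:example-vnf-id" (illustrative values only).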
+ ain.addQueryParam(
+ ain.getEntityType() + "." + ain.getPrimaryKeyName() + ":" + ain.getPrimaryKeyValue());
+ }
+ return true;
+
+ }
+
+ return false;
+
+ }
+
+ /**
+ * Adds the self link relationship children.
+ *
+ * @param processingNode the processing node
+ * @param relationshipList the relationship list
+ * @return true, if successful
+ */
+ private boolean addSelfLinkRelationshipChildren(ActiveInventoryNode processingNode,
+ RelationshipList relationshipList) {
+
+ if (relationshipList == null) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "No relationships added to parent node = "
+ + processingNode.getNodeId() + " because relationshipList is empty");
+ processingNode.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR);
+ return false;
+ }
+
+ Relationship[] relationshipArray = relationshipList.getRelationshipList();
+ OxmEntityDescriptor descriptor = null;
+ String repairedSelfLink = null;
+
+ if (relationshipArray != null) {
+
+ ActiveInventoryNode newNode = null;
+ String resourcePath = null;
+
+ for (Relationship r : relationshipArray) {
+
+ resourcePath = ActiveInventoryAdapter.extractResourcePath(r.getRelatedLink());
+
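+        /*
+         * The neighbor's node id is a SHA digest of its resource path, so the same A&AI entity
+         * always maps to the same cache key regardless of how many relationships reference it;
+         * the putIfAbsent below then de-duplicates repeat visits.
+         */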
+ String nodeId = NodeUtils.generateUniqueShaDigest(resourcePath);
+
+ if (nodeId == null) {
+
+ LOG.error(AaiUiMsgs.SKIPPING_RELATIONSHIP, r.toString());
+ processingNode.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.NODE_IDENTITY_ERROR);
+ return false;
+ }
+
+ newNode = new ActiveInventoryNode(this.visualizationConfigs, oxmEntityLookup);
+
+ String entityType = r.getRelatedTo();
+
+ if (r.getRelationshipData() != null) {
+ for (RelationshipData rd : r.getRelationshipData()) {
+ newNode.addQueryParam(rd.getRelationshipKey() + ":" + rd.getRelationshipValue());
+ }
+ }
+
+ descriptor = oxmEntityLookup.getEntityDescriptors().get(r.getRelatedTo());
+
+ newNode.setNodeId(nodeId);
+ newNode.setEntityType(entityType);
+ newNode.setSelfLink(resourcePath);
+
+ processingNode.addOutboundNeighbor(nodeId);
+
+ if (descriptor != null) {
+
+ List<String> pkeyNames = descriptor.getPrimaryKeyAttributeNames();
+
+ newNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED,
+ NodeProcessingAction.SELF_LINK_SET);
+
+ newNode.setPrimaryKeyName(NodeUtils.concatArray(pkeyNames, "/"));
+
+ String primaryKeyValues = getRelationshipPrimaryKeyValues(r, entityType, pkeyNames);
+ newNode.setPrimaryKeyValue(primaryKeyValues);
+
+ } else {
+
+ LOG.error(AaiUiMsgs.VISUALIZATION_OUTPUT_ERROR,
+ "Failed to parse entity because OXM descriptor could not be found for type = "
+ + r.getRelatedTo());
+
+ newNode.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR);
+
+ }
+
+ if (nodeCache.putIfAbsent(nodeId, newNode) != null) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Failed to add node to nodeCache because it already exists. Node id = "
+ + newNode.getNodeId());
+ }
+ }
+
+ }
+
+ }
+
+ return true;
+
+ }
+
+ /**
+ * Process initial state.
+ *
+ * @param nodeId the node id
+ */
+ private void processInitialState(String nodeId) {
+
+ if (nodeId == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, "Node id is null");
+ return;
+ }
+
+ ActiveInventoryNode cachedNode = nodeCache.get(nodeId);
+
+ if (cachedNode == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, "Node cannot be"
+ + " found for nodeId, " + nodeId);
+ return;
+ }
+
+ if (cachedNode.getSelfLink() == null) {
+
+ if (cachedNode.getNodeId() == null ) {
+
+ /*
+       * A null self link at the INIT state can be valid if this node is a complex attribute
+       * group that didn't originate from a self-link, but in that situation both the node id
+       * and node key should already be set. If the node id is also missing, the node cannot
+       * be identified, so flag it as an error.
+ */
+
+ cachedNode.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NODE_IDENTITY_ERROR);
+
+ }
+
+ if (cachedNode.getNodeId() != null) {
+
+ /*
+ * This should be the success path branch if the self-link is not set
+ */
+
+ cachedNode.changeState(NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED,
+ NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK);
+
+ }
+
+ } else {
+
+ if (cachedNode.hasResolvedSelfLink()) {
+ LOG.error(AaiUiMsgs.INVALID_RESOLVE_STATE_DURING_INIT);
+ cachedNode.changeState(NodeProcessingState.ERROR,
+ NodeProcessingAction.UNEXPECTED_STATE_TRANSITION);
+ } else {
+ cachedNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED,
+ NodeProcessingAction.SELF_LINK_SET);
+ }
+ }
+ }
+
+ /**
+   * Process searchable entity.
+   *
+   * @param searchTargetEntity the search target entity
+ * @param queryParams the query params
+ */
+ private void processSearchableEntity(SearchableEntity searchTargetEntity, QueryParams queryParams) {
+
+ if (searchTargetEntity == null) {
+ return;
+ }
+
+ if (searchTargetEntity.getId() == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_SKELETON_NODE, "Failed to process skeleton"
+ + " node because nodeId is null for node, " + searchTargetEntity.getLink());
+ return;
+ }
+
+ ActiveInventoryNode newNode = new ActiveInventoryNode(this.visualizationConfigs, oxmEntityLookup);
+
+ newNode.setNodeId(searchTargetEntity.getId());
+ newNode.setEntityType(searchTargetEntity.getEntityType());
+ newNode.setPrimaryKeyName(getEntityTypePrimaryKeyName(searchTargetEntity.getEntityType()));
+ newNode.setPrimaryKeyValue(searchTargetEntity.getEntityPrimaryKeyValue());
+
+ if (newNode.getEntityType() != null && newNode.getPrimaryKeyName() != null
+ && newNode.getPrimaryKeyValue() != null) {
+ newNode.addQueryParam(
+ newNode.getEntityType() + "." + newNode.getPrimaryKeyName() + ":" + newNode.getPrimaryKeyValue());
+ }
+ /*
+ * This code may need some explanation. In any graph there will be a single root node. The root
+ * node is really the center of the universe, and for now, we are tagging the search target as
+ * the root node. Everything else in the visualization of the graph will be centered around this
+ * node as the focal point of interest.
+ *
+     * Due to its special nature, there will only ever be one root node, and its node depth will
+     * always be equal to zero.
+ */
+
+ if (!isRootNodeFound()) {
+ if (queryParams.getSearchTargetNodeId().equals(newNode.getNodeId())) {
+ newNode.setNodeDepth(0);
+ newNode.setRootNode(true);
+ LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId());
+ setRootNodeFound(true);
+ }
+ }
+
+ newNode.setSelfLink(searchTargetEntity.getLink());
+
+ nodeCache.putIfAbsent(newNode.getNodeId(), newNode);
+ }
+
+ private int getTotalWorkOnHand() {
+
+ int numNodesWithPendingStates = 0;
+
+ if( isRootNodeFound()) {
+ evaluateNodeDepths();
+ }
+
+ for (ActiveInventoryNode n : nodeCache.values()) {
+
+ switch (n.getState()) {
+
+ case READY:
+ case ERROR: {
+ // do nothing, these are our normal
+ // exit states
+ break;
+ }
+
+ case NEIGHBORS_UNPROCESSED: {
+
+ if (n.getNodeDepth() < this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) {
+ /*
+ * Only process our neighbors relationships if our current depth is less than the max
+ * depth
+ */
+ numNodesWithPendingStates++;
+ }
+
+ break;
+ }
+
+ default: {
+
+ /*
+ * for all other states, there is work to be done
+ */
+ numNodesWithPendingStates++;
+ }
+
+ }
+
+ }
+
+ LOG.debug(AaiUiMsgs.OUTSTANDING_WORK_PENDING_NODES,
+ String.valueOf(numNodesWithPendingStates));
+
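+    /*
+     * Total outstanding work is the number of self-link transactions still in flight against
+     * A&AI (aaiWorkOnHand) plus the number of cached nodes with state transitions pending.
+     */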
+ int totalWorkOnHand = aaiWorkOnHand.get() + numNodesWithPendingStates;
+
+ return totalWorkOnHand;
+
+ }
+
+ /**
+   * Processes outstanding work until all nodes have reached a terminal state.
+   *
+   * @param queryParams the query params
+ */
+ private void processOutstandingWork(QueryParams queryParams) {
+
+ while (getTotalWorkOnHand() > 0) {
+
+ /*
+ * Force an evaluation of node depths before determining if we should limit state-based
+ * traversal or processing.
+ */
+
+ processCurrentNodeStates(queryParams);
+
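+      /*
+       * Brief polling pause so the asynchronous self-link workers can make progress before
+       * the next pass over the node state machine.
+       */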
+ try {
+ Thread.sleep(10);
+ } catch (InterruptedException exc) {
+ LOG.error(AaiUiMsgs.PROCESSING_LOOP_INTERUPTED, exc.getMessage());
+ return;
+ }
+
+ }
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.onap.aai.sparky.viewandinspect.services.VisualizationContext#processSelfLinks(org.onap.aai.sparky.sync.entity.SearchableEntity, org.onap.aai.sparky.viewandinspect.entity.QueryParams)
+ */
+ @Override
+ public void processSelfLinks(SearchableEntity searchtargetEntity, QueryParams queryParams) {
+
+ try {
+
+
+ if (searchtargetEntity == null) {
+ LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, contextIdStr + " - Failed to"
+ + " processSelfLinks, searchtargetEntity is null");
+ return;
+ }
+
+ long startTimeInMs = System.currentTimeMillis();
+
+ processSearchableEntity(searchtargetEntity, queryParams);
+
+ /*
+ * This method is blocking until we decouple it with a CountDownLatch await condition,
+ * and make the internal graph processing more event-y.
+ */
+
+ processOutstandingWork(queryParams);
+
+      long totalResolveTime = System.currentTimeMillis() - startTimeInMs;
+
+      LOG.info(AaiUiMsgs.ALL_TRANSACTIONS_RESOLVED, String.valueOf(totalResolveTime),
+          String.valueOf(totalLinksRetrieved.get()), String.valueOf(totalResolveTime));
+
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.VISUALIZATION_OUTPUT_ERROR, exc.getMessage());
+ }
+
+ }
+
+ /**
+ * Verify outbound neighbors.
+ */
+ private void verifyOutboundNeighbors() {
+
+ for (ActiveInventoryNode srcNode : nodeCache.values()) {
+
+ for (String targetNodeId : srcNode.getOutboundNeighbors()) {
+
+ ActiveInventoryNode targetNode = nodeCache.get(targetNodeId);
+
+ if (targetNode != null && srcNode.getNodeId() != null) {
+
+ targetNode.addInboundNeighbor(srcNode.getNodeId());
+
+ if (this.visualizationConfigs.makeAllNeighborsBidirectional()) {
+ targetNode.addOutboundNeighbor(srcNode.getNodeId());
+ }
+
+ }
+
+ }
+
+ }
+
+ }
+
+ /**
+ * Evaluate node depths.
+ */
+ private void evaluateNodeDepths() {
+
+ int numChanged = -1;
+ int numAttempts = 0;
+
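+    /*
+     * Iteratively relax node depths across the neighbor links until no depth changes in a full
+     * pass (a fixpoint) or MAX_DEPTH_EVALUATION_ATTEMPTS passes have been made.
+     */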
+ while (numChanged != 0) {
+
+ numChanged = 0;
+ numAttempts++;
+
+ for (ActiveInventoryNode srcNode : nodeCache.values()) {
+
+ if (srcNode.getState() == NodeProcessingState.INIT) {
+
+ /*
+           * This may be the only state where we don't want to process the node depth, because
+ * typically it won't have any valid fields set, and it may remain in a partial state
+ * until we have processed the self-link.
+ */
+
+ continue;
+
+ }
+
+ for (String targetNodeId : srcNode.getOutboundNeighbors()) {
+ ActiveInventoryNode targetNode = nodeCache.get(targetNodeId);
+
+ if (targetNode != null) {
+
+ if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) {
+ numChanged++;
+ }
+ }
+ }
+
+ for (String targetNodeId : srcNode.getInboundNeighbors()) {
+ ActiveInventoryNode targetNode = nodeCache.get(targetNodeId);
+
+ if (targetNode != null) {
+
+ if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) {
+ numChanged++;
+ }
+ }
+ }
+ }
+
+ if (numAttempts >= MAX_DEPTH_EVALUATION_ATTEMPTS) {
+ LOG.info(AaiUiMsgs.MAX_EVALUATION_ATTEMPTS_EXCEEDED);
+ return;
+ }
+
+ }
+
+ if (LOG.isDebugEnabled()) {
+ if (numAttempts > 0) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Evaluate node depths completed in " + numAttempts + " attempts");
+ } else {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Evaluate node depths completed in 0 attempts because all nodes at correct depth");
+ }
+ }
+
+ }
+
+
+ /**
+ * Gets the entity type primary key name.
+ *
+ * @param entityType the entity type
+ * @return the entity type primary key name
+ */
+
+
+ private String getEntityTypePrimaryKeyName(String entityType) {
+
+ if (entityType == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "node primary key"
+ + " name because entity type is null");
+ return null;
+ }
+
+ OxmEntityDescriptor descriptor = oxmEntityLookup.getEntityDescriptors().get(entityType);
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "oxm entity"
+ + " descriptor for entityType = " + entityType);
+ return null;
+ }
+
+ List<String> pkeyNames = descriptor.getPrimaryKeyAttributeNames();
+
+ if (pkeyNames == null || pkeyNames.size() == 0) {
+ LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "node primary"
+ + " key because descriptor primary key names is empty");
+ return null;
+ }
+
+ return NodeUtils.concatArray(pkeyNames, "/");
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationService.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationService.java
new file mode 100644
index 0000000..b0b8b9e
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationService.java
@@ -0,0 +1,382 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.services;
+
+import java.io.IOException;
+import java.security.SecureRandom;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+
+import javax.servlet.ServletException;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.OxmModelLoader;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.dal.GizmoAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.subscription.config.SubscriptionConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.entity.SearchableEntity;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
+import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode;
+import org.onap.aai.sparky.viewandinspect.entity.D3VisualizationOutput;
+import org.onap.aai.sparky.viewandinspect.entity.GraphMeta;
+import org.onap.aai.sparky.viewandinspect.entity.QueryParams;
+import org.onap.aai.sparky.viewandinspect.entity.QueryRequest;
+
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class BaseVisualizationService implements VisualizationService {
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(BaseVisualizationService.class);
+
+ private ObjectMapper mapper = new ObjectMapper();
+
+ private final ActiveInventoryAdapter aaiAdapter;
+ private final GizmoAdapter gizmoAdapter;
+ private final ElasticSearchAdapter esAdapter;
+ private final ExecutorService aaiExecutorService;
+
+ private ConcurrentHashMap<Long, VisualizationContext> contextMap;
+ private final SecureRandom secureRandom;
+
+ private VisualizationConfigs visualizationConfigs;
+ private SubscriptionConfig subConfig;
+ private ElasticSearchEndpointConfig endpointEConfig;
+ private ElasticSearchSchemaConfig schemaEConfig;
+ private OxmEntityLookup oxmEntityLookup;
+
+ public BaseVisualizationService(OxmModelLoader loader, VisualizationConfigs visualizationConfigs,
+ ActiveInventoryAdapter aaiAdapter, GizmoAdapter gizmoAdapter, ElasticSearchAdapter esAdapter,
+ ElasticSearchEndpointConfig endpointConfig, ElasticSearchSchemaConfig schemaConfig,
+ int numActiveInventoryWorkers, OxmEntityLookup oxmEntityLookup, SubscriptionConfig subscriptionConfig)
+ throws Exception {
+
+ this.visualizationConfigs = visualizationConfigs;
+ this.endpointEConfig = endpointConfig;
+ this.schemaEConfig = schemaConfig;
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.subConfig = subscriptionConfig;
+
+
+ secureRandom = new SecureRandom();
+
+ /*
+ * Fix constructor with properly wired in properties
+ */
+
+ this.aaiAdapter = aaiAdapter;
+ this.gizmoAdapter = gizmoAdapter;
+ this.esAdapter = esAdapter;
+
+ this.mapper = new ObjectMapper();
+ mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+ this.contextMap = new ConcurrentHashMap<Long, VisualizationContext>();
+
+ this.aaiExecutorService = NodeUtils.createNamedExecutor("SLNC-WORKER",
+ numActiveInventoryWorkers, LOG);
+
+ }
+
+ /**
+ * Analyze query request body.
+ *
+ * @param queryRequestJson the query request json
+ * @return the query request
+ */
+
+ public QueryRequest analyzeQueryRequestBody(String queryRequestJson) {
+
+
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "analyzeQueryRequestBody()," + " queryRequestJson = " + queryRequestJson);
+
+ ObjectMapper nonEmptyMapper = new ObjectMapper();
+ nonEmptyMapper.setSerializationInclusion(Include.NON_EMPTY);
+
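+    /*
+     * The request body is expected to deserialize into a QueryRequest; the flows below rely at
+     * least on its hashId field, e.g. {"hashId":"<entity-id>"} (illustrative shape only, other
+     * QueryRequest fields may also be supplied).
+     */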
+ QueryRequest queryBody = null;
+
+ try {
+ queryBody = nonEmptyMapper.readValue(queryRequestJson, QueryRequest.class);
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT, "Analyzing query request body.",
+ exc.getLocalizedMessage());
+ }
+
+ return queryBody;
+
+ }
+
+ /**
+ * Log optime.
+ *
+ * @param method the method
+ * @param opStartTimeInMs the op start time in ms
+ */
+ private void logOptime(String method, long opStartTimeInMs) {
+ LOG.info(AaiUiMsgs.OPERATION_TIME, method,
+ String.valueOf(System.currentTimeMillis() - opStartTimeInMs));
+ }
+
+ private SearchableEntity extractSearchableEntityFromElasticEntity(OperationResult operationResult) {
+ if (operationResult == null || !operationResult.wasSuccessful()) {
+      // error, nothing to extract
+ return null;
+ }
+
+ SearchableEntity sourceEntity = null;
+ if (operationResult.wasSuccessful()) {
+
+ try {
+ JsonNode elasticValue = mapper.readValue(operationResult.getResult(), JsonNode.class);
+
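+        /*
+         * The search document is expected to look roughly like:
+         *   { "_source": { "entityType": "...", "entityPrimaryKeyValue": "...",
+         *                  "link": "...", "lastmodTimestamp": "..." } }
+         * (field names match the extraction below; values shown are placeholders).
+         */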
+ if (elasticValue != null) {
+ JsonNode sourceField = elasticValue.get("_source");
+
+ if (sourceField != null) {
+ sourceEntity = new SearchableEntity();
+
+ String entityType = NodeUtils.extractFieldValueFromObject(sourceField, "entityType");
+ sourceEntity.setEntityType(entityType);
+ String entityPrimaryKeyValue = NodeUtils.extractFieldValueFromObject(sourceField, "entityPrimaryKeyValue");
+ sourceEntity.setEntityPrimaryKeyValue(entityPrimaryKeyValue);
+ String link = NodeUtils.extractFieldValueFromObject(sourceField, "link");
+ sourceEntity.setLink(link);
+ String lastmodTimestamp = NodeUtils.extractFieldValueFromObject(sourceField, "lastmodTimestamp");
+ sourceEntity.setEntityTimeStamp(lastmodTimestamp);
+ }
+ }
+ } catch (IOException ioe) {
+ LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, "a json node ", ioe.getLocalizedMessage());
+ }
+ }
+ return sourceEntity;
+ }
+
+ /**
+ * Builds the visualization using generic query.
+ *
+ * @param queryRequest the query request
+ * @return the operation result
+ */
+ public OperationResult buildVisualizationUsingGenericQuery(QueryRequest queryRequest) {
+
+ OperationResult returnValue = new OperationResult();
+ OperationResult dataCollectionResult = null;
+ QueryParams queryParams = null;
+ SearchableEntity sourceEntity = null;
+
+ try {
+
+ /*
+ * Here is where we need to make a dip to elastic-search for the self-link by entity-id (link
+ * hash).
+ */
+ dataCollectionResult = esAdapter.retrieveEntityById(endpointEConfig.getEsIpAddress(),
+ endpointEConfig.getEsServerPort(),schemaEConfig.getIndexName(),
+ schemaEConfig.getIndexDocType(), queryRequest.getHashId());
+ sourceEntity = extractSearchableEntityFromElasticEntity(dataCollectionResult);
+
+ if (sourceEntity != null) {
+ sourceEntity.generateId();
+ }
+
+ queryParams = new QueryParams();
+ queryParams.setSearchTargetNodeId(queryRequest.getHashId());
+
+ } catch (Exception e1) {
+ LOG.error(AaiUiMsgs.FAILED_TO_GET_NODES_QUERY_RESULT, e1.getLocalizedMessage());
+ dataCollectionResult = new OperationResult(500, "Failed to get nodes-query result from AAI");
+ }
+
+ if (dataCollectionResult.getResultCode() == 200) {
+
+ String d3OutputJsonOutput = null;
+
+ try {
+
+ d3OutputJsonOutput = getVisualizationOutputBasedonGenericQuery( sourceEntity, queryParams, queryRequest);
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Generated D3" + " output as json = " + d3OutputJsonOutput);
+ }
+
+ if (d3OutputJsonOutput != null) {
+ returnValue.setResultCode(200);
+ returnValue.setResult(d3OutputJsonOutput);
+ } else {
+ returnValue.setResult(500, "Failed to generate D3 graph visualization");
+ }
+
+ } catch (Exception exc) {
+ returnValue.setResult(500,
+ "Failed to generate D3 graph visualization, due to a servlet exception.");
+ LOG.error(AaiUiMsgs.ERROR_D3_GRAPH_VISUALIZATION, exc.getLocalizedMessage());
+ }
+ } else {
+ returnValue.setResult(dataCollectionResult.getResultCode(), dataCollectionResult.getResult());
+ }
+
+ return returnValue;
+
+ }
+
+
+ /**
+   * Gets the visualization output based on a generic query.
+   *
+   * @param searchtargetEntity entity that will be used to start the visualization flow
+   * @param queryParams the query params
+   * @param request the query request
+   * @return the visualization output based on the generic query
+   * @throws ServletException the servlet exception
+ */
+ private String getVisualizationOutputBasedonGenericQuery(SearchableEntity searchtargetEntity,
+ QueryParams queryParams, QueryRequest request) throws ServletException {
+
+ long opStartTimeInMs = System.currentTimeMillis();
+
+ VisualizationTransformer transformer = null;
+ try {
+ transformer = new VisualizationTransformer(visualizationConfigs, subConfig);
+ } catch (Exception exc) {
+ throw new ServletException(
+ "Failed to create VisualizationTransformer instance because of execption", exc);
+ }
+
+ VisualizationContext visContext = null;
+ long contextId = secureRandom.nextLong();
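+    /*
+     * Each request gets its own visualization context, keyed by a random context id, so that
+     * concurrent visualization requests never share node caches.
+     */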
+ try {
+ if ( visualizationConfigs.isGizmoEnabled()) {
+ visContext = new BaseGizmoVisualizationContext(contextId, this.gizmoAdapter, aaiExecutorService,
+ this.visualizationConfigs, oxmEntityLookup);
+ } else {
+ visContext = new BaseVisualizationContext(contextId, this.aaiAdapter, aaiExecutorService,
+ this.visualizationConfigs, oxmEntityLookup);
+ }
+
+ contextMap.putIfAbsent(contextId, visContext);
+ } catch (Exception e1) {
+ LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT,
+ "While building Visualization Context, " + e1.getLocalizedMessage());
+ throw new ServletException(e1);
+ }
+
+ String jsonResponse = null;
+
+ long startTimeInMs = System.currentTimeMillis();
+
+ visContext.processSelfLinks(searchtargetEntity, queryParams);
+ contextMap.remove(contextId);
+
+ logOptime("collectSelfLinkNodes()", startTimeInMs);
+
+ /*
+ * Flatten the graphs into a set of Graph and Link nodes. In this method I want the node graph
+ * resulting from the edge-tag-query to be represented first, and then we'll layer in
+ * relationship data.
+ */
+ long overlayDataStartTimeInMs = System.currentTimeMillis();
+
+ Map<String, ActiveInventoryNode> cachedNodeMap = visContext.getNodeCache();
+
+ if (LOG.isDebugEnabled()) {
+
+ StringBuilder sb = new StringBuilder(128);
+
+ sb.append("\nCached Node Map:\n");
+ for (String k : cachedNodeMap.keySet()) {
+ sb.append("\n----");
+ sb.append("\n").append(cachedNodeMap.get(k).dumpNodeTree(true));
+ }
+
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString());
+ }
+
+ transformer.buildFlatNodeArrayFromGraphCollection(cachedNodeMap);
+ transformer.buildLinksFromGraphCollection(cachedNodeMap);
+
+ /*
+ * - Apply configuration-driven styling
+ * - Build the final transformation response object
+ * - Use information we have to populate the GraphMeta object
+ */
+
+ transformer.addSearchTargetAttributesToRootNode();
+
+ GraphMeta graphMeta = new GraphMeta();
+
+ D3VisualizationOutput output = null;
+ try {
+ output = transformer
+ .generateVisualizationOutput((System.currentTimeMillis() - opStartTimeInMs), graphMeta);
+    } catch (JsonProcessingException exc) {
+      throw new ServletException("Caught an exception while generating visualization output", exc);
+    } catch (IOException exc) {
+      LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, exc.getLocalizedMessage());
+      throw new ServletException("Caught an exception while generating visualization output", exc);
+    }
+
+ output.setInlineMessage(visContext.getInlineMessage());
+ output.getGraphMeta().setNumLinkResolveFailed(visContext.getNumFailedLinkResolve());
+ output.getGraphMeta().setNumLinksResolvedSuccessfullyFromCache(
+ visContext.getNumSuccessfulLinkResolveFromCache());
+ output.getGraphMeta().setNumLinksResolvedSuccessfullyFromServer(
+ visContext.getNumSuccessfulLinkResolveFromFromServer());
+
+ try {
+ jsonResponse = transformer.convertVisualizationOutputToJson(output);
+ } catch (JsonProcessingException jpe) {
+ throw new ServletException(
+ "Caught an exception while converting visualization output to json", jpe);
+ }
+
+ logOptime("[build flat node array, add relationship data, search target,"
+ + " color scheme, and generate visualization output]", overlayDataStartTimeInMs);
+
+ logOptime("doFilter()", opStartTimeInMs);
+
+ return jsonResponse;
+
+ }
+
+ public void shutdown() {
+ aaiExecutorService.shutdown();
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java
new file mode 100644
index 0000000..6d4ed88
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java
@@ -0,0 +1,55 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.services;
+
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.onap.aai.sparky.sync.entity.SearchableEntity;
+import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode;
+import org.onap.aai.sparky.viewandinspect.entity.InlineMessage;
+import org.onap.aai.sparky.viewandinspect.entity.QueryParams;
+
+
+public interface VisualizationContext {
+
+ /**
+ * Process self links.
+ *
+ * @param skeletonNode the skeleton node
+ * @param queryParams the query params
+ */
+ void processSelfLinks(SearchableEntity searchtargetEntity, QueryParams queryParams);
+
+ ConcurrentHashMap<String, ActiveInventoryNode> getNodeCache();
+
+ InlineMessage getInlineMessage();
+
+ int getNumFailedLinkResolve();
+
+ int getNumSuccessfulLinkResolveFromCache();
+
+ int getNumSuccessfulLinkResolveFromFromServer();
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java
new file mode 100644
index 0000000..c8f252c
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java
@@ -0,0 +1,52 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.services;
+
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.viewandinspect.entity.QueryRequest;
+
+public interface VisualizationService {
+
+ /**
+ * Analyze query request body.
+ *
+ * @param queryRequestJson the query request json
+ * @return the query request
+ */
+
+ QueryRequest analyzeQueryRequestBody(String queryRequestJson);
+
+ /**
+ * Builds the visualization using generic query.
+ *
+ * @param queryRequest the query request
+ * @return the operation result
+ */
+ OperationResult buildVisualizationUsingGenericQuery(QueryRequest queryRequest);
+
+ void shutdown();
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java
new file mode 100644
index 0000000..46a70c3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java
@@ -0,0 +1,305 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.services;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.sparky.config.SparkyResourceLoader;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.subscription.config.SubscriptionConfig;
+import org.onap.aai.sparky.util.ConfigHelper;
+import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
+import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode;
+import org.onap.aai.sparky.viewandinspect.entity.D3VisualizationOutput;
+import org.onap.aai.sparky.viewandinspect.entity.GraphMeta;
+import org.onap.aai.sparky.viewandinspect.entity.NodeDebug;
+import org.onap.aai.sparky.viewandinspect.entity.SparkyGraphLink;
+import org.onap.aai.sparky.viewandinspect.entity.SparkyGraphNode;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+
+/**
+ * The idea here is to receive a collection of graphs and then fold them together (or not) based on
+ * configuration. The first goal will be to fold all like-resources together, but the choice of
+ * folding could/should be configurable, and will simply change the degree of link based nodes when
+ * we generate the Node-Array and Link-Array output.
+ *
+ * @author DAVEA
+ *
+ */
+
+public class VisualizationTransformer {
+
+ private static final Logger LOG = LoggerFactory.getInstance().getLogger(
+ VisualizationTransformer.class);
+
+ List<SparkyGraphNode> flatNodeArray = new ArrayList<SparkyGraphNode>();
+
+ /*
+ * Maybe this isn't a string but Json-Model objects that we will convert to final string
+ * representation when we dump the node-array and link-array collections the post-data blob in the
+ * HttpServletResponse.
+ */
+
+ List<SparkyGraphLink> linkArrayOutput = new ArrayList<SparkyGraphLink>();
+
+ private VisualizationConfigs visualizationConfigs;
+ private SubscriptionConfig subConfig;
+
+ /**
+ * Instantiates a new visualization transformer.
+ *
+ * @throws Exception the exception
+ */
+ public VisualizationTransformer(VisualizationConfigs visualizationConfigs,
+ SubscriptionConfig subConfig) throws Exception {
+ this.visualizationConfigs = visualizationConfigs;
+ this.subConfig = subConfig;
+ }
+
+
+ /**
+ * Log optime.
+ *
+ * @param method the method
+ * @param startTimeInMs the start time in ms
+ */
+ private void logOptime(String method, long startTimeInMs) {
+ LOG.info(AaiUiMsgs.OPERATION_TIME, method,
+ String.valueOf((System.currentTimeMillis() - startTimeInMs)));
+ }
+
+ /**
+ * Adds the search target attributes to root node.
+ */
+ public void addSearchTargetAttributesToRootNode() {
+
+ for (SparkyGraphNode n : flatNodeArray) {
+ if (n.isRootNode()) {
+ n.getNodeMeta().setSearchTarget(true);
+ n.getNodeMeta().setClassName(this.visualizationConfigs.getSelectedSearchedNodeClassName());
+ }
+
+ }
+
+ }
+
+ /**
+ * Generate visualization output.
+ *
+ * @param preProcessingOpTimeInMs the pre processing op time in ms
+ * @param graphMeta the graph meta
+ * @return the d 3 visualization output
+ * @throws JsonProcessingException the json processing exception
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+
+ public D3VisualizationOutput generateVisualizationOutput(long preProcessingOpTimeInMs,
+ GraphMeta graphMeta) throws JsonProcessingException, IOException {
+
+ long opStartTimeInMs = System.currentTimeMillis();
+
+ /*
+ * iterate over the flat collection, and only add the graph nodes to the graph node collection
+ */
+
+ D3VisualizationOutput output = new D3VisualizationOutput();
+
+ output.setGraphMeta(graphMeta);
+
+ for (SparkyGraphNode n : flatNodeArray) {
+ if ( n.getItemType()!= null) {
+ output.pegCounter(n.getItemType());
+ }
+ }
+
+ output.addNodes(flatNodeArray);
+ output.addLinks(linkArrayOutput);
+
+ int numNodes = flatNodeArray.size();
+ int numLinks = linkArrayOutput.size();
+
+ LOG.info(AaiUiMsgs.VISUALIZATION_GRAPH_OUTPUT, String.valueOf(numNodes),
+ String.valueOf(numLinks));
+
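+    /*
+     * A connected graph of N nodes needs at least N-1 links; fewer than that implies at least
+     * one rendered node is dangling (unreachable from the rest of the graph).
+     */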
+ if (numLinks < (numNodes - 1)) {
+ LOG.warn(AaiUiMsgs.DANGLING_NODE_WARNING, String.valueOf(numLinks),
+ String.valueOf(numNodes));
+ }
+
+ ObjectMapper mapper = new ObjectMapper();
+
+ SparkyResourceLoader resourceLoader = visualizationConfigs.getResourceLoader();
+ File aaiEntityDescriptorsFile = resourceLoader.getResourceAsFile(visualizationConfigs.getAaiEntityNodeDescriptors(), true);
+
+ if (aaiEntityDescriptorsFile != null) {
+ com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDefinitions =
+ mapper.readTree(aaiEntityDescriptorsFile);
+ graphMeta.setAaiEntityNodeDescriptors(aaiEntityNodeDefinitions);
+ } else {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, "Failed to find " + visualizationConfigs.getAaiEntityNodeDescriptors());
+ graphMeta.setAaiEntityNodeDescriptors(null);
+ }
+
+ graphMeta.setNumLinks(linkArrayOutput.size());
+ graphMeta.setNumNodes(flatNodeArray.size());
+ graphMeta.setRenderTimeInMs(preProcessingOpTimeInMs);
+
+ output.setGraphMeta(graphMeta);
+
+ logOptime("generateVisualizationOutput()", opStartTimeInMs);
+
+ return output;
+ }
+
+ /**
+ * Convert visualization output to json.
+ *
+ * @param output the output
+ * @return the string
+ * @throws JsonProcessingException the json processing exception
+ */
+ public String convertVisualizationOutputToJson(D3VisualizationOutput output)
+ throws JsonProcessingException {
+
+ if (output == null) {
+ return null;
+ }
+
+ ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
+
+ return ow.writeValueAsString(output);
+
+ }
+
+ /**
+ * Builds the links from graph collection.
+ *
+ * @param nodeMap the node map
+ */
+ public void buildLinksFromGraphCollection(Map<String, ActiveInventoryNode> nodeMap) {
+
+ for (ActiveInventoryNode ain : nodeMap.values()) {
+
+ /*
+ * This one is a little bit different, when we iterate over the collection we only want to
+ * draw the links for node that are less than the max traversal depth. We want to only draw
+ * links at a depth of n-1 because we are basing the links on the outbound neighbors from the
+ * current node.
+ */
+
+ if (ain.getNodeDepth() < this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) {
+
+ Collection<String> outboundNeighbors = ain.getOutboundNeighbors();
+
+ for (String outboundNeighbor : outboundNeighbors) {
+
+ SparkyGraphLink nodeLink = new SparkyGraphLink();
+
+ nodeLink.setId(UUID.randomUUID().toString());
+ nodeLink.setSource(ain.getNodeId());
+ nodeLink.setTarget(outboundNeighbor);
+
+ linkArrayOutput.add(nodeLink);
+
+ }
+
+ Collection<String> inboundNeighbors = ain.getInboundNeighbors();
+
+ for (String inboundNeighbor : inboundNeighbors) {
+
+ SparkyGraphLink nodeLink = new SparkyGraphLink();
+
+ nodeLink.setId(UUID.randomUUID().toString());
+ nodeLink.setSource(ain.getNodeId());
+ nodeLink.setTarget(inboundNeighbor);
+
+ linkArrayOutput.add(nodeLink);
+
+ }
+
+
+ } else {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "buildLinks(),"
+ + " Filtering node = " + ain.getNodeId() + " @ depth = "
+ + ain.getNodeDepth());
+ }
+
+ }
+ }
+
+ }
+
+ /**
+ * Builds the flat node array from graph collection.
+ *
+ * @param nodeMap the node map
+ */
+  /*
+   * Iterates over the cached node map and flattens every node within the max traversal depth
+   * into the output node array used for the folded resource target graph.
+   */
+ public void buildFlatNodeArrayFromGraphCollection(Map<String, ActiveInventoryNode> nodeMap) {
+
+ for (ActiveInventoryNode n : nodeMap.values()) {
+
+ if (n.getNodeDepth() <= this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) {
+
+ SparkyGraphNode jsonNode = new SparkyGraphNode(n, this.visualizationConfigs, this.subConfig);
+
+ jsonNode.getNodeMeta().setClassName(this.visualizationConfigs.getGeneralNodeClassName());
+
+ if (this.visualizationConfigs.isVisualizationDebugEnabled()) {
+
+ NodeDebug nodeDebug = jsonNode.getNodeMeta().getNodeDebug();
+
+ if (nodeDebug != null) {
+ nodeDebug.setProcessingError(n.isProcessingErrorOccurred());
+ nodeDebug.setProcessingErrorCauses(n.getProcessingErrorCauses());
+ }
+ }
+ flatNodeArray.add(jsonNode);
+ } else {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+ "Filtering node from visualization: " + n.getNodeId() + " @ depth = "
+ + n.getNodeDepth());
+ }
+ }
+ }
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformGizmoNodeSelfLinkProcessingTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformGizmoNodeSelfLinkProcessingTask.java
new file mode 100644
index 0000000..820b749
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformGizmoNodeSelfLinkProcessingTask.java
@@ -0,0 +1,128 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.GizmoAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction;
+import org.slf4j.MDC;
+
+/**
+ * The Class PerformNodeSelfLinkProcessingTask.
+ */
+public class PerformGizmoNodeSelfLinkProcessingTask implements Supplier<NodeProcessingTransaction> {
+
+ private static final Logger logger =
+ LoggerFactory.getInstance().getLogger(PerformGizmoNodeSelfLinkProcessingTask.class);
+
+ private NodeProcessingTransaction txn;
+ private GizmoAdapter gizmoAdapter;
+ private Map<String, String> contextMap;
+
+  /**
+   * Instantiates a new perform gizmo node self link processing task.
+   *
+   * @param txn the node processing transaction
+   * @param requestParameters the request parameters
+   * @param gizmoAdapter the gizmo adapter
+   */
+ public PerformGizmoNodeSelfLinkProcessingTask(NodeProcessingTransaction txn, String requestParameters,
+ GizmoAdapter gizmoAdapter) {
+ this.gizmoAdapter = gizmoAdapter;
+ this.txn = txn;
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public NodeProcessingTransaction get() {
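+    // Propagate the caller's MDC logging context onto this worker thread so log output from
+    // the asynchronous Gizmo self-link fetch stays correlated with the originating request.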
+ MDC.setContextMap(contextMap);
+ OperationResult opResult = new OperationResult();
+ String link = txn.getSelfLink();
+
+ if (link == null) {
+ opResult.setResult(500, "Aborting self-link processing because self link is null");
+ txn.setOpResult(opResult);
+ return txn;
+ }
+
+    /*
+     * Rebuild the self link:
+     *
+     *   - build the base url with the configured scheme + authority (server:port)
+     *   - recombine baseUrl + originalEncodedLink + queryStringParameters
+     */
+
+ final String urlSchemeAndAuthority = gizmoAdapter.repairInventorySelfLink("", null);
+
+ String parameters = txn.getRequestParameters();
+ link = urlSchemeAndAuthority + link;
+
+ if (parameters != null) {
+ link += parameters;
+ }
+
+ if (logger.isDebugEnabled()) {
+ logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Collecting " + link);
+ }
+
+ try {
+
+ opResult = gizmoAdapter.queryGizmoWithRetries(link, "application/json",
+ gizmoAdapter.getEndpointConfig().getNumRequestRetries());
+ } catch (Exception exc) {
+ opResult = new OperationResult();
+ opResult.setResult(500, "Querying AAI with retry failed due to an exception.");
+ logger.error(AaiUiMsgs.ERROR_AAI_QUERY_WITH_RETRY, exc.getMessage());
+ }
+
+ if (logger.isDebugEnabled()) {
+ logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Operation result = " + opResult.toString());
+ }
+
+ txn.setOpResult(opResult);
+ return txn;
+
+ }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java
new file mode 100644
index 0000000..c686443
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java
@@ -0,0 +1,129 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction;
+import org.slf4j.MDC;
+
+/**
+ * The Class PerformNodeSelfLinkProcessingTask.
+ */
+public class PerformNodeSelfLinkProcessingTask implements Supplier<NodeProcessingTransaction> {
+
+ private static final Logger logger =
+ LoggerFactory.getInstance().getLogger(PerformNodeSelfLinkProcessingTask.class);
+
+ private NodeProcessingTransaction txn;
+ private ActiveInventoryAdapter aaiAdapter;
+ private Map<String, String> contextMap;
+
+  /**
+   * Instantiates a new perform node self link processing task.
+   *
+   * @param txn the node processing transaction
+   * @param requestParameters the request parameters
+   * @param aaiAdapter the active inventory adapter
+   */
+ public PerformNodeSelfLinkProcessingTask(NodeProcessingTransaction txn, String requestParameters,
+ ActiveInventoryAdapter aaiAdapter) {
+ this.aaiAdapter = aaiAdapter;
+ this.txn = txn;
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public NodeProcessingTransaction get() {
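+    // Propagate the caller's MDC logging context onto this worker thread so log output from
+    // the asynchronous A&AI self-link fetch stays correlated with the originating request.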
+ MDC.setContextMap(contextMap);
+ OperationResult opResult = new OperationResult();
+ String link = txn.getSelfLink();
+
+ if (link == null) {
+ opResult.setResult(500, "Aborting self-link processing because self link is null");
+ txn.setOpResult(opResult);
+ return txn;
+ }
+
+    /*
+     * Rebuild the self link:
+     *
+     *   - build the base url with the configured scheme + authority (server:port)
+     *   - recombine baseUrl + originalEncodedLink + queryStringParameters
+     */
+
+ final String urlSchemeAndAuthority = aaiAdapter.repairSelfLink("");
+
+ String parameters = txn.getRequestParameters();
+ link = urlSchemeAndAuthority + link;
+
+ if (parameters != null) {
+ link += parameters;
+ }
+
+ if (logger.isDebugEnabled()) {
+ logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Collecting " + link);
+ }
+
+ try {
+ opResult = aaiAdapter.queryActiveInventoryWithRetries(link, "application/json",
+ aaiAdapter.getEndpointConfig().getNumRequestRetries());
+ } catch (Exception exc) {
+ opResult = new OperationResult();
+ opResult.setResult(500, "Querying AAI with retry failed due to an exception.");
+ logger.error(AaiUiMsgs.ERROR_AAI_QUERY_WITH_RETRY, exc.getMessage());
+ }
+
+ if (logger.isDebugEnabled()) {
+ logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Operation result = " + opResult.toString());
+ }
+
+ txn.setOpResult(opResult);
+ return txn;
+
+ }
+
+}
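
The task above is a java.util.function.Supplier, so it is intended to be handed to CompletableFuture.supplyAsync and joined via whenComplete, the same dispatch pattern the synchronizers later in this change use for PerformActiveInventoryRetrieval. A minimal usage sketch, assuming a caller-owned executor; the real pool, transaction construction, and error handling live in the view-and-inspect node processing code, which is not part of this file:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction;
import org.onap.aai.sparky.viewandinspect.task.PerformNodeSelfLinkProcessingTask;

public class SelfLinkTaskDispatchSketch {

  public static void dispatch(NodeProcessingTransaction txn, String requestParameters,
      ActiveInventoryAdapter aaiAdapter) {

    // illustrative executor; the real pool is owned by the node processing pipeline
    ExecutorService aaiExecutor = Executors.newFixedThreadPool(4);

    CompletableFuture
        .supplyAsync(new PerformNodeSelfLinkProcessingTask(txn, requestParameters, aaiAdapter),
            aaiExecutor)
        .whenComplete((completedTxn, error) -> {
          if (error != null) {
            // dispatch failed before a result could be produced
            return;
          }
          // the returned transaction carries the OperationResult set by the task:
          // the AAI response, or a synthetic 500 when the self link was null or the query threw
        });

    aaiExecutor.shutdown();
  }
}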
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java
new file mode 100644
index 0000000..6d39849
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java
@@ -0,0 +1,95 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewandinspect.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.viewandinspect.entity.SelfLinkDeterminationTransaction;
+import org.slf4j.MDC;
+
+public class PerformSelfLinkDeterminationTask implements Supplier<SelfLinkDeterminationTransaction> {
+
+ private static final Logger logger =
+ LoggerFactory.getInstance().getLogger(PerformSelfLinkDeterminationTask.class);
+
+ private SelfLinkDeterminationTransaction txn;
+ private ActiveInventoryAdapter aaiAdapter;
+ private Map<String, String> contextMap;
+
+
+  /**
+   * Instantiates a new perform self link determination task.
+   *
+   * @param txn the txn
+   * @param requestParameters the request parameters
+   * @param aaiAdapter the AAI adapter
+   */
+ public PerformSelfLinkDeterminationTask(SelfLinkDeterminationTransaction txn, String requestParameters,
+ ActiveInventoryAdapter aaiAdapter) {
+
+ this.aaiAdapter = aaiAdapter;
+ this.txn = txn;
+ this.contextMap = MDC.getCopyOfContextMap();
+ }
+
+ /* (non-Javadoc)
+ * @see java.util.function.Supplier#get()
+ */
+ @Override
+ public SelfLinkDeterminationTransaction get() {
+ MDC.setContextMap(contextMap);
+ if (txn.getQueryString() == null) {
+ OperationResult opResult = new OperationResult();
+ opResult.setResult(500, "Aborting self-link determination because self link query is null.");
+ txn.setOpResult(opResult);
+ return txn;
+ }
+
+ OperationResult opResult = null;
+ try {
+ opResult = aaiAdapter.queryActiveInventoryWithRetries(txn.getQueryString(), "application/json",
+ aaiAdapter.getEndpointConfig().getNumRequestRetries());
+ } catch (Exception exc) {
+ opResult = new OperationResult();
+ opResult.setResult(500, "Querying AAI with retry failed due to an exception.");
+ logger.error(AaiUiMsgs.ERROR_AAI_QUERY_WITH_RETRY, exc.getMessage());
+ }
+
+ if (logger.isDebugEnabled()) {
+ logger.debug("Operation result = " + opResult.toString());
+ }
+
+ txn.setOpResult(opResult);
+ return txn;
+
+ }
+
+} \ No newline at end of file
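
Both of these task classes capture MDC.getCopyOfContextMap() in their constructors and call MDC.setContextMap() at the top of get(). That hand-off is what keeps per-request logging context (for example the transaction id that MdcContext.initialize sets up) attached to work that actually runs on a shared executor thread. A stripped-down sketch of the same pattern, using plain slf4j and an illustrative "RequestId" key rather than any Sparky constant:

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

import org.slf4j.MDC;

public class MdcPropagationSketch implements Supplier<String> {

  private final Map<String, String> contextMap;

  public MdcPropagationSketch() {
    // captured on the submitting thread
    this.contextMap = MDC.getCopyOfContextMap();
  }

  @Override
  public String get() {
    // restored on the pool thread so log statements keep the caller's context
    if (contextMap != null) {
      MDC.setContextMap(contextMap);
    }
    return MDC.get("RequestId");
  }

  public static void main(String[] args) {
    MDC.put("RequestId", "txn-123");
    CompletableFuture.supplyAsync(new MdcPropagationSketch())
        .thenAccept(System.out::println) // prints txn-123
        .join();
  }
}

Note that the value only round-trips when an slf4j binding with real MDC support (for example logback) is on the classpath; with a no-op MDC adapter the sketch prints null.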
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java
new file mode 100644
index 0000000..8f29519
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java
@@ -0,0 +1,779 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewinspect.sync;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.concurrent.ExecutorService;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.cl.mdc.MdcContext;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.SearchableEntityLookup;
+import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.dal.rest.HttpMethod;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
+import org.onap.aai.sparky.sync.IndexSynchronizer;
+import org.onap.aai.sparky.sync.SynchronizerConstants;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.entity.MergableEntity;
+import org.onap.aai.sparky.sync.entity.SearchableEntity;
+import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor;
+import org.onap.aai.sparky.sync.enumeration.OperationState;
+import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
+import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchPut;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval;
+import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+/**
+ * The Class ViewInspectEntitySynchronizer.
+ */
+public class ViewInspectEntitySynchronizer extends AbstractEntitySynchronizer
+ implements IndexSynchronizer {
+
+ /**
+ * The Class RetrySearchableEntitySyncContainer.
+ */
+ private class RetrySearchableEntitySyncContainer {
+ NetworkTransaction txn;
+ SearchableEntity se;
+
+ /**
+ * Instantiates a new retry searchable entity sync container.
+ *
+ * @param txn the txn
+ * @param se the se
+ */
+ public RetrySearchableEntitySyncContainer(NetworkTransaction txn, SearchableEntity se) {
+ this.txn = txn;
+ this.se = se;
+ }
+
+ public NetworkTransaction getNetworkTransaction() {
+ return txn;
+ }
+
+ public SearchableEntity getSearchableEntity() {
+ return se;
+ }
+ }
+
+ private static final Logger LOG =
+ LoggerFactory.getInstance().getLogger(ViewInspectEntitySynchronizer.class);
+
+ private boolean allWorkEnumerated;
+ private Deque<SelfLinkDescriptor> selflinks;
+ private Deque<RetrySearchableEntitySyncContainer> retryQueue;
+ private Map<String, Integer> retryLimitTracker;
+ protected ExecutorService esPutExecutor;
+ private OxmEntityLookup oxmEntityLookup;
+ private SearchableEntityLookup searchableEntityLookup;
+
+  /**
+   * Instantiates a new view inspect entity synchronizer.
+   *
+   * @param schemaConfig the elastic search schema config (provides the index name)
+   * @throws Exception the exception
+   */
+ public ViewInspectEntitySynchronizer(ElasticSearchSchemaConfig schemaConfig,
+ int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
+ NetworkStatisticsConfig esStatConfig, OxmEntityLookup oxmEntityLookup,
+ SearchableEntityLookup searchableEntityLookup) throws Exception {
+ super(LOG, "SES", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(),
+ aaiStatConfig, esStatConfig);
+
+ this.oxmEntityLookup = oxmEntityLookup;
+ this.searchableEntityLookup = searchableEntityLookup;
+ this.allWorkEnumerated = false;
+ this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+ this.retryQueue = new ConcurrentLinkedDeque<RetrySearchableEntitySyncContainer>();
+ this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
+ this.synchronizerName = "Searchable Entity Synchronizer";
+ this.esPutExecutor = NodeUtils.createNamedExecutor("SES-ES-PUT", 5, LOG);
+ this.aaiEntityStats.intializeEntityCounters(
+ searchableEntityLookup.getSearchableEntityDescriptors().keySet());
+ this.esEntityStats.intializeEntityCounters(
+ searchableEntityLookup.getSearchableEntityDescriptors().keySet());
+ this.syncDurationInMs = -1;
+ }
+
+ /**
+ * Collect all the work.
+ *
+ * @return the operation state
+ */
+ private OperationState collectAllTheWork() {
+ final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+ Map<String, SearchableOxmEntityDescriptor> descriptorMap =
+ searchableEntityLookup.getSearchableEntityDescriptors();
+
+ if (descriptorMap.isEmpty()) {
+ LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES);
+ LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES);
+ return OperationState.ERROR;
+ }
+
+ Collection<String> syncTypes = descriptorMap.keySet();
+
+ /*Collection<String> syncTypes = new ArrayList<String>();
+ syncTypes.add("service-instance");*/
+
+ try {
+
+      /*
+       * launch a parallel async task to collect the self links for each entity type (bounded by
+       * the parallelism of the configured executor)
+       */
+
+ aaiWorkOnHand.set(syncTypes.size());
+
+ for (String key : syncTypes) {
+
+ supplyAsync(new Supplier<Void>() {
+
+ @Override
+ public Void get() {
+ MDC.setContextMap(contextMap);
+ OperationResult typeLinksResult = null;
+ try {
+ typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key);
+ System.out.println(typeLinksResult);
+ aaiWorkOnHand.decrementAndGet();
+ processEntityTypeSelfLinks(typeLinksResult);
+ } catch (Exception exc) {
+ // TODO -> LOG, what should be logged here?
+ }
+
+ return null;
+ }
+
+ }, aaiExecutor).whenComplete((result, error) -> {
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ERROR_GENERIC,
+ "An error occurred getting data from AAI. Error = " + error.getMessage());
+ }
+ });
+
+ }
+
+ while (aaiWorkOnHand.get() != 0) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+ }
+
+ Thread.sleep(1000);
+ }
+
+ aaiWorkOnHand.set(selflinks.size());
+ allWorkEnumerated = true;
+ syncEntityTypes();
+
+ while (!isSyncDone()) {
+ performRetrySync();
+ Thread.sleep(1000);
+ }
+
+ /*
+ * Make sure we don't hang on to retries that failed which could cause issues during future
+ * syncs
+ */
+ retryLimitTracker.clear();
+
+ } catch (Exception exc) {
+      // TODO -> LOG, what should be logged here?
+ }
+
+ return OperationState.OK;
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+ */
+ @Override
+ public OperationState doSync() {
+ this.syncDurationInMs = -1;
+ String txnID = NodeUtils.getRandomTxnId();
+ MdcContext.initialize(txnID, "SearchableEntitySynchronizer", "", "Sync", "");
+
+ resetCounters();
+ this.allWorkEnumerated = false;
+ syncStartedTimeStampInMs = System.currentTimeMillis();
+ collectAllTheWork();
+
+ return OperationState.OK;
+ }
+
+ /**
+ * Process entity type self links.
+ *
+ * @param operationResult the operation result
+ */
+ private void processEntityTypeSelfLinks(OperationResult operationResult) {
+
+ JsonNode rootNode = null;
+
+ final String jsonResult = operationResult.getResult();
+
+ if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
+
+ try {
+ rootNode = mapper.readTree(jsonResult);
+      } catch (IOException exc) {
+        String message =
+            "Could not deserialize JSON (representing operation result) as node tree. " +
+            "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage();
+        LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message);
+        // nothing more can be done with an unparseable payload
+        return;
+      }
+
+      JsonNode resultData = rootNode.get("result-data");
+      ArrayNode resultDataArrayNode = null;
+
+      if (resultData != null && resultData.isArray()) {
+ resultDataArrayNode = (ArrayNode) resultData;
+
+ Iterator<JsonNode> elementIterator = resultDataArrayNode.elements();
+ JsonNode element = null;
+
+ while (elementIterator.hasNext()) {
+ element = elementIterator.next();
+
+ final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type");
+ final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link");
+
+ SearchableOxmEntityDescriptor descriptor = null;
+
+ if (resourceType != null && resourceLink != null) {
+
+ descriptor = searchableEntityLookup.getSearchableEntityDescriptors().get(resourceType);
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType);
+ // go to next element in iterator
+ continue;
+ }
+
+ if (descriptor.hasSearchableAttributes()) {
+ selflinks.add(new SelfLinkDescriptor(resourceLink, SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType));
+ }
+
+ }
+ }
+ }
+ }
+
+ }
+
+ /**
+ * Sync entity types.
+ */
+ private void syncEntityTypes() {
+
+ while (selflinks.peek() != null) {
+
+ SelfLinkDescriptor linkDescriptor = selflinks.poll();
+ aaiWorkOnHand.decrementAndGet();
+
+ OxmEntityDescriptor descriptor = null;
+
+ if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+ descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType());
+
+ if (descriptor == null) {
+ LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+ // go to next element in iterator
+ continue;
+ }
+
+ NetworkTransaction txn = new NetworkTransaction();
+ txn.setDescriptor(descriptor);
+ txn.setLink(linkDescriptor.getSelfLink());
+ txn.setOperationType(HttpMethod.GET);
+ txn.setEntityType(linkDescriptor.getEntityType());
+
+ aaiWorkOnHand.incrementAndGet();
+
+ supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor)
+ .whenComplete((result, error) -> {
+
+ aaiWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage());
+ } else {
+ if (result == null) {
+ LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK,
+ linkDescriptor.getSelfLink());
+ } else {
+ updateActiveInventoryCounters(result);
+ fetchDocumentForUpsert(result);
+ }
+ }
+ });
+ }
+
+ }
+
+ }
+
+ /**
+ * Perform document upsert.
+ *
+ * @param esGetTxn the es get txn
+ * @param se the se
+ */
+ protected void performDocumentUpsert(NetworkTransaction esGetTxn, SearchableEntity se) {
+    /*
+     * As part of the response processing we need to do the following:
+     *   1. Extract the version (if present); it will be the ETAG when we use the
+     *      Search-Abstraction-Service.
+     *   2. Spawn the next task, which does the PUT into elastic with or without the version tag:
+     *      a) if version is null or RC=404, then standard PUT, no _update with version tag
+     *      b) if version != null, do PUT with _update?version=versionNumber in the URI to elastic
+     */
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+ return;
+ }
+
+ String versionNumber = null;
+ boolean wasEntryDiscovered = false;
+ if (esGetTxn.getOperationResult().getResultCode() == 404) {
+ LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, se.getEntityPrimaryKeyValue());
+ } else if (esGetTxn.getOperationResult().getResultCode() == 200) {
+ wasEntryDiscovered = true;
+ try {
+ versionNumber = NodeUtils.extractFieldValueFromObject(
+ NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()),
+ "_version");
+ } catch (IOException exc) {
+ String message =
+ "Error extracting version number from response, aborting searchable entity sync of "
+ + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message);
+ return;
+ }
+ } else {
+      /*
+       * A result code other than 200 does not necessarily indicate a failure; for example, 201 is
+       * returned when a document is created. TODO -> should we return here?
+       */
+ LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE,
+ String.valueOf(esGetTxn.getOperationResult().getResultCode()));
+ return;
+ }
+
+ try {
+ String jsonPayload = null;
+ if (wasEntryDiscovered) {
+ try {
+ ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>();
+ NodeUtils.extractObjectsByKey(
+ NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()),
+ "_source", sourceObject);
+
+ if (!sourceObject.isEmpty()) {
+ String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false);
+ MergableEntity me = mapper.readValue(responseSource, MergableEntity.class);
+ ObjectReader updater = mapper.readerForUpdating(me);
+ MergableEntity merged = updater.readValue(NodeUtils.convertObjectToJson(se,false));
+ jsonPayload = mapper.writeValueAsString(merged);
+ }
+ } catch (IOException exc) {
+ String message =
+ "Error extracting source value from response, aborting searchable entity sync of "
+ + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message);
+ return;
+ }
+ } else {
+ jsonPayload = se.getAsJson();
+ }
+
+ if (wasEntryDiscovered) {
+ if (versionNumber != null && jsonPayload != null) {
+
+ String requestPayload = elasticSearchAdapter.buildBulkImportOperationRequest(getIndexName(),
+ "default", se.getId(), versionNumber, jsonPayload);
+
+ NetworkTransaction transactionTracker = new NetworkTransaction();
+ transactionTracker.setEntityType(esGetTxn.getEntityType());
+ transactionTracker.setDescriptor(esGetTxn.getDescriptor());
+ transactionTracker.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformElasticSearchUpdate(elasticSearchAdapter.getBulkUrl(),
+ requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor)
+ .whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ String message = "Searchable entity sync UPDATE PUT error - "
+ + error.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
+ } else {
+ updateElasticSearchCounters(result);
+ processStoreDocumentResult(result, esGetTxn, se);
+ }
+ });
+ }
+
+ } else {
+
+ if (link != null && jsonPayload != null) {
+
+ NetworkTransaction updateElasticTxn = new NetworkTransaction();
+ updateElasticTxn.setLink(link);
+ updateElasticTxn.setEntityType(esGetTxn.getEntityType());
+ updateElasticTxn.setDescriptor(esGetTxn.getDescriptor());
+ updateElasticTxn.setOperationType(HttpMethod.PUT);
+
+ esWorkOnHand.incrementAndGet();
+ supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter),
+ esPutExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ String message =
+ "Searchable entity sync UPDATE PUT error - " + error.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
+ } else {
+ updateElasticSearchCounters(result);
+ processStoreDocumentResult(result, esGetTxn, se);
+ }
+ });
+ }
+ }
+ } catch (Exception exc) {
+ String message = "Exception caught during searchable entity sync PUT operation. Message - "
+ + exc.getLocalizedMessage();
+ LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
+ }
+ }
+
+ /**
+ * Populate searchable entity document.
+ *
+ * @param doc the doc
+ * @param result the result
+ * @param resultDescriptor the result descriptor
+ * @throws JsonProcessingException the json processing exception
+ * @throws IOException Signals that an I/O exception has occurred.
+ */
+ protected void populateSearchableEntityDocument(SearchableEntity doc, String result,
+ OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException {
+
+ doc.setEntityType(resultDescriptor.getEntityName());
+
+ JsonNode entityNode = mapper.readTree(result);
+
+ List<String> primaryKeyValues = new ArrayList<String>();
+ String pkeyValue = null;
+
+ SearchableOxmEntityDescriptor searchableDescriptor = searchableEntityLookup.getSearchableEntityDescriptors().get(resultDescriptor.getEntityName());
+
+ for (String keyName : searchableDescriptor.getPrimaryKeyAttributeNames()) {
+ pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName);
+ if (pkeyValue != null) {
+ primaryKeyValues.add(pkeyValue);
+ } else {
+ String message = "populateSearchableEntityDocument(), pKeyValue is null for entityType = "
+ + resultDescriptor.getEntityName();
+ LOG.warn(AaiUiMsgs.WARN_GENERIC, message);
+ }
+ }
+
+ final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/");
+ doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue);
+
+ final List<String> searchTagFields = searchableDescriptor.getSearchableAttributes();
+
+ /*
+ * Based on configuration, use the configured field names for this entity-Type to build a
+ * multi-value collection of search tags for elastic search entity search criteria.
+ */
+ for (String searchTagField : searchTagFields) {
+ String searchTagValue = NodeUtils.getNodeFieldAsText(entityNode, searchTagField);
+ if (searchTagValue != null && !searchTagValue.isEmpty()) {
+ doc.addSearchTagWithKey(searchTagValue, searchTagField);
+ }
+ }
+ }
+
+ /**
+ * Fetch document for upsert.
+ *
+ * @param txn the txn
+ */
+ private void fetchDocumentForUpsert(NetworkTransaction txn) {
+ if (!txn.getOperationResult().wasSuccessful()) {
+ String message = "Self link failure. Result - " + txn.getOperationResult().getResult();
+ LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+ return;
+ }
+
+ SearchableOxmEntityDescriptor searchableDescriptor = searchableEntityLookup
+ .getSearchableEntityDescriptors().get(txn.getDescriptor().getEntityName());
+
+ try {
+ if (searchableDescriptor.hasSearchableAttributes()) {
+
+ final String jsonResult = txn.getOperationResult().getResult();
+ if (jsonResult != null && jsonResult.length() > 0) {
+
+ SearchableEntity se = new SearchableEntity();
+ se.setLink(ActiveInventoryAdapter.extractResourcePath(txn.getLink()));
+ populateSearchableEntityDocument(se, jsonResult, txn.getDescriptor());
+ se.deriveFields();
+
+ String link = null;
+ try {
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage());
+ }
+
+ if (link != null) {
+ NetworkTransaction n2 = new NetworkTransaction();
+ n2.setLink(link);
+ n2.setEntityType(txn.getEntityType());
+ n2.setDescriptor(txn.getDescriptor());
+ n2.setOperationType(HttpMethod.GET);
+
+ esWorkOnHand.incrementAndGet();
+
+ supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor)
+ .whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ performDocumentUpsert(result, se);
+ }
+ });
+ }
+ }
+
+ }
+    } catch (JsonProcessingException exc) {
+      // TODO -> LOG, what should be logged here?
+    } catch (IOException exc) {
+      // TODO -> LOG, what should be logged here?
+    }
+ }
+
+ /**
+ * Process store document result.
+ *
+ * @param esPutResult the es put result
+ * @param esGetResult the es get result
+ * @param se the se
+ */
+ private void processStoreDocumentResult(NetworkTransaction esPutResult,
+ NetworkTransaction esGetResult, SearchableEntity se) {
+
+ OperationResult or = esPutResult.getOperationResult();
+
+ if (!or.wasSuccessful()) {
+ if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) {
+
+ if (shouldAllowRetry(se.getId())) {
+ esWorkOnHand.incrementAndGet();
+
+ RetrySearchableEntitySyncContainer rsc =
+ new RetrySearchableEntitySyncContainer(esGetResult, se);
+ retryQueue.push(rsc);
+
+ String message = "Store document failed during searchable entity synchronization"
+ + " due to version conflict. Entity will be re-synced.";
+ LOG.warn(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
+ }
+ } else {
+ String message =
+ "Store document failed during searchable entity synchronization with result code "
+ + or.getResultCode() + " and result message " + or.getResult();
+ LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
+ }
+ }
+ }
+
+ /**
+ * Perform retry sync.
+ */
+ private void performRetrySync() {
+ while (retryQueue.peek() != null) {
+
+ RetrySearchableEntitySyncContainer rsc = retryQueue.poll();
+ if (rsc != null) {
+
+ SearchableEntity se = rsc.getSearchableEntity();
+ NetworkTransaction txn = rsc.getNetworkTransaction();
+
+ String link = null;
+ try {
+ /*
+ * In this retry flow the se object has already derived its fields
+ */
+ link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId());
+ } catch (Exception exc) {
+ LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage());
+ }
+
+ if (link != null) {
+ NetworkTransaction retryTransaction = new NetworkTransaction();
+ retryTransaction.setLink(link);
+ retryTransaction.setEntityType(txn.getEntityType());
+ retryTransaction.setDescriptor(txn.getDescriptor());
+ retryTransaction.setOperationType(HttpMethod.GET);
+
+ /*
+ * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already
+ * called incrementAndGet when queuing the failed PUT!
+ */
+
+ supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter),
+ esExecutor).whenComplete((result, error) -> {
+
+ esWorkOnHand.decrementAndGet();
+
+ if (error != null) {
+ LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage());
+ } else {
+ updateElasticSearchCounters(result);
+ performDocumentUpsert(result, se);
+ }
+ });
+ }
+
+ }
+ }
+ }
+
+ /**
+ * Should allow retry.
+ *
+ * @param id the id
+ * @return true, if successful
+ */
+ private boolean shouldAllowRetry(String id) {
+ boolean isRetryAllowed = true;
+ if (retryLimitTracker.get(id) != null) {
+ Integer currentCount = retryLimitTracker.get(id);
+ if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) {
+ isRetryAllowed = false;
+ String message = "Searchable entity re-sync limit reached for " + id
+ + ", re-sync will no longer be attempted for this entity";
+ LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
+ } else {
+        Integer newCount = Integer.valueOf(currentCount.intValue() + 1);
+ retryLimitTracker.put(id, newCount);
+ }
+ } else {
+      Integer firstRetryCount = Integer.valueOf(1);
+ retryLimitTracker.put(id, firstRetryCount);
+ }
+
+ return isRetryAllowed;
+ }
+
+ @Override
+ public SynchronizerState getState() {
+ if (!isSyncDone()) {
+ return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+ }
+
+ return SynchronizerState.IDLE;
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
+ */
+ @Override
+ public String getStatReport(boolean showFinalReport) {
+ syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
+ return this.getStatReport(syncDurationInMs, showFinalReport);
+ }
+
+ /* (non-Javadoc)
+ * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
+ */
+ @Override
+ public void shutdown() {
+ this.shutdownExecutors();
+ }
+
+ @Override
+ protected boolean isSyncDone() {
+ int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
+
+ if (totalWorkOnHand > 0 || !allWorkEnumerated) {
+ return false;
+ }
+
+ return true;
+ }
+
+}
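
The comment block in performDocumentUpsert describes the core decision: read the document first, then either do a plain PUT (404, the document does not exist yet) or a versioned bulk update (200, carrying the extracted _version so concurrent writers surface as version conflicts, which then flow through the retry queue). A condensed, self-contained sketch of that branch structure, assuming the version has already been extracted; adapter calls, payload merging, and the retry path are omitted:

public final class UpsertDecisionSketch {

  enum EsWriteMode { SIMPLE_PUT, VERSIONED_BULK_UPDATE, ABORT }

  // Condensed mirror of performDocumentUpsert: 404 means the document is new, 200 means it
  // exists and must be written back with the _version we just read, anything else is logged
  // and the sync of this entity is abandoned.
  static EsWriteMode decide(int getDocResultCode, String extractedVersion) {
    if (getDocResultCode == 404) {
      return EsWriteMode.SIMPLE_PUT;
    }
    if (getDocResultCode == 200 && extractedVersion != null) {
      return EsWriteMode.VERSIONED_BULK_UPDATE;
    }
    return EsWriteMode.ABORT;
  }

  public static void main(String[] args) {
    System.out.println(decide(404, null));  // SIMPLE_PUT
    System.out.println(decide(200, "7"));   // VERSIONED_BULK_UPDATE
    System.out.println(decide(201, null));  // ABORT (unexpected return code)
  }
}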
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java
new file mode 100644
index 0000000..bd15e1f
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java
@@ -0,0 +1,122 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.viewinspect.sync;
+
+import org.onap.aai.sparky.config.oxm.CrossEntityReferenceLookup;
+import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
+import org.onap.aai.sparky.config.oxm.SearchableEntityLookup;
+import org.onap.aai.sparky.crossentityreference.sync.CrossEntityReferenceSynchronizer;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner;
+import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
+import org.onap.aai.sparky.sync.IndexCleaner;
+import org.onap.aai.sparky.sync.IndexIntegrityValidator;
+import org.onap.aai.sparky.sync.SyncControllerImpl;
+import org.onap.aai.sparky.sync.SyncControllerRegistrar;
+import org.onap.aai.sparky.sync.SyncControllerRegistry;
+import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
+import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
+import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
+import org.onap.aai.sparky.sync.config.SyncControllerConfig;
+
+public class ViewInspectSyncController extends SyncControllerImpl
+ implements SyncControllerRegistrar {
+
+ private SyncControllerRegistry syncControllerRegistry;
+ private ActiveInventoryAdapter aaiAdapter;
+ private ElasticSearchAdapter esAdapter;
+ private ElasticSearchSchemaConfig schemaConfig;
+ private ElasticSearchEndpointConfig endpointConfig;
+
+ public ViewInspectSyncController(SyncControllerConfig syncControllerConfig,
+ ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
+ ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig,
+ NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig,
+ CrossEntityReferenceLookup crossEntityReferenceLookup, OxmEntityLookup oxmEntityLookup,
+ SearchableEntityLookup searchableEntityLookup,
+ ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception {
+ super(syncControllerConfig);
+
+
+ // final String controllerName = "View and Inspect Entity Synchronizer";
+
+ this.aaiAdapter = aaiAdapter;
+ this.esAdapter = esAdapter;
+ this.schemaConfig = schemaConfig;
+ this.endpointConfig = endpointConfig;
+ IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig,
+ endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig));
+
+ registerIndexValidator(indexValidator);
+
+
+ ViewInspectEntitySynchronizer ses = new ViewInspectEntitySynchronizer(schemaConfig,
+ syncControllerConfig.getNumInternalSyncWorkers(),
+ syncControllerConfig.getNumSyncActiveInventoryWorkers(),
+ syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig,
+ oxmEntityLookup, searchableEntityLookup);
+
+ ses.setAaiAdapter(aaiAdapter);
+ ses.setElasticSearchAdapter(esAdapter);
+
+ registerEntitySynchronizer(ses);
+
+ CrossEntityReferenceSynchronizer cers = new CrossEntityReferenceSynchronizer(schemaConfig,
+ syncControllerConfig.getNumInternalSyncWorkers(),
+ syncControllerConfig.getNumSyncActiveInventoryWorkers(),
+ syncControllerConfig.getNumSyncElasticWorkers(),aaiStatConfig,esStatConfig,
+ crossEntityReferenceLookup, oxmEntityLookup, searchableEntityLookup);
+
+ cers.setAaiAdapter(aaiAdapter);
+ cers.setElasticSearchAdapter(esAdapter);
+
+ registerEntitySynchronizer(cers);
+
+ IndexCleaner indexCleaner =
+ new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig);
+
+ registerIndexCleaner(indexCleaner);
+
+ }
+
+ public SyncControllerRegistry getSyncControllerRegistry() {
+ return syncControllerRegistry;
+ }
+
+ public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) {
+ this.syncControllerRegistry = syncControllerRegistry;
+ }
+
+ @Override
+ public void registerController() {
+    if (syncControllerRegistry != null && syncControllerConfig.isEnabled()) {
+      syncControllerRegistry.registerSyncController(this);
+    }
+
+ }
+}
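
ViewInspectSyncController takes all of its collaborators through the constructor and only registers itself when a SyncControllerRegistry has been injected and the controller is enabled, which fits the Spring-Boot wiring this change introduces. A purely illustrative sketch of how such a bean could be declared; the real bean definitions live elsewhere in the sparkybe modules and are not part of this file, and in a real context the two NetworkStatisticsConfig arguments would also need qualifiers:

import org.onap.aai.sparky.config.oxm.CrossEntityReferenceLookup;
import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
import org.onap.aai.sparky.config.oxm.SearchableEntityLookup;
import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
import org.onap.aai.sparky.dal.ElasticSearchAdapter;
import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory;
import org.onap.aai.sparky.sync.SyncControllerRegistry;
import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
import org.onap.aai.sparky.sync.config.SyncControllerConfig;
import org.onap.aai.sparky.viewinspect.sync.ViewInspectSyncController;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ViewInspectSyncWiringSketch {

  @Bean
  public ViewInspectSyncController viewInspectSyncController(
      SyncControllerConfig syncControllerConfig, ActiveInventoryAdapter aaiAdapter,
      ElasticSearchAdapter esAdapter, ElasticSearchSchemaConfig schemaConfig,
      ElasticSearchEndpointConfig endpointConfig, NetworkStatisticsConfig aaiStatConfig,
      NetworkStatisticsConfig esStatConfig, CrossEntityReferenceLookup crossEntityReferenceLookup,
      OxmEntityLookup oxmEntityLookup, SearchableEntityLookup searchableEntityLookup,
      ElasticSearchSchemaFactory elasticSearchSchemaFactory,
      SyncControllerRegistry syncControllerRegistry) throws Exception {

    ViewInspectSyncController controller = new ViewInspectSyncController(syncControllerConfig,
        aaiAdapter, esAdapter, schemaConfig, endpointConfig, aaiStatConfig, esStatConfig,
        crossEntityReferenceLookup, oxmEntityLookup, searchableEntityLookup,
        elasticSearchSchemaFactory);

    controller.setSyncControllerRegistry(syncControllerRegistry);
    // no-op unless the registry is set and the controller is enabled in its config
    controller.registerController();
    return controller;
  }
}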