From 4310430012a1bd939d406ec8a53045a128505ec9 Mon Sep 17 00:00:00 2001
From: leventecsanyi
Date: Tue, 17 Sep 2024 12:07:32 +0200
Subject: Refactored cps-ri package structure

- fixed import order and moved package structure

Issue-ID: CPS-2293
Change-Id: Ie2f9f057f261577054530feee7480850ba4b41e1
Signed-off-by: leventecsanyi
---
 .../cps/architecture/LayeredArchitectureTest.java | 4 +-
 .../impl/inventory/CmHandleQueryServiceImpl.java | 2 +-
 .../impl/inventory/InventoryPersistenceImpl.java | 2 +-
 .../ncmp/impl/inventory/NcmpPersistenceImpl.java | 2 +-
 .../inventory/CmHandleQueryServiceImplSpec.groovy | 2 +-
 .../inventory/InventoryPersistenceImplSpec.groovy | 2 +-
 .../cps/ri/CpsAdminPersistenceServiceImpl.java | 196 ++++
 .../onap/cps/ri/CpsDataPersistenceServiceImpl.java | 722 +++++++++++++++++++++
 .../cps/ri/CpsModulePersistenceServiceImpl.java | 432 ++++++++++++
 .../java/org/onap/cps/ri/models/AnchorEntity.java | 76 +++
 .../org/onap/cps/ri/models/DataspaceEntity.java | 70 ++
 .../org/onap/cps/ri/models/FragmentEntity.java | 93 +++
 .../org/onap/cps/ri/models/SchemaSetEntity.java | 71 ++
 .../org/onap/cps/ri/models/YangResourceEntity.java | 75 +++
 .../cps/ri/models/YangResourceModuleReference.java | 31 +
 .../onap/cps/ri/repository/AnchorRepository.java | 118 ++++
 .../cps/ri/repository/DataspaceRepository.java | 44 ++
 .../ri/repository/FragmentPrefetchRepository.java | 31 +
 .../repository/FragmentPrefetchRepositoryImpl.java | 127 ++++
 .../cps/ri/repository/FragmentQueryBuilder.java | 270 ++++++++
 .../onap/cps/ri/repository/FragmentRepository.java | 140 ++++
 .../repository/FragmentRepositoryCpsPathQuery.java | 40 ++
 .../FragmentRepositoryCpsPathQueryImpl.java | 71 ++
 .../cps/ri/repository/ModuleReferenceQuery.java | 37 ++
 .../ri/repository/ModuleReferenceRepository.java | 29 +
 .../repository/ModuleReferenceRepositoryImpl.java | 179 +++++
 .../cps/ri/repository/SchemaSetRepository.java | 79 +++
 .../SchemaSetYangResourceRepository.java | 29 +
 .../SchemaSetYangResourceRepositoryImpl.java | 59 ++
 .../onap/cps/ri/repository/TempTableCreator.java | 102 +++
 .../repository/YangResourceNativeRepository.java | 31 +
 .../YangResourceNativeRepositoryImpl.java | 69 ++
 .../cps/ri/repository/YangResourceRepository.java | 102 +++
 .../org/onap/cps/ri/utils/CpsSessionFactory.java | 70 ++
 .../org/onap/cps/ri/utils/CpsValidatorImpl.java | 70 ++
 .../java/org/onap/cps/ri/utils/EscapeUtils.java | 37 ++
 .../java/org/onap/cps/ri/utils/SessionManager.java | 180 +++++
 .../org/onap/cps/ri/utils/TimeLimiterProvider.java | 33 +
 .../org/onap/cps/spi/config/CpsSessionFactory.java | 70 --
 .../org/onap/cps/spi/entities/AnchorEntity.java | 76 ---
 .../org/onap/cps/spi/entities/DataspaceEntity.java | 70 --
 .../org/onap/cps/spi/entities/FragmentEntity.java | 93 ---
 .../org/onap/cps/spi/entities/SchemaSetEntity.java | 71 --
 .../onap/cps/spi/entities/YangResourceEntity.java | 75 ---
 .../spi/entities/YangResourceModuleReference.java | 31 -
 .../spi/impl/CpsAdminPersistenceServiceImpl.java | 196 ------
 .../spi/impl/CpsDataPersistenceServiceImpl.java | 722 ---------------------
 .../spi/impl/CpsModulePersistenceServiceImpl.java | 432 ------------
 .../onap/cps/spi/impl/utils/CpsValidatorImpl.java | 70 --
 .../onap/cps/spi/repository/AnchorRepository.java | 118 ----
 .../cps/spi/repository/DataspaceRepository.java | 44 --
 .../spi/repository/FragmentPrefetchRepository.java | 31 -
 .../repository/FragmentPrefetchRepositoryImpl.java | 127 ----
 .../cps/spi/repository/FragmentQueryBuilder.java | 270 --------
 .../cps/spi/repository/FragmentRepository.java | 140 ----
 .../repository/FragmentRepositoryCpsPathQuery.java | 40 --
 .../FragmentRepositoryCpsPathQueryImpl.java | 71 --
 .../cps/spi/repository/ModuleReferenceQuery.java | 37 --
 .../spi/repository/ModuleReferenceRepository.java | 29 -
 .../repository/ModuleReferenceRepositoryImpl.java | 179 -----
 .../cps/spi/repository/SchemaSetRepository.java | 79 ---
 .../SchemaSetYangResourceRepository.java | 29 -
 .../SchemaSetYangResourceRepositoryImpl.java | 59 --
 .../onap/cps/spi/repository/TempTableCreator.java | 102 ---
 .../repository/YangResourceNativeRepository.java | 31 -
 .../YangResourceNativeRepositoryImpl.java | 69 --
 .../cps/spi/repository/YangResourceRepository.java | 102 ---
 .../java/org/onap/cps/spi/utils/EscapeUtils.java | 37 --
 .../org/onap/cps/spi/utils/SessionManager.java | 181 ------
 .../onap/cps/spi/utils/TimeLimiterProvider.java | 33 -
 .../ri/CpsDataPersistenceServiceImplSpec.groovy | 281 ++++
 ...sModulePersistenceServiceConcurrencySpec.groovy | 145 +++++
 .../ri/CpsModulePersistenceServiceImplSpec.groovy | 104 +++
 .../onap/cps/ri/utils/CpsValidatorImplSpec.groovy | 78 +++
 .../org/onap/cps/ri/utils/EscapeUtilsSpec.groovy | 41 ++
 .../onap/cps/ri/utils/SessionManagerSpec.groovy | 137 ++++
 .../spi/impl/CpsDataPersistenceServiceSpec.groovy | 281 --------
 ...sModulePersistenceServiceConcurrencySpec.groovy | 145 -----
 .../impl/CpsModulePersistenceServiceSpec.groovy | 103 ---
 .../cps/spi/impl/utils/CpsValidatorSpec.groovy | 77 ---
 .../org/onap/cps/spi/utils/EscapeUtilsSpec.groovy | 41 --
 .../onap/cps/spi/utils/SessionManagerSpec.groovy | 139 ----
 .../onap/cps/api/impl/CpsAnchorServiceImpl.java | 2 +-
 .../org/onap/cps/api/impl/CpsDataServiceImpl.java | 2 +-
 .../onap/cps/api/impl/CpsDataspaceServiceImpl.java | 2 +-
 .../org/onap/cps/api/impl/CpsDeltaServiceImpl.java | 1 -
 .../onap/cps/api/impl/CpsModuleServiceImpl.java | 2 +-
 .../org/onap/cps/api/impl/CpsQueryServiceImpl.java | 2 +-
 .../cps/api/impl/YangTextSchemaSourceSetCache.java | 2 +-
 .../java/org/onap/cps/impl/utils/CpsValidator.java | 47 ++
 .../java/org/onap/cps/spi/utils/CpsValidator.java | 47 --
 .../cps/api/impl/CpsAnchorServiceImplSpec.groovy | 2 +-
 .../cps/api/impl/CpsDataServiceImplSpec.groovy | 9 +-
 .../api/impl/CpsDataspaceServiceImplSpec.groovy | 2 +-
 .../cps/api/impl/CpsModuleServiceImplSpec.groovy | 13 +-
 .../cps/api/impl/CpsQueryServiceImplSpec.groovy | 2 +-
 .../onap/cps/api/impl/E2ENetworkSliceSpec.groovy | 4 +-
 .../impl/YangTextSchemaSourceSetCacheSpec.groovy | 3 +-
 docs/admin-guide.rst | 4 +-
 .../integration/base/CpsIntegrationSpecBase.groovy | 6 +-
 .../cps/SessionManagerIntegrationSpec.groovy | 2 +-
 101 files changed, 4582 insertions(+), 4585 deletions(-)
 create mode 100755 cps-ri/src/main/java/org/onap/cps/ri/CpsAdminPersistenceServiceImpl.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/CpsDataPersistenceServiceImpl.java
 create mode 100755 cps-ri/src/main/java/org/onap/cps/ri/CpsModulePersistenceServiceImpl.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/models/AnchorEntity.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/models/DataspaceEntity.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/models/FragmentEntity.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/models/SchemaSetEntity.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/models/YangResourceEntity.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/models/YangResourceModuleReference.java
 create mode 100755 cps-ri/src/main/java/org/onap/cps/ri/repository/AnchorRepository.java
 create mode 100755 cps-ri/src/main/java/org/onap/cps/ri/repository/DataspaceRepository.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepository.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepositoryImpl.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentQueryBuilder.java
 create mode 100755 cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepository.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQuery.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQueryImpl.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceQuery.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceRepository.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceRepositoryImpl.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetRepository.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepository.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepositoryImpl.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/TempTableCreator.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepository.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepositoryImpl.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceRepository.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/utils/CpsSessionFactory.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/utils/CpsValidatorImpl.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/utils/EscapeUtils.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/utils/SessionManager.java
 create mode 100644 cps-ri/src/main/java/org/onap/cps/ri/utils/TimeLimiterProvider.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/config/CpsSessionFactory.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/entities/AnchorEntity.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/entities/DataspaceEntity.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntity.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/entities/SchemaSetEntity.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/entities/YangResourceEntity.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/entities/YangResourceModuleReference.java
 delete mode 100755 cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
 delete mode 100755 cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/impl/utils/CpsValidatorImpl.java
 delete mode 100755 cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java
 delete mode 100755 cps-ri/src/main/java/org/onap/cps/spi/repository/DataspaceRepository.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentQueryBuilder.java
 delete mode 100755 cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetYangResourceRepository.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetYangResourceRepositoryImpl.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceNativeRepository.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceNativeRepositoryImpl.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/utils/EscapeUtils.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/utils/SessionManager.java
 delete mode 100644 cps-ri/src/main/java/org/onap/cps/spi/utils/TimeLimiterProvider.java
 create mode 100644 cps-ri/src/test/groovy/org/onap/cps/ri/CpsDataPersistenceServiceImplSpec.groovy
 create mode 100644 cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceConcurrencySpec.groovy
 create mode 100644 cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceImplSpec.groovy
 create mode 100644 cps-ri/src/test/groovy/org/onap/cps/ri/utils/CpsValidatorImplSpec.groovy
 create mode 100644 cps-ri/src/test/groovy/org/onap/cps/ri/utils/EscapeUtilsSpec.groovy
 create mode 100644 cps-ri/src/test/groovy/org/onap/cps/ri/utils/SessionManagerSpec.groovy
 delete mode 100644 cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
 delete mode 100644 cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy
 delete mode 100644 cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
 delete mode 100644 cps-ri/src/test/groovy/org/onap/cps/spi/impl/utils/CpsValidatorSpec.groovy
 delete mode 100644 cps-ri/src/test/groovy/org/onap/cps/spi/utils/EscapeUtilsSpec.groovy
 delete mode 100644 cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy
 create mode 100644 cps-service/src/main/java/org/onap/cps/impl/utils/CpsValidator.java
 delete mode 100644 cps-service/src/main/java/org/onap/cps/spi/utils/CpsValidator.java

diff --git a/cps-application/src/test/java/org/onap/cps/architecture/LayeredArchitectureTest.java b/cps-application/src/test/java/org/onap/cps/architecture/LayeredArchitectureTest.java
index 91452a7fc5..82fdc7f487 100644
--- a/cps-application/src/test/java/org/onap/cps/architecture/LayeredArchitectureTest.java
+++ b/cps-application/src/test/java/org/onap/cps/architecture/LayeredArchitectureTest.java
@@ -38,9 +38,9 @@ public class LayeredArchitectureTest {
     private static final String REST_CONTROLLER_PACKAGE = "org.onap.cps.rest..";
     private static final String NCMP_REST_PACKAGE = "org.onap.cps.ncmp.rest..";
     private static final String API_SERVICE_PACKAGE = "org.onap.cps.api..";
-    private static final String SPI_SERVICE_PACKAGE = "org.onap.cps.spi..";
+    private static final String SPI_SERVICE_PACKAGE = "org.onap.cps.ri..";
     private static final String NCMP_SERVICE_PACKAGE = "org.onap.cps.ncmp.api..";
-    private static final String SPI_REPOSITORY_PACKAGE = "org.onap.cps.spi.repository..";
+    private static final String SPI_REPOSITORY_PACKAGE = "org.onap.cps.ri.repository..";
     private static final String YANG_SCHEMA_PACKAGE = "org.onap.cps.yang..";
     private static final String NOTIFICATION_PACKAGE = "org.onap.cps.notification..";
     private static final String CPS_UTILS_PACKAGE = "org.onap.cps.utils..";
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java
index f32008d482..71e7384208 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java
@@ -36,6 +36,7 @@ import lombok.RequiredArgsConstructor;
 import org.onap.cps.api.CpsDataService;
 import org.onap.cps.api.CpsQueryService;
 import org.onap.cps.cpspath.parser.CpsPathUtil;
+import org.onap.cps.impl.utils.CpsValidator;
 import org.onap.cps.ncmp.api.inventory.models.TrustLevel;
 import org.onap.cps.ncmp.impl.inventory.models.CmHandleState;
 import org.onap.cps.ncmp.impl.inventory.models.ModelledDmiServiceLeaves;
@@ -43,7 +44,6 @@ import org.onap.cps.ncmp.impl.inventory.models.PropertyType;
 import org.onap.cps.ncmp.impl.inventory.trustlevel.TrustLevelCacheConfig;
 import org.onap.cps.spi.FetchDescendantsOption;
 import org.onap.cps.spi.model.DataNode;
-import org.onap.cps.spi.utils.CpsValidator;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Component;
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java
index 13cde86b6e..06c3f8d2f4 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java
@@ -37,6 +37,7 @@ import lombok.extern.slf4j.Slf4j;
 import org.onap.cps.api.CpsAnchorService;
 import org.onap.cps.api.CpsDataService;
 import org.onap.cps.api.CpsModuleService;
+import org.onap.cps.impl.utils.CpsValidator;
 import org.onap.cps.ncmp.api.inventory.models.CompositeState;
 import org.onap.cps.ncmp.api.inventory.models.CompositeStateBuilder;
 import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle;
@@ -47,7 +48,6 @@ import org.onap.cps.spi.exceptions.DataValidationException;
 import org.onap.cps.spi.model.DataNode;
 import org.onap.cps.spi.model.ModuleDefinition;
 import org.onap.cps.spi.model.ModuleReference;
-import org.onap.cps.spi.utils.CpsValidator;
 import org.onap.cps.utils.ContentType;
 import org.onap.cps.utils.JsonObjectMapper;
 import org.springframework.stereotype.Component;
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/NcmpPersistenceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/NcmpPersistenceImpl.java
index e44b6ba342..905b09ef74 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/NcmpPersistenceImpl.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/NcmpPersistenceImpl.java
@@ -29,10 +29,10 @@ import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.onap.cps.api.CpsDataService;
 import org.onap.cps.api.CpsModuleService;
+import org.onap.cps.impl.utils.CpsValidator;
 import org.onap.cps.spi.FetchDescendantsOption;
 import org.onap.cps.spi.exceptions.SchemaSetNotFoundException;
 import org.onap.cps.spi.model.DataNode;
-import org.onap.cps.spi.utils.CpsValidator;
 import org.onap.cps.utils.JsonObjectMapper;
 import org.springframework.stereotype.Component;
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImplSpec.groovy
index cb3c4ffec1..7e34fe2822 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImplSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImplSpec.groovy
@@ -23,10 +23,10 @@ package org.onap.cps.ncmp.impl.inventory
 import org.onap.cps.api.CpsDataService
 import org.onap.cps.api.CpsQueryService
+import org.onap.cps.impl.utils.CpsValidator
 import org.onap.cps.ncmp.api.inventory.models.TrustLevel
 import org.onap.cps.ncmp.impl.inventory.models.CmHandleState
 import org.onap.cps.spi.model.DataNode
-import org.onap.cps.spi.utils.CpsValidator
 import spock.lang.Shared
 import spock.lang.Specification
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy
index fdf12a880d..1830f1331d 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy
@@ -26,6 +26,7 @@ import com.fasterxml.jackson.databind.ObjectMapper
 import org.onap.cps.api.CpsAnchorService
 import org.onap.cps.api.CpsDataService
 import org.onap.cps.api.CpsModuleService
+import org.onap.cps.impl.utils.CpsValidator
 import org.onap.cps.ncmp.api.inventory.models.CompositeState
 import org.onap.cps.ncmp.impl.inventory.models.CmHandleState
 import org.onap.cps.ncmp.impl.inventory.models.YangModelCmHandle
@@ -35,7 +36,6 @@ import org.onap.cps.spi.exceptions.DataNodeNotFoundException
 import org.onap.cps.spi.model.DataNode
 import org.onap.cps.spi.model.ModuleDefinition
 import org.onap.cps.spi.model.ModuleReference
-import org.onap.cps.spi.utils.CpsValidator
 import org.onap.cps.utils.ContentType
 import org.onap.cps.utils.JsonObjectMapper
 import spock.lang.Shared
diff --git a/cps-ri/src/main/java/org/onap/cps/ri/CpsAdminPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/CpsAdminPersistenceServiceImpl.java
new file mode 100755
index 0000000000..b85b0f9b98
--- /dev/null
+++ b/cps-ri/src/main/java/org/onap/cps/ri/CpsAdminPersistenceServiceImpl.java
@@ -0,0 +1,196 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020-2024 Nordix Foundation.
+ * Modifications Copyright (C) 2020-2022 Bell Canada.
+ * Modifications Copyright (C) 2021 Pantheon.tech
+ * Modifications Copyright (C) 2022 TechMahindra Ltd.
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri; + +import jakarta.transaction.Transactional; +import java.util.Collection; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.SchemaSetEntity; +import org.onap.cps.ri.repository.AnchorRepository; +import org.onap.cps.ri.repository.DataspaceRepository; +import org.onap.cps.ri.repository.SchemaSetRepository; +import org.onap.cps.spi.CpsAdminPersistenceService; +import org.onap.cps.spi.exceptions.AlreadyDefinedException; +import org.onap.cps.spi.exceptions.DataspaceInUseException; +import org.onap.cps.spi.model.Anchor; +import org.onap.cps.spi.model.Dataspace; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +@RequiredArgsConstructor +public class CpsAdminPersistenceServiceImpl implements CpsAdminPersistenceService { + + private final DataspaceRepository dataspaceRepository; + private final AnchorRepository anchorRepository; + private final SchemaSetRepository schemaSetRepository; + + @Override + public void createDataspace(final String dataspaceName) { + try { + dataspaceRepository.save(new DataspaceEntity(dataspaceName)); + } catch (final DataIntegrityViolationException e) { + throw AlreadyDefinedException.forDataspace(dataspaceName, e); + } + } + + @Override + public void deleteDataspace(final String dataspaceName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final int numberOfAssociatedAnchors = anchorRepository.countByDataspace(dataspaceEntity); + if (numberOfAssociatedAnchors != 0) { + throw new DataspaceInUseException(dataspaceName, + String.format("Dataspace contains %d anchor(s)", numberOfAssociatedAnchors)); + } + final int numberOfAssociatedSchemaSets = schemaSetRepository.countByDataspace(dataspaceEntity); + if (numberOfAssociatedSchemaSets != 0) { + throw new DataspaceInUseException(dataspaceName, + String.format("Dataspace contains %d schemaset(s)", numberOfAssociatedSchemaSets)); + } + dataspaceRepository.delete(dataspaceEntity); + } + + @Override + public Dataspace getDataspace(final String dataspaceName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + return toDataspace(dataspaceEntity); + } + + @Override + public Collection getAllDataspaces() { + final Collection dataspaceEntities = dataspaceRepository.findAll(); + return dataspaceEntities.stream().map(CpsAdminPersistenceServiceImpl::toDataspace) + .collect(Collectors.toSet()); + } + + @Override + public void createAnchor(final String dataspaceName, final String 
schemaSetName, final String anchorName) { + final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final var schemaSetEntity = + schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); + final var anchorEntity = AnchorEntity.builder() + .name(anchorName) + .dataspace(dataspaceEntity) + .schemaSet(schemaSetEntity) + .build(); + try { + anchorRepository.save(anchorEntity); + } catch (final DataIntegrityViolationException e) { + throw AlreadyDefinedException.forAnchor(anchorName, dataspaceName, e); + } + } + + @Override + public Anchor getAnchor(final String dataspaceName, final String anchorName) { + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + return toAnchor(anchorEntity); + } + + @Override + public Collection getAnchors(final String dataspaceName) { + final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final Collection anchorEntities = anchorRepository.findAllByDataspace(dataspaceEntity); + return anchorEntities.stream().map(CpsAdminPersistenceServiceImpl::toAnchor).collect(Collectors.toSet()); + } + + @Override + public Collection getAnchors(final String dataspaceName, final Collection anchorNames) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + return anchorRepository.findAllByDataspaceAndNameIn(dataspaceEntity, anchorNames) + .stream().map(CpsAdminPersistenceServiceImpl::toAnchor).collect(Collectors.toSet()); + } + + @Override + public Collection getAnchorsBySchemaSetName(final String dataspaceName, final String schemaSetName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final SchemaSetEntity schemaSetEntity = schemaSetRepository.getByDataspaceAndName( + dataspaceEntity, schemaSetName); + return anchorRepository.findAllBySchemaSet(schemaSetEntity) + .stream().map(CpsAdminPersistenceServiceImpl::toAnchor) + .collect(Collectors.toSet()); + } + + @Override + public Collection getAnchorsBySchemaSetNames(final String dataspaceName, + final Collection schemaSetNames) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + return anchorRepository.findAllByDataspaceAndSchemaSetNameIn(dataspaceEntity, schemaSetNames) + .stream().map(CpsAdminPersistenceServiceImpl::toAnchor).collect(Collectors.toSet()); + } + + @Override + public Collection queryAnchorNames(final String dataspaceName, final Collection inputModuleNames) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + return anchorRepository.getAnchorNamesByDataspaceIdAndModuleNames(dataspaceEntity.getId(), inputModuleNames, + inputModuleNames.size()); + } + + @Transactional + @Override + public void deleteAnchor(final String dataspaceName, final String anchorName) { + final var anchorEntity = getAnchorEntity(dataspaceName, anchorName); + anchorRepository.delete(anchorEntity); + } + + @Transactional + @Override + public void deleteAnchors(final String dataspaceName, final Collection anchorNames) { + final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + anchorRepository.deleteAllByDataspaceAndNameIn(dataspaceEntity, anchorNames); + } + + @Transactional + @Override + public void updateAnchorSchemaSet(final String dataspaceName, + final String anchorName, + final String schemaSetName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, 
anchorName); + final SchemaSetEntity schemaSetEntity = schemaSetRepository + .getByDataspaceAndName(dataspaceEntity, schemaSetName); + anchorRepository.updateAnchorSchemaSetId(schemaSetEntity.getId(), anchorEntity.getId()); + } + + private AnchorEntity getAnchorEntity(final String dataspaceName, final String anchorName) { + final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + return anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); + } + + private static Anchor toAnchor(final AnchorEntity anchorEntity) { + return Anchor.builder() + .name(anchorEntity.getName()) + .dataspaceName(anchorEntity.getDataspace().getName()) + .schemaSetName(anchorEntity.getSchemaSet().getName()) + .build(); + } + + private static Dataspace toDataspace(final DataspaceEntity dataspaceEntity) { + return Dataspace.builder().name(dataspaceEntity.getName()).build(); + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/CpsDataPersistenceServiceImpl.java new file mode 100644 index 0000000000..ec46fea4cb --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/CpsDataPersistenceServiceImpl.java @@ -0,0 +1,722 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2021-2024 Nordix Foundation + * Modifications Copyright (C) 2021 Pantheon.tech + * Modifications Copyright (C) 2020-2022 Bell Canada. + * Modifications Copyright (C) 2022-2023 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri; + +import static org.onap.cps.spi.PaginationOption.NO_PAGINATION; + +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.ImmutableSet.Builder; +import io.micrometer.core.annotation.Timed; +import jakarta.transaction.Transactional; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.hibernate.StaleStateException; +import org.onap.cps.cpspath.parser.CpsPathQuery; +import org.onap.cps.cpspath.parser.CpsPathUtil; +import org.onap.cps.cpspath.parser.PathParsingException; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.FragmentEntity; +import org.onap.cps.ri.repository.AnchorRepository; +import org.onap.cps.ri.repository.DataspaceRepository; +import org.onap.cps.ri.repository.FragmentRepository; +import org.onap.cps.ri.utils.SessionManager; +import org.onap.cps.spi.CpsDataPersistenceService; +import org.onap.cps.spi.FetchDescendantsOption; +import org.onap.cps.spi.PaginationOption; +import org.onap.cps.spi.exceptions.AlreadyDefinedException; +import org.onap.cps.spi.exceptions.ConcurrencyException; +import org.onap.cps.spi.exceptions.CpsAdminException; +import org.onap.cps.spi.exceptions.CpsPathException; +import org.onap.cps.spi.exceptions.DataNodeNotFoundException; +import org.onap.cps.spi.exceptions.DataNodeNotFoundExceptionBatch; +import org.onap.cps.spi.model.DataNode; +import org.onap.cps.spi.model.DataNodeBuilder; +import org.onap.cps.utils.JsonObjectMapper; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.stereotype.Service; + +@Service +@Slf4j +@RequiredArgsConstructor +public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService { + + private final DataspaceRepository dataspaceRepository; + private final AnchorRepository anchorRepository; + private final FragmentRepository fragmentRepository; + private final JsonObjectMapper jsonObjectMapper; + private final SessionManager sessionManager; + + private static final String REG_EX_FOR_OPTIONAL_LIST_INDEX = "(\\[@.+?])?)"; + + @Override + public void storeDataNodes(final String dataspaceName, final String anchorName, + final Collection dataNodes) { + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + final List fragmentEntities = new ArrayList<>(dataNodes.size()); + try { + for (final DataNode dataNode: dataNodes) { + final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(anchorEntity, dataNode); + fragmentEntities.add(fragmentEntity); + } + fragmentRepository.saveAll(fragmentEntities); + } catch (final DataIntegrityViolationException exception) { + log.warn("Exception occurred : {} , While saving : {} data nodes, Retrying saving data nodes individually", + exception, dataNodes.size()); + storeDataNodesIndividually(anchorEntity, dataNodes); + } + } + + private void storeDataNodesIndividually(final AnchorEntity anchorEntity, final Collection dataNodes) { + final 
Collection failedXpaths = new HashSet<>(); + for (final DataNode dataNode: dataNodes) { + try { + final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(anchorEntity, dataNode); + fragmentRepository.save(fragmentEntity); + } catch (final DataIntegrityViolationException dataIntegrityViolationException) { + failedXpaths.add(dataNode.getXpath()); + } + } + if (!failedXpaths.isEmpty()) { + throw AlreadyDefinedException.forDataNodes(failedXpaths, anchorEntity.getName()); + } + } + + /** + * Convert DataNode object into Fragment and places the result in the fragments placeholder. Performs same action + * for all DataNode children recursively. + * + * @param anchorEntity anchorEntity + * @param dataNodeToBeConverted dataNode + * @return a Fragment built from current DataNode + */ + private FragmentEntity convertToFragmentWithAllDescendants(final AnchorEntity anchorEntity, + final DataNode dataNodeToBeConverted) { + final FragmentEntity parentFragment = toFragmentEntity(anchorEntity, dataNodeToBeConverted); + final Builder childFragmentsImmutableSetBuilder = ImmutableSet.builder(); + for (final DataNode childDataNode : dataNodeToBeConverted.getChildDataNodes()) { + final FragmentEntity childFragment = convertToFragmentWithAllDescendants(anchorEntity, childDataNode); + childFragmentsImmutableSetBuilder.add(childFragment); + } + parentFragment.setChildFragments(childFragmentsImmutableSetBuilder.build()); + return parentFragment; + } + + @Override + public void addListElements(final String dataspaceName, final String anchorName, final String parentNodeXpath, + final Collection newListElements) { + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + addChildrenDataNodes(anchorEntity, parentNodeXpath, newListElements); + } + + @Override + public void addChildDataNodes(final String dataspaceName, final String anchorName, + final String parentNodeXpath, final Collection dataNodes) { + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + addChildrenDataNodes(anchorEntity, parentNodeXpath, dataNodes); + } + + private void addChildrenDataNodes(final AnchorEntity anchorEntity, final String parentNodeXpath, + final Collection newChildren) { + final FragmentEntity parentFragmentEntity = getFragmentEntity(anchorEntity, parentNodeXpath); + final List fragmentEntities = new ArrayList<>(newChildren.size()); + try { + for (final DataNode newChildAsDataNode : newChildren) { + final FragmentEntity newChildAsFragmentEntity = + convertToFragmentWithAllDescendants(anchorEntity, newChildAsDataNode); + newChildAsFragmentEntity.setParentId(parentFragmentEntity.getId()); + fragmentEntities.add(newChildAsFragmentEntity); + } + fragmentRepository.saveAll(fragmentEntities); + } catch (final DataIntegrityViolationException dataIntegrityViolationException) { + log.warn("Exception occurred : {} , While saving : {} children, retrying using individual save operations", + dataIntegrityViolationException, fragmentEntities.size()); + retrySavingEachChildIndividually(anchorEntity, parentNodeXpath, newChildren); + } + } + + private void addNewChildDataNode(final AnchorEntity anchorEntity, final String parentNodeXpath, + final DataNode newChild) { + final FragmentEntity parentFragmentEntity = getFragmentEntity(anchorEntity, parentNodeXpath); + final FragmentEntity newChildAsFragmentEntity = convertToFragmentWithAllDescendants(anchorEntity, newChild); + newChildAsFragmentEntity.setParentId(parentFragmentEntity.getId()); + try { + 
fragmentRepository.save(newChildAsFragmentEntity); + } catch (final DataIntegrityViolationException dataIntegrityViolationException) { + throw AlreadyDefinedException.forDataNodes(Collections.singletonList(newChild.getXpath()), + anchorEntity.getName()); + } + } + + private void retrySavingEachChildIndividually(final AnchorEntity anchorEntity, final String parentNodeXpath, + final Collection newChildren) { + final Collection failedXpaths = new HashSet<>(); + for (final DataNode newChild : newChildren) { + try { + addNewChildDataNode(anchorEntity, parentNodeXpath, newChild); + } catch (final AlreadyDefinedException alreadyDefinedException) { + failedXpaths.add(newChild.getXpath()); + } + } + if (!failedXpaths.isEmpty()) { + throw AlreadyDefinedException.forDataNodes(failedXpaths, anchorEntity.getName()); + } + } + + @Override + public void batchUpdateDataLeaves(final String dataspaceName, final String anchorName, + final Map> updatedLeavesPerXPath) { + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + + final Collection xpathsOfUpdatedLeaves = updatedLeavesPerXPath.keySet(); + final Collection fragmentEntities = getFragmentEntities(anchorEntity, xpathsOfUpdatedLeaves); + + for (final FragmentEntity fragmentEntity : fragmentEntities) { + final Map updatedLeaves = updatedLeavesPerXPath.get(fragmentEntity.getXpath()); + final String mergedLeaves = mergeLeaves(updatedLeaves, fragmentEntity.getAttributes()); + fragmentEntity.setAttributes(mergedLeaves); + } + + try { + fragmentRepository.saveAll(fragmentEntities); + } catch (final StaleStateException staleStateException) { + retryUpdateDataNodesIndividually(anchorEntity, fragmentEntities); + } + } + + @Override + public void updateDataNodesAndDescendants(final String dataspaceName, final String anchorName, + final Collection updatedDataNodes) { + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + + final Map xpathToUpdatedDataNode = updatedDataNodes.stream() + .collect(Collectors.toMap(DataNode::getXpath, dataNode -> dataNode)); + + final Collection xpaths = xpathToUpdatedDataNode.keySet(); + Collection existingFragmentEntities = getFragmentEntities(anchorEntity, xpaths); + existingFragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities( + FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS, existingFragmentEntities); + + for (final FragmentEntity existingFragmentEntity : existingFragmentEntities) { + final DataNode updatedDataNode = xpathToUpdatedDataNode.get(existingFragmentEntity.getXpath()); + updateFragmentEntityAndDescendantsWithDataNode(existingFragmentEntity, updatedDataNode); + } + + try { + fragmentRepository.saveAll(existingFragmentEntities); + } catch (final StaleStateException staleStateException) { + retryUpdateDataNodesIndividually(anchorEntity, existingFragmentEntities); + } + } + + private void retryUpdateDataNodesIndividually(final AnchorEntity anchorEntity, + final Collection fragmentEntities) { + final Collection failedXpaths = new HashSet<>(); + for (final FragmentEntity dataNodeFragment : fragmentEntities) { + try { + fragmentRepository.save(dataNodeFragment); + } catch (final StaleStateException staleStateException) { + failedXpaths.add(dataNodeFragment.getXpath()); + } + } + if (!failedXpaths.isEmpty()) { + final String failedXpathsConcatenated = String.join(",", failedXpaths); + throw new ConcurrencyException("Concurrent Transactions", String.format( + "DataNodes : %s in Dataspace :'%s' with Anchor : '%s' are updated by another transaction.", + 
failedXpathsConcatenated, anchorEntity.getDataspace().getName(), anchorEntity.getName())); + } + } + + private void updateFragmentEntityAndDescendantsWithDataNode(final FragmentEntity existingFragmentEntity, + final DataNode newDataNode) { + copyAttributesFromNewDataNode(existingFragmentEntity, newDataNode); + + final Map existingChildrenByXpath = existingFragmentEntity.getChildFragments().stream() + .collect(Collectors.toMap(FragmentEntity::getXpath, childFragmentEntity -> childFragmentEntity)); + + final Collection updatedChildFragments = new HashSet<>(); + for (final DataNode newDataNodeChild : newDataNode.getChildDataNodes()) { + final FragmentEntity childFragment; + if (isNewDataNode(newDataNodeChild, existingChildrenByXpath)) { + childFragment = convertToFragmentWithAllDescendants(existingFragmentEntity.getAnchor(), + newDataNodeChild); + } else { + childFragment = existingChildrenByXpath.get(newDataNodeChild.getXpath()); + updateFragmentEntityAndDescendantsWithDataNode(childFragment, newDataNodeChild); + } + updatedChildFragments.add(childFragment); + } + + existingFragmentEntity.getChildFragments().clear(); + existingFragmentEntity.getChildFragments().addAll(updatedChildFragments); + } + + @Override + @Timed(value = "cps.data.persistence.service.datanode.query", + description = "Time taken to query data nodes") + public List queryDataNodes(final String dataspaceName, final String anchorName, final String cpsPath, + final FetchDescendantsOption fetchDescendantsOption) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); + final CpsPathQuery cpsPathQuery; + try { + cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath); + } catch (final PathParsingException pathParsingException) { + throw new CpsPathException(pathParsingException.getMessage()); + } + + Collection fragmentEntities; + fragmentEntities = fragmentRepository.findByAnchorAndCpsPath(anchorEntity, cpsPathQuery); + if (cpsPathQuery.hasAncestorAxis()) { + final Collection ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery); + fragmentEntities = fragmentRepository.findByAnchorAndXpathIn(anchorEntity, ancestorXpaths); + } + fragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, + fragmentEntities); + return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); + } + + @Override + @Timed(value = "cps.data.persistence.service.datanode.query.anchors", + description = "Time taken to query data nodes across all anchors or list of anchors") + public List queryDataNodesAcrossAnchors(final String dataspaceName, final String cpsPath, + final FetchDescendantsOption fetchDescendantsOption, + final PaginationOption paginationOption) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final CpsPathQuery cpsPathQuery; + try { + cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath); + } catch (final PathParsingException e) { + throw new CpsPathException(e.getMessage()); + } + + final List anchorIds; + if (paginationOption == NO_PAGINATION) { + anchorIds = Collections.emptyList(); + } else { + anchorIds = getAnchorIdsForPagination(dataspaceEntity, cpsPathQuery, paginationOption); + if (anchorIds.isEmpty()) { + return Collections.emptyList(); + } + } + Collection fragmentEntities = + fragmentRepository.findByDataspaceAndCpsPath(dataspaceEntity, cpsPathQuery, anchorIds); + + if 
(cpsPathQuery.hasAncestorAxis()) { + final Collection ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery); + if (anchorIds.isEmpty()) { + fragmentEntities = fragmentRepository.findByDataspaceAndXpathIn(dataspaceEntity, ancestorXpaths); + } else { + fragmentEntities = fragmentRepository.findByAnchorIdsAndXpathIn( + anchorIds.toArray(new Long[0]), ancestorXpaths.toArray(new String[0])); + } + + } + fragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, + fragmentEntities); + return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); + } + + private List createDataNodesFromFragmentEntities(final FetchDescendantsOption fetchDescendantsOption, + final Collection fragmentEntities) { + final List dataNodes = new ArrayList<>(fragmentEntities.size()); + for (final FragmentEntity fragmentEntity : fragmentEntities) { + dataNodes.add(toDataNode(fragmentEntity, fetchDescendantsOption)); + } + return Collections.unmodifiableList(dataNodes); + } + + @Override + public String startSession() { + return sessionManager.startSession(); + } + + @Override + public void closeSession(final String sessionId) { + sessionManager.closeSession(sessionId, SessionManager.WITH_COMMIT); + } + + @Override + public void lockAnchor(final String sessionId, final String dataspaceName, + final String anchorName, final Long timeoutInMilliseconds) { + sessionManager.lockAnchor(sessionId, dataspaceName, anchorName, timeoutInMilliseconds); + } + + @Override + public Integer countAnchorsForDataspaceAndCpsPath(final String dataspaceName, final String cpsPath) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final CpsPathQuery cpsPathQuery; + try { + cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath); + } catch (final PathParsingException e) { + throw new CpsPathException(e.getMessage()); + } + final List anchorIdList = getAnchorIdsForPagination(dataspaceEntity, cpsPathQuery, NO_PAGINATION); + return anchorIdList.size(); + } + + private DataNode toDataNode(final FragmentEntity fragmentEntity, + final FetchDescendantsOption fetchDescendantsOption) { + final List childDataNodes = getChildDataNodes(fragmentEntity, fetchDescendantsOption); + Map leaves = new HashMap<>(); + if (fragmentEntity.getAttributes() != null) { + leaves = jsonObjectMapper.convertJsonString(fragmentEntity.getAttributes(), Map.class); + } + return new DataNodeBuilder() + .withXpath(fragmentEntity.getXpath()) + .withLeaves(leaves) + .withDataspace(fragmentEntity.getAnchor().getDataspace().getName()) + .withAnchor(fragmentEntity.getAnchor().getName()) + .withChildDataNodes(childDataNodes).build(); + } + + private FragmentEntity toFragmentEntity(final AnchorEntity anchorEntity, final DataNode dataNode) { + return FragmentEntity.builder() + .anchor(anchorEntity) + .xpath(dataNode.getXpath()) + .attributes(jsonObjectMapper.asJsonString(dataNode.getLeaves())) + .build(); + } + + + + @Override + @Transactional + public void replaceListContent(final String dataspaceName, final String anchorName, final String parentNodeXpath, + final Collection newListElements) { + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + final FragmentEntity parentEntity = getFragmentEntity(anchorEntity, parentNodeXpath); + final String listElementXpathPrefix = getListElementXpathPrefix(newListElements); + final Map existingListElementFragmentEntitiesByXPath = + extractListElementFragmentEntitiesByXPath(parentEntity.getChildFragments(), 
listElementXpathPrefix); + parentEntity.getChildFragments().removeAll(existingListElementFragmentEntitiesByXPath.values()); + final Set updatedChildFragmentEntities = new HashSet<>(); + for (final DataNode newListElement : newListElements) { + final FragmentEntity existingListElementEntity = + existingListElementFragmentEntitiesByXPath.get(newListElement.getXpath()); + final FragmentEntity entityToBeAdded = getFragmentForReplacement(parentEntity, newListElement, + existingListElementEntity); + updatedChildFragmentEntities.add(entityToBeAdded); + } + parentEntity.getChildFragments().addAll(updatedChildFragmentEntities); + fragmentRepository.save(parentEntity); + } + + @Override + @Transactional + public void deleteDataNodes(final String dataspaceName, final String anchorName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + anchorRepository.findByDataspaceAndName(dataspaceEntity, anchorName) + .ifPresent(anchorEntity -> fragmentRepository.deleteByAnchorIn(Collections.singletonList(anchorEntity))); + } + + @Override + @Transactional + public void deleteDataNodes(final String dataspaceName, final Collection anchorNames) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final Collection anchorEntities = + anchorRepository.findAllByDataspaceAndNameIn(dataspaceEntity, anchorNames); + fragmentRepository.deleteByAnchorIn(anchorEntities); + } + + @Override + @Transactional + public void deleteDataNodes(final String dataspaceName, final String anchorName, + final Collection xpathsToDelete) { + deleteDataNodes(dataspaceName, anchorName, xpathsToDelete, false); + } + + private void deleteDataNodes(final String dataspaceName, final String anchorName, + final Collection xpathsToDelete, final boolean onlySupportListDeletion) { + final boolean haveRootXpath = xpathsToDelete.stream().anyMatch(CpsDataPersistenceServiceImpl::isRootXpath); + if (haveRootXpath) { + deleteDataNodes(dataspaceName, anchorName); + return; + } + + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + + final Collection deleteChecklist = getNormalizedXpaths(xpathsToDelete); + final Collection xpathsToExistingContainers = + fragmentRepository.findAllXpathByAnchorAndXpathIn(anchorEntity, deleteChecklist); + if (onlySupportListDeletion) { + final Collection xpathsToExistingListElements = xpathsToExistingContainers.stream() + .filter(CpsPathUtil::isPathToListElement).collect(Collectors.toList()); + deleteChecklist.removeAll(xpathsToExistingListElements); + } else { + deleteChecklist.removeAll(xpathsToExistingContainers); + } + + final Collection xpathsToExistingLists = deleteChecklist.stream() + .filter(xpath -> fragmentRepository.existsByAnchorAndXpathStartsWith(anchorEntity, xpath + "[")) + .collect(Collectors.toList()); + deleteChecklist.removeAll(xpathsToExistingLists); + + if (!deleteChecklist.isEmpty()) { + throw new DataNodeNotFoundExceptionBatch(dataspaceName, anchorName, deleteChecklist); + } + + fragmentRepository.deleteByAnchorIdAndXpaths(anchorEntity.getId(), xpathsToExistingContainers); + fragmentRepository.deleteListsByAnchorIdAndXpaths(anchorEntity.getId(), xpathsToExistingLists); + } + + @Override + @Transactional + public void deleteListDataNode(final String dataspaceName, final String anchorName, + final String targetXpath) { + deleteDataNode(dataspaceName, anchorName, targetXpath, true); + } + + @Override + @Transactional + public void deleteDataNode(final String dataspaceName, final String anchorName, final 
String targetXpath) { + deleteDataNode(dataspaceName, anchorName, targetXpath, false); + } + + private void deleteDataNode(final String dataspaceName, final String anchorName, final String targetXpath, + final boolean onlySupportListNodeDeletion) { + final String normalizedXpath = getNormalizedXpath(targetXpath); + try { + deleteDataNodes(dataspaceName, anchorName, Collections.singletonList(normalizedXpath), + onlySupportListNodeDeletion); + } catch (final DataNodeNotFoundExceptionBatch dataNodeNotFoundExceptionBatch) { + throw new DataNodeNotFoundException(dataspaceName, anchorName, targetXpath); + } + } + + @Override + @Timed(value = "cps.data.persistence.service.datanode.get", + description = "Time taken to get a data node") + public Collection getDataNodes(final String dataspaceName, final String anchorName, + final String xpath, + final FetchDescendantsOption fetchDescendantsOption) { + final String targetXpath = getNormalizedXpath(xpath); + final Collection dataNodes = getDataNodesForMultipleXpaths(dataspaceName, anchorName, + Collections.singletonList(targetXpath), fetchDescendantsOption); + if (dataNodes.isEmpty()) { + throw new DataNodeNotFoundException(dataspaceName, anchorName, xpath); + } + return dataNodes; + } + + @Override + @Timed(value = "cps.data.persistence.service.datanode.batch.get", + description = "Time taken to get data nodes") + public Collection getDataNodesForMultipleXpaths(final String dataspaceName, final String anchorName, + final Collection xpaths, + final FetchDescendantsOption fetchDescendantsOption) { + final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); + Collection fragmentEntities = getFragmentEntities(anchorEntity, xpaths); + fragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, + fragmentEntities); + return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); + } + + private List getChildDataNodes(final FragmentEntity fragmentEntity, + final FetchDescendantsOption fetchDescendantsOption) { + if (fetchDescendantsOption.hasNext()) { + return fragmentEntity.getChildFragments().stream() + .map(childFragmentEntity -> toDataNode(childFragmentEntity, fetchDescendantsOption.next())) + .collect(Collectors.toList()); + } + return Collections.emptyList(); + } + + private AnchorEntity getAnchorEntity(final String dataspaceName, final String anchorName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + return anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); + } + + private List getAnchorIdsForPagination(final DataspaceEntity dataspaceEntity, final CpsPathQuery cpsPathQuery, + final PaginationOption paginationOption) { + return fragmentRepository.findAnchorIdsForPagination(dataspaceEntity, cpsPathQuery, paginationOption); + } + + private static String getNormalizedXpath(final String xpathSource) { + if (isRootXpath(xpathSource)) { + return xpathSource; + } + try { + return CpsPathUtil.getNormalizedXpath(xpathSource); + } catch (final PathParsingException pathParsingException) { + throw new CpsPathException(pathParsingException.getMessage()); + } + } + + private static Collection getNormalizedXpaths(final Collection xpaths) { + final Collection normalizedXpaths = new HashSet<>(xpaths.size()); + for (final String xpath : xpaths) { + try { + normalizedXpaths.add(getNormalizedXpath(xpath)); + } catch (final CpsPathException cpsPathException) { + log.warn("Error parsing xpath \"{}\": {}", xpath, 
cpsPathException.getMessage()); + } + } + return normalizedXpaths; + } + + private FragmentEntity getFragmentEntity(final AnchorEntity anchorEntity, final String xpath) { + final FragmentEntity fragmentEntity; + if (isRootXpath(xpath)) { + fragmentEntity = fragmentRepository.findOneByAnchorId(anchorEntity.getId()).orElse(null); + } else { + fragmentEntity = fragmentRepository.getByAnchorAndXpath(anchorEntity, getNormalizedXpath(xpath)); + } + if (fragmentEntity == null) { + throw new DataNodeNotFoundException(anchorEntity.getDataspace().getName(), anchorEntity.getName(), xpath); + } + return fragmentEntity; + } + + private Collection getFragmentEntities(final AnchorEntity anchorEntity, + final Collection xpaths) { + final Collection normalizedXpaths = getNormalizedXpaths(xpaths); + + final boolean haveRootXpath = normalizedXpaths.removeIf(CpsDataPersistenceServiceImpl::isRootXpath); + + final List fragmentEntities = fragmentRepository.findByAnchorAndXpathIn(anchorEntity, + normalizedXpaths); + + for (final FragmentEntity fragmentEntity : fragmentEntities) { + normalizedXpaths.remove(fragmentEntity.getXpath()); + } + + for (final String xpath : normalizedXpaths) { + if (!CpsPathUtil.isPathToListElement(xpath)) { + fragmentEntities.addAll(fragmentRepository.findListByAnchorAndXpath(anchorEntity, xpath)); + } + } + + if (haveRootXpath) { + fragmentEntities.addAll(fragmentRepository.findRootsByAnchorId(anchorEntity.getId())); + } + + return fragmentEntities; + } + + private static String getListElementXpathPrefix(final Collection newListElements) { + if (newListElements.isEmpty()) { + throw new CpsAdminException("Invalid list replacement", + "Cannot replace list elements with empty collection"); + } + final String firstChildNodeXpath = newListElements.iterator().next().getXpath(); + return firstChildNodeXpath.substring(0, firstChildNodeXpath.lastIndexOf('[') + 1); + } + + private FragmentEntity getFragmentForReplacement(final FragmentEntity parentEntity, + final DataNode newListElement, + final FragmentEntity existingListElementEntity) { + if (existingListElementEntity == null) { + return convertToFragmentWithAllDescendants(parentEntity.getAnchor(), newListElement); + } + if (newListElement.getChildDataNodes().isEmpty()) { + copyAttributesFromNewDataNode(existingListElementEntity, newListElement); + existingListElementEntity.getChildFragments().clear(); + } else { + updateFragmentEntityAndDescendantsWithDataNode(existingListElementEntity, newListElement); + } + return existingListElementEntity; + } + + private String getOrderedLeavesAsJson(final Map currentLeaves) { + final Map sortedLeaves = new TreeMap<>(String::compareTo); + sortedLeaves.putAll(currentLeaves); + return jsonObjectMapper.asJsonString(sortedLeaves); + } + + private String getOrderedLeavesAsJson(final String currentLeavesAsString) { + if (currentLeavesAsString == null) { + return "{}"; + } + final Map sortedLeaves = jsonObjectMapper.convertJsonString(currentLeavesAsString, + TreeMap.class); + return jsonObjectMapper.asJsonString(sortedLeaves); + } + + private static Map extractListElementFragmentEntitiesByXPath( + final Set childEntities, final String listElementXpathPrefix) { + return childEntities.stream() + .filter(fragmentEntity -> fragmentEntity.getXpath().startsWith(listElementXpathPrefix)) + .collect(Collectors.toMap(FragmentEntity::getXpath, fragmentEntity -> fragmentEntity)); + } + + private static Set processAncestorXpath(final Collection fragmentEntities, + final CpsPathQuery cpsPathQuery) { + final Set ancestorXpath 
= new HashSet<>(); + final Pattern pattern = + Pattern.compile("(.*/" + Pattern.quote(cpsPathQuery.getAncestorSchemaNodeIdentifier()) + + REG_EX_FOR_OPTIONAL_LIST_INDEX + "/.*"); + for (final FragmentEntity fragmentEntity : fragmentEntities) { + final Matcher matcher = pattern.matcher(fragmentEntity.getXpath()); + if (matcher.matches()) { + ancestorXpath.add(matcher.group(1)); + } + } + return ancestorXpath; + } + + private static boolean isRootXpath(final String xpath) { + return "/".equals(xpath) || "".equals(xpath); + } + + private static boolean isNewDataNode(final DataNode replacementDataNode, + final Map existingListElementsByXpath) { + return !existingListElementsByXpath.containsKey(replacementDataNode.getXpath()); + } + + private void copyAttributesFromNewDataNode(final FragmentEntity existingFragmentEntity, + final DataNode newDataNode) { + final String oldOrderedLeavesAsJson = getOrderedLeavesAsJson(existingFragmentEntity.getAttributes()); + final String newOrderedLeavesAsJson = getOrderedLeavesAsJson(newDataNode.getLeaves()); + if (!oldOrderedLeavesAsJson.equals(newOrderedLeavesAsJson)) { + existingFragmentEntity.setAttributes(jsonObjectMapper.asJsonString(newDataNode.getLeaves())); + } + } + + private String mergeLeaves(final Map updateLeaves, final String currentLeavesAsString) { + Map currentLeavesAsMap = new HashMap<>(); + if (currentLeavesAsString != null) { + currentLeavesAsMap = jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class); + currentLeavesAsMap.putAll(updateLeaves); + } + + if (currentLeavesAsMap.isEmpty()) { + return ""; + } + return jsonObjectMapper.asJsonString(currentLeavesAsMap); + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/CpsModulePersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/CpsModulePersistenceServiceImpl.java new file mode 100755 index 0000000000..6f491ba3b7 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/CpsModulePersistenceServiceImpl.java @@ -0,0 +1,432 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2020-2024 Nordix Foundation + * Modifications Copyright (C) 2020-2022 Bell Canada. + * Modifications Copyright (C) 2021 Pantheon.tech + * Modifications Copyright (C) 2022 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableSet; +import jakarta.transaction.Transactional; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.commons.lang3.StringUtils; +import org.hibernate.exception.ConstraintViolationException; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.SchemaSetEntity; +import org.onap.cps.ri.models.YangResourceEntity; +import org.onap.cps.ri.models.YangResourceModuleReference; +import org.onap.cps.ri.repository.DataspaceRepository; +import org.onap.cps.ri.repository.ModuleReferenceRepository; +import org.onap.cps.ri.repository.SchemaSetRepository; +import org.onap.cps.ri.repository.YangResourceRepository; +import org.onap.cps.spi.CpsModulePersistenceService; +import org.onap.cps.spi.exceptions.AlreadyDefinedException; +import org.onap.cps.spi.exceptions.DuplicatedYangResourceException; +import org.onap.cps.spi.exceptions.ModelValidationException; +import org.onap.cps.spi.model.ModuleDefinition; +import org.onap.cps.spi.model.ModuleReference; +import org.onap.cps.spi.model.SchemaSet; +import org.opendaylight.yangtools.yang.common.Revision; +import org.opendaylight.yangtools.yang.model.repo.api.RevisionSourceIdentifier; +import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource; +import org.opendaylight.yangtools.yang.parser.api.YangSyntaxErrorException; +import org.opendaylight.yangtools.yang.parser.rfc7950.repo.YangModelDependencyInfo; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.retry.RetryContext; +import org.springframework.retry.annotation.Backoff; +import org.springframework.retry.annotation.Retryable; +import org.springframework.retry.support.RetrySynchronizationManager; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +@RequiredArgsConstructor +public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceService { + + private static final String YANG_RESOURCE_CHECKSUM_CONSTRAINT_NAME = "yang_resource_checksum_key"; + private static final String NO_MODULE_NAME_FILTER = null; + private static final String NO_MODULE_REVISION = null; + private static final Pattern CHECKSUM_EXCEPTION_PATTERN = Pattern.compile(".*\\(checksum\\)=\\((\\w+)\\).*"); + private static final Pattern RFC6020_RECOMMENDED_FILENAME_PATTERN = Pattern + .compile("([\\w-]+)@(\\d{4}-\\d{2}-\\d{2})(?:\\.yang)?", Pattern.CASE_INSENSITIVE); + + private final YangResourceRepository yangResourceRepository; + + private final SchemaSetRepository schemaSetRepository; + + private final DataspaceRepository dataspaceRepository; + + private final ModuleReferenceRepository moduleReferenceRepository; + + @Override + public Map getYangSchemaResources(final String 
dataspaceName, final String schemaSetName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final SchemaSetEntity schemaSetEntity = + schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); + return schemaSetEntity.getYangResources().stream().collect( + Collectors.toMap(YangResourceEntity::getFileName, YangResourceEntity::getContent)); + } + + @Override + public Collection getYangResourceModuleReferences(final String dataspaceName) { + final Set yangResourceModuleReferenceList = + yangResourceRepository.findAllModuleReferencesByDataspace(dataspaceName); + return yangResourceModuleReferenceList.stream().map(CpsModulePersistenceServiceImpl::toModuleReference) + .collect(Collectors.toList()); + } + + @Override + public Collection getYangResourceModuleReferences(final String dataspaceName, + final String anchorName) { + final Set yangResourceModuleReferenceList = + yangResourceRepository + .findAllModuleReferencesByDataspaceAndAnchor(dataspaceName, anchorName); + return yangResourceModuleReferenceList.stream().map(CpsModulePersistenceServiceImpl::toModuleReference) + .collect(Collectors.toList()); + } + + @Override + public Collection getYangResourceDefinitions(final String dataspaceName, + final String anchorName) { + final Set yangResourceEntities = + yangResourceRepository.findAllModuleDefinitionsByDataspaceAndAnchorAndModule(dataspaceName, anchorName, + NO_MODULE_NAME_FILTER, NO_MODULE_REVISION); + return convertYangResourceEntityToModuleDefinition(yangResourceEntities); + } + + @Override + public Collection getYangResourceDefinitionsByAnchorAndModule(final String dataspaceName, + final String anchorName, + final String moduleName, + final String moduleRevision) { + final Set yangResourceEntities = + yangResourceRepository.findAllModuleDefinitionsByDataspaceAndAnchorAndModule(dataspaceName, anchorName, + moduleName, moduleRevision); + return convertYangResourceEntityToModuleDefinition(yangResourceEntities); + } + + private List convertYangResourceEntityToModuleDefinition(final Set + yangResourceEntities) { + final List resultModuleDefinitions = new ArrayList<>(yangResourceEntities.size()); + for (final YangResourceEntity yangResourceEntity: yangResourceEntities) { + resultModuleDefinitions.add(toModuleDefinition(yangResourceEntity)); + } + return resultModuleDefinitions; + } + + @Override + @Transactional + // A retry is made to store the schema set if it fails because of duplicated yang resource exception that + // can occur in case of specific concurrent requests. 
+ @Retryable(retryFor = DuplicatedYangResourceException.class, maxAttempts = 5, backoff = + @Backoff(random = true, delay = 200, maxDelay = 2000, multiplier = 2)) + public void storeSchemaSet(final String dataspaceName, final String schemaSetName, + final Map moduleReferenceNameToContentMap) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final Set yangResourceEntities = synchronizeYangResources(moduleReferenceNameToContentMap); + final SchemaSetEntity schemaSetEntity = new SchemaSetEntity(); + schemaSetEntity.setName(schemaSetName); + schemaSetEntity.setDataspace(dataspaceEntity); + schemaSetEntity.setYangResources(yangResourceEntities); + try { + schemaSetRepository.save(schemaSetEntity); + } catch (final DataIntegrityViolationException e) { + throw AlreadyDefinedException.forSchemaSet(schemaSetName, dataspaceName, e); + } + } + + @Override + public Collection getSchemaSetsByDataspaceName(final String dataspaceName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final List schemaSetEntities = schemaSetRepository.findByDataspace(dataspaceEntity); + return schemaSetEntities.stream() + .map(CpsModulePersistenceServiceImpl::toSchemaSet).collect(Collectors.toList()); + } + + @Override + @Transactional + // A retry is made to store the schema set if it fails because of duplicated yang resource exception that + // can occur in case of specific concurrent requests. + @Retryable(retryFor = DuplicatedYangResourceException.class, maxAttempts = 5, backoff = + @Backoff(random = true, delay = 200, maxDelay = 2000, multiplier = 2)) + public void storeSchemaSetFromModules(final String dataspaceName, final String schemaSetName, + final Map newModuleNameToContentMap, + final Collection allModuleReferences) { + storeSchemaSet(dataspaceName, schemaSetName, newModuleNameToContentMap); + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final SchemaSetEntity schemaSetEntity = + schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); + final List allYangResourceIds = + yangResourceRepository.getResourceIdsByModuleReferences(allModuleReferences); + yangResourceRepository.insertSchemaSetIdYangResourceId(schemaSetEntity.getId(), allYangResourceIds); + } + + @Override + @Transactional + public void deleteSchemaSet(final String dataspaceName, final String schemaSetName) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final SchemaSetEntity schemaSetEntity = + schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); + schemaSetRepository.delete(schemaSetEntity); + } + + @Override + @Transactional + public void deleteSchemaSets(final String dataspaceName, final Collection schemaSetNames) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + schemaSetRepository.deleteByDataspaceAndNameIn(dataspaceEntity, schemaSetNames); + } + + + @Override + @Transactional + public void updateSchemaSetFromModules(final String dataspaceName, final String schemaSetName, + final Map newModuleNameToContentMap, + final Collection allModuleReferences) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final SchemaSetEntity schemaSetEntity = + schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); + storeAndLinkNewModules(newModuleNameToContentMap, schemaSetEntity); + updateAllModuleReferences(allModuleReferences, schemaSetEntity.getId()); 
+ } + + + + @Override + @Transactional + public void deleteUnusedYangResourceModules() { + yangResourceRepository.deleteOrphans(); + } + + @Override + public Collection identifyNewModuleReferences( + final Collection moduleReferencesToCheck) { + return moduleReferenceRepository.identifyNewModuleReferences(moduleReferencesToCheck); + } + + @Override + public Collection getModuleReferencesByAttribute(final String dataspaceName, + final String anchorName, + final Map parentAttributes, + final Map childAttributes) { + return moduleReferenceRepository.findModuleReferences(dataspaceName, anchorName, parentAttributes, + childAttributes); + } + + private Set synchronizeYangResources( + final Map moduleReferenceNameToContentMap) { + final Map checksumToEntityMap = moduleReferenceNameToContentMap.entrySet().stream() + .map(entry -> { + final String checksum = DigestUtils.sha256Hex(entry.getValue().getBytes(StandardCharsets.UTF_8)); + final Map moduleNameAndRevisionMap = createModuleNameAndRevisionMap(entry.getKey(), + entry.getValue()); + final YangResourceEntity yangResourceEntity = new YangResourceEntity(); + yangResourceEntity.setFileName(entry.getKey()); + yangResourceEntity.setContent(entry.getValue()); + yangResourceEntity.setModuleName(moduleNameAndRevisionMap.get("moduleName")); + yangResourceEntity.setRevision(moduleNameAndRevisionMap.get("revision")); + yangResourceEntity.setChecksum(checksum); + return yangResourceEntity; + }) + .collect(Collectors.toMap( + YangResourceEntity::getChecksum, + entity -> entity + )); + + final List existingYangResourceEntities = + yangResourceRepository.findAllByChecksumIn(checksumToEntityMap.keySet()); + existingYangResourceEntities.forEach(yangFile -> checksumToEntityMap.remove(yangFile.getChecksum())); + + final Collection newYangResourceEntities = checksumToEntityMap.values(); + if (!newYangResourceEntities.isEmpty()) { + try { + yangResourceRepository.saveAll(newYangResourceEntities); + } catch (final DataIntegrityViolationException dataIntegrityViolationException) { + // Throw a CPS duplicated Yang resource exception if the cause of the error is a yang checksum + // database constraint violation. + // If it is not, then throw the original exception + final Optional convertedException = + convertToDuplicatedYangResourceException( + dataIntegrityViolationException, newYangResourceEntities); + convertedException.ifPresent( + e -> { + final RetryContext retryContext = RetrySynchronizationManager.getContext(); + int retryCount = retryContext == null ? 0 : retryContext.getRetryCount(); + log.warn("Cannot persist duplicated yang resource. System will attempt this method " + + "up to 5 times. Current retry count : {}", ++retryCount, e); + }); + throw convertedException.isPresent() ? 
convertedException.get() : dataIntegrityViolationException; + } + } + + return ImmutableSet.builder() + .addAll(existingYangResourceEntities) + .addAll(newYangResourceEntities) + .build(); + } + + private static Map createModuleNameAndRevisionMap(final String sourceName, final String source) { + final Map metaDataMap = new HashMap<>(); + final RevisionSourceIdentifier revisionSourceIdentifier = + createIdentifierFromSourceName(checkNotNull(sourceName)); + + final YangTextSchemaSource tempYangTextSchemaSource = new YangTextSchemaSource(revisionSourceIdentifier) { + @Override + public Optional getSymbolicName() { + return Optional.empty(); + } + + @Override + protected MoreObjects.ToStringHelper addToStringAttributes( + final MoreObjects.ToStringHelper toStringHelper) { + return toStringHelper; + } + + @Override + public InputStream openStream() { + return new ByteArrayInputStream(source.getBytes(StandardCharsets.UTF_8)); + } + }; + try { + final YangModelDependencyInfo yangModelDependencyInfo + = YangModelDependencyInfo.forYangText(tempYangTextSchemaSource); + metaDataMap.put("moduleName", yangModelDependencyInfo.getName()); + metaDataMap.put("revision", yangModelDependencyInfo.getFormattedRevision()); + } catch (final YangSyntaxErrorException | IOException e) { + throw new ModelValidationException("Yang resource is invalid.", + String.format("Yang syntax validation failed for resource %s:%n%s", sourceName, e.getMessage()), e); + } + return metaDataMap; + } + + private static RevisionSourceIdentifier createIdentifierFromSourceName(final String sourceName) { + final Matcher matcher = RFC6020_RECOMMENDED_FILENAME_PATTERN.matcher(sourceName); + if (matcher.matches()) { + return RevisionSourceIdentifier.create(matcher.group(1), Revision.of(matcher.group(2))); + } + return RevisionSourceIdentifier.create(sourceName); + } + + /** + * Convert the specified data integrity violation exception into a CPS duplicated Yang resource exception + * if the cause of the error is a yang checksum database constraint violation. + * + * @param originalException the original db exception. + * @param yangResourceEntities the collection of Yang resources involved in the db failure. + * @return an optional converted CPS duplicated Yang resource exception. The optional is empty if the original + * cause of the error is not a yang checksum database constraint violation. 
+ */ + private Optional convertToDuplicatedYangResourceException( + final DataIntegrityViolationException originalException, + final Collection yangResourceEntities) { + + // The exception result + DuplicatedYangResourceException duplicatedYangResourceException = null; + + final Throwable cause = originalException.getCause(); + if (cause instanceof ConstraintViolationException) { + final ConstraintViolationException constraintException = (ConstraintViolationException) cause; + if (YANG_RESOURCE_CHECKSUM_CONSTRAINT_NAME.equals(constraintException.getConstraintName())) { + // Db constraint related to yang resource checksum uniqueness is not respected + final String checksumInError = getDuplicatedChecksumFromException(constraintException); + final String nameInError = getNameForChecksum(checksumInError, yangResourceEntities); + duplicatedYangResourceException = + new DuplicatedYangResourceException(nameInError, checksumInError, constraintException); + } + } + + return Optional.ofNullable(duplicatedYangResourceException); + + } + + private String getNameForChecksum(final String checksum, + final Collection yangResourceEntities) { + final Optional optionalFileName = yangResourceEntities.stream() + .filter(entity -> StringUtils.equals(checksum, (entity.getChecksum()))) + .findFirst() + .map(YangResourceEntity::getFileName); + return optionalFileName.orElse("no filename"); + } + + private String getDuplicatedChecksumFromException(final ConstraintViolationException exception) { + final Matcher matcher = CHECKSUM_EXCEPTION_PATTERN.matcher(exception.getSQLException().getMessage()); + if (matcher.find()) { + return matcher.group(1); + } + return "no checksum found"; + } + + private static ModuleReference toModuleReference( + final YangResourceModuleReference yangResourceModuleReference) { + return ModuleReference.builder() + .moduleName(yangResourceModuleReference.getModuleName()) + .revision(yangResourceModuleReference.getRevision()) + .build(); + } + + private static ModuleDefinition toModuleDefinition(final YangResourceEntity yangResourceEntity) { + return new ModuleDefinition( + yangResourceEntity.getModuleName(), + yangResourceEntity.getRevision(), + yangResourceEntity.getContent()); + } + + private static SchemaSet toSchemaSet(final SchemaSetEntity schemaSetEntity) { + return SchemaSet.builder().name(schemaSetEntity.getName()) + .dataspaceName(schemaSetEntity.getDataspace().getName()).build(); + } + + private void storeAndLinkNewModules(final Map newModuleNameToContentMap, + final SchemaSetEntity schemaSetEntity) { + final Set yangResourceEntities + = new HashSet<>(synchronizeYangResources(newModuleNameToContentMap)); + schemaSetEntity.setYangResources(yangResourceEntities); + schemaSetRepository.save(schemaSetEntity); + } + + private void updateAllModuleReferences(final Collection allModuleReferences, + final Integer schemaSetEntityId) { + yangResourceRepository.deleteSchemaSetYangResourceForSchemaSetId(schemaSetEntityId); + final List allYangResourceIds = + yangResourceRepository.getResourceIdsByModuleReferences(allModuleReferences); + yangResourceRepository.insertSchemaSetIdYangResourceId(schemaSetEntityId, allYangResourceIds); + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/models/AnchorEntity.java b/cps-ri/src/main/java/org/onap/cps/ri/models/AnchorEntity.java new file mode 100644 index 0000000000..bf9e25daf1 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/models/AnchorEntity.java @@ -0,0 +1,76 @@ +/* + * 
============LICENSE_START======================================================= + * Copyright (C) 2021 Pantheon.tech + * Modifications Copyright (C) 2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.models; + + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import jakarta.validation.constraints.NotNull; +import java.io.Serializable; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + * Entity to store an anchor. + */ +@Getter +@Setter +@NoArgsConstructor +@AllArgsConstructor +@Builder +@Entity +@Table(name = "anchor") +@EqualsAndHashCode(onlyExplicitlyIncluded = true) +public class AnchorEntity implements Serializable { + + private static final long serialVersionUID = -8049987915308262518L; + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @NotNull + @Column + @EqualsAndHashCode.Include + private String name; + + @NotNull + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "schema_set_id") + private SchemaSetEntity schemaSet; + + @NotNull + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "dataspace_id") + @EqualsAndHashCode.Include + private DataspaceEntity dataspace; +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/models/DataspaceEntity.java b/cps-ri/src/main/java/org/onap/cps/ri/models/DataspaceEntity.java new file mode 100644 index 0000000000..689ae2c000 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/models/DataspaceEntity.java @@ -0,0 +1,70 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2020-2023 Nordix Foundation. + * Modifications Copyright (C) 2020-2021 Pantheon.tech + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.models; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Table; +import jakarta.validation.constraints.NotNull; +import java.io.Serializable; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + + +/** + * Entity to store a dataspace. + */ +@Getter +@Setter +@Entity +@AllArgsConstructor +@NoArgsConstructor +@Table(name = "dataspace") +@EqualsAndHashCode(onlyExplicitlyIncluded = true) +public class DataspaceEntity implements Serializable { + + private static final long serialVersionUID = 8395254649813051882L; + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Integer id; + + @NotNull + @Column(columnDefinition = "text") + @EqualsAndHashCode.Include + private String name; + + /** + * Initialize a Dataspace . + * + * @param name the Dataspace name. + */ + public DataspaceEntity(final String name) { + this.name = name; + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/models/FragmentEntity.java b/cps-ri/src/main/java/org/onap/cps/ri/models/FragmentEntity.java new file mode 100644 index 0000000000..2c851b60a5 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/models/FragmentEntity.java @@ -0,0 +1,93 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2020-2024 Nordix Foundation. + * Modifications Copyright (C) 2021 Pantheon.tech + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.models; + +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.OneToMany; +import jakarta.persistence.SequenceGenerator; +import jakarta.persistence.Table; +import jakarta.validation.constraints.NotNull; +import java.io.Serializable; +import java.util.Set; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.ToString; +import org.hibernate.annotations.JdbcTypeCode; +import org.hibernate.type.SqlTypes; + +/** + * Entity to store a fragment. 
+ */ +@Data +@Getter +@Setter +@AllArgsConstructor +@NoArgsConstructor +@Builder +@Entity +@Table(name = "fragment") +@EqualsAndHashCode(onlyExplicitlyIncluded = true) +public class FragmentEntity implements Serializable { + + private static final long serialVersionUID = 7737669789097119667L; + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "fragment_id_seq_generator") + @SequenceGenerator(name = "fragment_id_seq_generator", sequenceName = "fragment_id_seq", allocationSize = 100) + private Long id; + + @NotNull + @Column(columnDefinition = "text") + @EqualsAndHashCode.Include + private String xpath; + + @Column(name = "parent_id") + private Long parentId; + + @JdbcTypeCode(SqlTypes.JSON) + @Column(columnDefinition = "jsonb") + private String attributes; + + @NotNull + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "anchor_id") + @EqualsAndHashCode.Include + private AnchorEntity anchor; + + @ToString.Exclude + @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.LAZY) + @JoinColumn(name = "parent_id") + private Set childFragments; +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/models/SchemaSetEntity.java b/cps-ri/src/main/java/org/onap/cps/ri/models/SchemaSetEntity.java new file mode 100644 index 0000000000..e99f79e330 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/models/SchemaSetEntity.java @@ -0,0 +1,71 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2020 Pantheon.tech + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.models; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.JoinTable; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import jakarta.validation.constraints.NotNull; +import java.io.Serializable; +import java.util.Set; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + * Entity to store a Schema Set. 
+ */ +@Getter +@Setter +@NoArgsConstructor +@Entity +@Table(name = "schema_set") +public class SchemaSetEntity implements Serializable { + + private static final long serialVersionUID = 6665056955069047269L; + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Integer id; + + @NotNull + @Column + private String name; + + @NotNull + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "dataspace_id", referencedColumnName = "ID") + private DataspaceEntity dataspace; + + @NotNull + @ManyToMany(fetch = FetchType.LAZY) + @JoinTable(name = "schema_set_yang_resources", + joinColumns = @JoinColumn(name = "schema_set_id"), + inverseJoinColumns = @JoinColumn(name = "yang_resource_id")) + private Set<YangResourceEntity> yangResources; +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/models/YangResourceEntity.java b/cps-ri/src/main/java/org/onap/cps/ri/models/YangResourceEntity.java new file mode 100644 index 0000000000..2b2d7924db --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/models/YangResourceEntity.java @@ -0,0 +1,75 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2020 Pantheon.tech + * Modifications Copyright (C) 2021-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.models; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.Table; +import jakarta.validation.constraints.NotNull; +import java.io.Serializable; +import java.util.Set; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; + +/** + * Entity to store a Yang file.
+ */ +@Getter +@Setter +@NoArgsConstructor +@Entity +@Table(name = "yang_resource") +public class YangResourceEntity implements Serializable { + + private static final long serialVersionUID = -4496883162142106774L; + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Integer id; + + @NotNull + @Column + private String checksum; + + @NotNull + @Column + private String fileName; + + @NotNull + @Column + private String content; + + @NotNull + @Column + private String moduleName; + + @Column + private String revision; + + @ManyToMany(mappedBy = "yangResources") + private Set schemaSets; + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/models/YangResourceModuleReference.java b/cps-ri/src/main/java/org/onap/cps/ri/models/YangResourceModuleReference.java new file mode 100644 index 0000000000..28ef56d38a --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/models/YangResourceModuleReference.java @@ -0,0 +1,31 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2021 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.models; + +import org.springframework.beans.factory.annotation.Value; + +public interface YangResourceModuleReference { + + @Value("#{target.module_name}") + String getModuleName(); + + String getRevision(); +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/AnchorRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/AnchorRepository.java new file mode 100755 index 0000000000..7fe14b3173 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/AnchorRepository.java @@ -0,0 +1,118 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2021 Pantheon.tech + * Modifications Copyright (C) 2021-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.Collection; +import java.util.Optional; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.SchemaSetEntity; +import org.onap.cps.spi.exceptions.AnchorNotFoundException; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Modifying; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; +import org.springframework.stereotype.Repository; + +@Repository +public interface AnchorRepository extends JpaRepository { + + Optional findByDataspaceAndName(DataspaceEntity dataspaceEntity, String name); + + default AnchorEntity getByDataspaceAndName(DataspaceEntity dataspace, String anchorName) { + return findByDataspaceAndName(dataspace, anchorName) + .orElseThrow(() -> new AnchorNotFoundException(anchorName, dataspace.getName())); + } + + Collection findAllByDataspace(DataspaceEntity dataspaceEntity); + + Collection findAllBySchemaSet(SchemaSetEntity schemaSetEntity); + + @Query(value = "SELECT * FROM anchor WHERE dataspace_id = :dataspaceId AND name = ANY (:anchorNames)", + nativeQuery = true) + Collection findAllByDataspaceIdAndNameIn(@Param("dataspaceId") int dataspaceId, + @Param("anchorNames") String[] anchorNames); + + default Collection findAllByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity, + final Collection anchorNames) { + return findAllByDataspaceIdAndNameIn(dataspaceEntity.getId(), anchorNames.toArray(new String[0])); + } + + @Query(value = "SELECT a.* FROM anchor a" + + " LEFT OUTER JOIN schema_set s ON a.schema_set_id = s.id" + + " WHERE a.dataspace_id = :dataspaceId AND s.name = ANY (:schemaSetNames)", + nativeQuery = true) + Collection findAllByDataspaceIdAndSchemaSetNameIn(@Param("dataspaceId") int dataspaceId, + @Param("schemaSetNames") String[] schemaSetNames); + + default Collection findAllByDataspaceAndSchemaSetNameIn(final DataspaceEntity dataspaceEntity, + final Collection schemaSetNames) { + return findAllByDataspaceIdAndSchemaSetNameIn(dataspaceEntity.getId(), schemaSetNames.toArray(new String[0])); + } + + Integer countByDataspace(DataspaceEntity dataspaceEntity); + + @Query(value = """ + SELECT + anchor.name + FROM + yang_resource + JOIN schema_set_yang_resources ON schema_set_yang_resources.yang_resource_id = yang_resource.id + JOIN schema_set ON schema_set.id = schema_set_yang_resources.schema_set_id + JOIN anchor ON anchor.schema_set_id = schema_set.id + WHERE + schema_set.dataspace_id = :dataspaceId + AND module_name = ANY ( :moduleNames ) + GROUP BY + anchor.id, + anchor.name, + anchor.dataspace_id, + anchor.schema_set_id + HAVING + COUNT(DISTINCT module_name) = :sizeOfModuleNames + """, nativeQuery = true) + Collection getAnchorNamesByDataspaceIdAndModuleNames(@Param("dataspaceId") int dataspaceId, + @Param("moduleNames") String[] moduleNames, + @Param("sizeOfModuleNames") int sizeOfModuleNames); + + default Collection getAnchorNamesByDataspaceIdAndModuleNames(final int dataspaceId, + final Collection moduleNames, + final int sizeOfModuleNames) { + final String[] moduleNamesArray = moduleNames.toArray(new String[0]); + return getAnchorNamesByDataspaceIdAndModuleNames(dataspaceId, moduleNamesArray, sizeOfModuleNames); + } + + @Modifying + @Query(value = "DELETE FROM anchor WHERE 
dataspace_id = :dataspaceId AND name = ANY (:anchorNames)", + nativeQuery = true) + void deleteAllByDataspaceIdAndNameIn(@Param("dataspaceId") int dataspaceId, + @Param("anchorNames") String[] anchorNames); + + default void deleteAllByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity, + final Collection anchorNames) { + deleteAllByDataspaceIdAndNameIn(dataspaceEntity.getId(), anchorNames.toArray(new String[0])); + } + + @Modifying + @Query(value = "UPDATE anchor SET schema_set_id =:schemaSetId WHERE id = :anchorId ", nativeQuery = true) + void updateAnchorSchemaSetId(@Param("schemaSetId") int schemaSetId, @Param("anchorId") long anchorId); + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/DataspaceRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/DataspaceRepository.java new file mode 100755 index 0000000000..b79d802d95 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/DataspaceRepository.java @@ -0,0 +1,44 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2020 Bell Canada. All rights reserved. + * Modifications Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.Optional; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.spi.exceptions.DataspaceNotFoundException; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.stereotype.Repository; + +@Repository +public interface DataspaceRepository extends JpaRepository { + + Optional findByName(String name); + + /** + * Get a dataspace by name. + * throws a DataspaceNotFoundException if it does not exist + * + * @param name the name of the dataspace + * @return the Dataspace found + */ + default DataspaceEntity getByName(final String name) { + return findByName(name).orElseThrow(() -> new DataspaceNotFoundException(name)); + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepository.java new file mode 100644 index 0000000000..6813995d99 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepository.java @@ -0,0 +1,31 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.Collection; +import org.onap.cps.ri.models.FragmentEntity; +import org.onap.cps.spi.FetchDescendantsOption; + +public interface FragmentPrefetchRepository { + Collection prefetchDescendantsOfFragmentEntities( + final FetchDescendantsOption fetchDescendantsOption, + final Collection proxiedFragmentEntities); +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepositoryImpl.java new file mode 100644 index 0000000000..bcf01b3d76 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentPrefetchRepositoryImpl.java @@ -0,0 +1,127 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.sql.Connection; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import lombok.RequiredArgsConstructor; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.FragmentEntity; +import org.onap.cps.spi.FetchDescendantsOption; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.PreparedStatementSetter; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +@Repository +@RequiredArgsConstructor +public class FragmentPrefetchRepositoryImpl implements FragmentPrefetchRepository { + + private final JdbcTemplate jdbcTemplate; + + @Override + public Collection prefetchDescendantsOfFragmentEntities( + final FetchDescendantsOption fetchDescendantsOption, + final Collection proxiedFragmentEntities) { + + if (FetchDescendantsOption.OMIT_DESCENDANTS.equals(fetchDescendantsOption)) { + return proxiedFragmentEntities; + } + + final List fragmentEntityIds = proxiedFragmentEntities.stream() + .map(FragmentEntity::getId).collect(Collectors.toList()); + + final Map anchorEntityPerId = proxiedFragmentEntities.stream() + .map(FragmentEntity::getAnchor) + .collect(Collectors.toMap(AnchorEntity::getId, anchor -> anchor, (anchor1, anchor2) -> anchor1)); + + final int maxDepth = fetchDescendantsOption.equals(FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + ? Integer.MAX_VALUE + : fetchDescendantsOption.getDepth(); + return findFragmentEntitiesWithDescendantsByIds(fragmentEntityIds, anchorEntityPerId, maxDepth); + } + + private Collection findFragmentEntitiesWithDescendantsByIds( + final Collection fragmentEntityIds, + final Map anchorEntityPerId, + final int maxDepth) { + final String sql + = "WITH RECURSIVE parent_search AS (" + + " SELECT id, 0 AS depth " + + " FROM fragment " + + " WHERE id = ANY (?) " + + " UNION " + + " SELECT child.id, depth + 1 " + + " FROM fragment child INNER JOIN parent_search parent ON child.parent_id = parent.id" + + " WHERE depth < ?" 
+ + ") " + + "SELECT fragment.id, anchor_id AS anchorId, xpath, parent_id AS parentId, " + + " CAST(attributes AS TEXT) AS attributes " + + "FROM fragment INNER JOIN parent_search ON fragment.id = parent_search.id"; + + final PreparedStatementSetter preparedStatementSetter = preparedStatement -> { + final Connection connection = preparedStatement.getConnection(); + final java.sql.Array idArray = connection.createArrayOf("bigint", fragmentEntityIds.toArray()); + preparedStatement.setArray(1, idArray); + preparedStatement.setInt(2, maxDepth); + }; + + final RowMapper fragmentEntityRowMapper = (resultSet, rowNum) -> { + final FragmentEntity fragmentEntity = new FragmentEntity(); + fragmentEntity.setId(resultSet.getLong("id")); + fragmentEntity.setXpath(resultSet.getString("xpath")); + fragmentEntity.setParentId(resultSet.getObject("parentId", Long.class)); + fragmentEntity.setAttributes(resultSet.getString("attributes")); + fragmentEntity.setAnchor(anchorEntityPerId.get(resultSet.getLong("anchorId"))); + fragmentEntity.setChildFragments(new HashSet<>()); + return fragmentEntity; + }; + + final Map fragmentEntityPerId; + try (final Stream fragmentEntityStream = jdbcTemplate.queryForStream(sql, + preparedStatementSetter, fragmentEntityRowMapper)) { + fragmentEntityPerId = fragmentEntityStream.collect( + Collectors.toMap(FragmentEntity::getId, Function.identity())); + } + return reuniteChildrenWithTheirParents(fragmentEntityPerId); + } + + private static Collection reuniteChildrenWithTheirParents( + final Map fragmentEntityPerId) { + final Collection fragmentEntitiesWithoutParent = new HashSet<>(); + for (final FragmentEntity fragmentEntity : fragmentEntityPerId.values()) { + final FragmentEntity parentFragmentEntity = fragmentEntityPerId.get(fragmentEntity.getParentId()); + if (parentFragmentEntity == null) { + fragmentEntitiesWithoutParent.add(fragmentEntity); + } else { + parentFragmentEntity.getChildFragments().add(fragmentEntity); + } + } + return fragmentEntitiesWithoutParent; + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentQueryBuilder.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentQueryBuilder.java new file mode 100644 index 0000000000..b8bbf59c23 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentQueryBuilder.java @@ -0,0 +1,270 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2024 Nordix Foundation + * Modifications Copyright (C) 2023 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import lombok.RequiredArgsConstructor; +import org.onap.cps.cpspath.parser.CpsPathPrefixType; +import org.onap.cps.cpspath.parser.CpsPathQuery; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.FragmentEntity; +import org.onap.cps.ri.utils.EscapeUtils; +import org.onap.cps.spi.PaginationOption; +import org.onap.cps.spi.exceptions.CpsPathException; +import org.springframework.stereotype.Component; + +@RequiredArgsConstructor +@Component +public class FragmentQueryBuilder { + + @PersistenceContext + private EntityManager entityManager; + + /** + * Create an SQL query to retrieve by anchor(id) and cps path. + * + * @param anchorEntity the anchor + * @param cpsPathQuery the cps path query to be transformed into an SQL query + * @return an executable query object + */ + public Query getQueryForAnchorAndCpsPath(final AnchorEntity anchorEntity, final CpsPathQuery cpsPathQuery) { + final StringBuilder sqlStringBuilder = new StringBuilder(); + final Map<String, Object> queryParameters = new HashMap<>(); + + sqlStringBuilder.append("SELECT fragment.* FROM fragment"); + addWhereClauseForAnchor(anchorEntity, sqlStringBuilder, queryParameters); + addNodeSearchConditions(cpsPathQuery, sqlStringBuilder, queryParameters, false); + + return getQuery(sqlStringBuilder.toString(), queryParameters, FragmentEntity.class); + } + + /** + * Create an SQL query to retrieve by cps path. + * + * @param dataspaceEntity the dataspace + * @param cpsPathQuery the cps path query to be transformed into an SQL query + * @return an executable query object + */ + public Query getQueryForDataspaceAndCpsPath(final DataspaceEntity dataspaceEntity, + final CpsPathQuery cpsPathQuery, + final List<Long> anchorIdsForPagination) { + final StringBuilder sqlStringBuilder = new StringBuilder(); + final Map<String, Object> queryParameters = new HashMap<>(); + + sqlStringBuilder.append("SELECT fragment.* FROM fragment"); + if (anchorIdsForPagination.isEmpty()) { + addWhereClauseForDataspace(dataspaceEntity, sqlStringBuilder, queryParameters); + } else { + addWhereClauseForAnchorIds(anchorIdsForPagination, sqlStringBuilder, queryParameters); + } + addNodeSearchConditions(cpsPathQuery, sqlStringBuilder, queryParameters, true); + + return getQuery(sqlStringBuilder.toString(), queryParameters, FragmentEntity.class); + } + + /** + * Get query for dataspace, cps path, page index and page size.
+ * @param dataspaceEntity data space entity + * @param cpsPathQuery cps path query + * @param paginationOption pagination option + * @return query for given dataspace, cps path and pagination parameters + */ + public Query getQueryForAnchorIdsForPagination(final DataspaceEntity dataspaceEntity, + final CpsPathQuery cpsPathQuery, + final PaginationOption paginationOption) { + final StringBuilder sqlStringBuilder = new StringBuilder(); + final Map queryParameters = new HashMap<>(); + + sqlStringBuilder.append("SELECT distinct(fragment.anchor_id) FROM fragment"); + addWhereClauseForDataspace(dataspaceEntity, sqlStringBuilder, queryParameters); + addNodeSearchConditions(cpsPathQuery, sqlStringBuilder, queryParameters, true); + sqlStringBuilder.append(" ORDER BY fragment.anchor_id"); + addPaginationCondition(sqlStringBuilder, queryParameters, paginationOption); + + return getQuery(sqlStringBuilder.toString(), queryParameters, Long.class); + } + + private Query getQuery(final String sql, final Map queryParameters, final Class returnType) { + final Query query = entityManager.createNativeQuery(sql, returnType); + setQueryParameters(query, queryParameters); + return query; + } + + private static void addWhereClauseForAnchor(final AnchorEntity anchorEntity, + final StringBuilder sqlStringBuilder, + final Map queryParameters) { + sqlStringBuilder.append(" WHERE anchor_id = :anchorId"); + queryParameters.put("anchorId", anchorEntity.getId()); + } + + private static void addWhereClauseForAnchorIds(final List anchorIdsForPagination, + final StringBuilder sqlStringBuilder, + final Map queryParameters) { + sqlStringBuilder.append(" WHERE anchor_id IN (:anchorIdsForPagination)"); + queryParameters.put("anchorIdsForPagination", anchorIdsForPagination); + } + + private static void addWhereClauseForDataspace(final DataspaceEntity dataspaceEntity, + final StringBuilder sqlStringBuilder, + final Map queryParameters) { + sqlStringBuilder.append(" JOIN anchor ON anchor.id = fragment.anchor_id WHERE dataspace_id = :dataspaceId"); + queryParameters.put("dataspaceId", dataspaceEntity.getId()); + } + + private static void addNodeSearchConditions(final CpsPathQuery cpsPathQuery, + final StringBuilder sqlStringBuilder, + final Map queryParameters, + final boolean acrossAnchors) { + addAbsoluteParentXpathSearchCondition(cpsPathQuery, sqlStringBuilder, queryParameters, acrossAnchors); + addXpathSearchCondition(cpsPathQuery, sqlStringBuilder, queryParameters); + addLeafConditions(cpsPathQuery, sqlStringBuilder); + addTextFunctionCondition(cpsPathQuery, sqlStringBuilder, queryParameters); + addContainsFunctionCondition(cpsPathQuery, sqlStringBuilder, queryParameters); + } + + private static void addXpathSearchCondition(final CpsPathQuery cpsPathQuery, + final StringBuilder sqlStringBuilder, + final Map queryParameters) { + sqlStringBuilder.append(" AND (xpath LIKE :escapedXpath OR " + + "(xpath LIKE :escapedXpath||'[@%]' AND xpath NOT LIKE :escapedXpath||'[@%]/%[@%]'))"); + if (CpsPathPrefixType.ABSOLUTE.equals(cpsPathQuery.getCpsPathPrefixType())) { + queryParameters.put("escapedXpath", EscapeUtils.escapeForSqlLike(cpsPathQuery.getXpathPrefix())); + } else { + queryParameters.put("escapedXpath", "%/" + EscapeUtils.escapeForSqlLike(cpsPathQuery.getDescendantName())); + } + } + + private static void addAbsoluteParentXpathSearchCondition(final CpsPathQuery cpsPathQuery, + final StringBuilder sqlStringBuilder, + final Map queryParameters, + final boolean acrossAnchors) { + if 
(CpsPathPrefixType.ABSOLUTE.equals(cpsPathQuery.getCpsPathPrefixType())) { + if (cpsPathQuery.getNormalizedParentPath().isEmpty()) { + sqlStringBuilder.append(" AND parent_id IS NULL"); + } else { + if (acrossAnchors) { + sqlStringBuilder.append(" AND parent_id IN (SELECT id FROM fragment WHERE xpath = :parentXpath)"); + } else { + sqlStringBuilder.append(" AND parent_id = (SELECT id FROM fragment WHERE xpath = :parentXpath" + + " AND anchor_id = :anchorId)"); + } + queryParameters.put("parentXpath", cpsPathQuery.getNormalizedParentPath()); + } + } + } + + private static void addPaginationCondition(final StringBuilder sqlStringBuilder, + final Map queryParameters, + final PaginationOption paginationOption) { + if (PaginationOption.NO_PAGINATION != paginationOption) { + final Integer offset = (paginationOption.getPageIndex() - 1) * paginationOption.getPageSize(); + sqlStringBuilder.append(" LIMIT :pageSize OFFSET :offset"); + queryParameters.put("pageSize", paginationOption.getPageSize()); + queryParameters.put("offset", offset); + } + } + + private static Integer getTextValueAsInt(final CpsPathQuery cpsPathQuery) { + try { + return Integer.parseInt(cpsPathQuery.getTextFunctionConditionValue()); + } catch (final NumberFormatException e) { + return null; + } + } + + private static void addLeafConditions(final CpsPathQuery cpsPathQuery, final StringBuilder sqlStringBuilder) { + if (cpsPathQuery.hasLeafConditions()) { + sqlStringBuilder.append(" AND ("); + final Queue booleanOperatorsQueue = new LinkedList<>(cpsPathQuery.getBooleanOperators()); + cpsPathQuery.getLeafConditions().forEach(leafCondition -> { + if (leafCondition.value() instanceof Integer) { + sqlStringBuilder.append("(attributes ->> '").append(leafCondition.name()).append("')\\:\\:int"); + sqlStringBuilder.append(leafCondition.operator()); + sqlStringBuilder.append(leafCondition.value()); + } else { + if ("=".equals(leafCondition.operator())) { + final String leafValueAsText = leafCondition.value().toString(); + sqlStringBuilder.append("attributes ->> '").append(leafCondition.name()).append("'"); + sqlStringBuilder.append(" = '"); + sqlStringBuilder.append(EscapeUtils.escapeForSqlStringLiteral(leafValueAsText)); + sqlStringBuilder.append("'"); + } else { + throw new CpsPathException(" can use only " + leafCondition.operator() + " with integer "); + } + } + if (!booleanOperatorsQueue.isEmpty()) { + sqlStringBuilder.append(" "); + sqlStringBuilder.append(booleanOperatorsQueue.poll()); + sqlStringBuilder.append(" "); + } + }); + sqlStringBuilder.append(")"); + } + } + + private static void addTextFunctionCondition(final CpsPathQuery cpsPathQuery, + final StringBuilder sqlStringBuilder, + final Map queryParameters) { + if (cpsPathQuery.hasTextFunctionCondition()) { + sqlStringBuilder.append(" AND ("); + sqlStringBuilder.append("attributes @> jsonb_build_object(:textLeafName, :textValue)"); + sqlStringBuilder + .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValue))"); + queryParameters.put("textLeafName", cpsPathQuery.getTextFunctionConditionLeafName()); + queryParameters.put("textValue", cpsPathQuery.getTextFunctionConditionValue()); + final Integer textValueAsInt = getTextValueAsInt(cpsPathQuery); + if (textValueAsInt != null) { + sqlStringBuilder.append(" OR attributes @> jsonb_build_object(:textLeafName, :textValueAsInt)"); + sqlStringBuilder + .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValueAsInt))"); + queryParameters.put("textValueAsInt", 
textValueAsInt); + } + sqlStringBuilder.append(")"); + } + } + + private static void addContainsFunctionCondition(final CpsPathQuery cpsPathQuery, + final StringBuilder sqlStringBuilder, + final Map queryParameters) { + if (cpsPathQuery.hasContainsFunctionCondition()) { + sqlStringBuilder.append(" AND attributes ->> :containsLeafName LIKE CONCAT('%',:containsValue,'%') "); + queryParameters.put("containsLeafName", cpsPathQuery.getContainsFunctionConditionLeafName()); + queryParameters.put("containsValue", + EscapeUtils.escapeForSqlLike(cpsPathQuery.getContainsFunctionConditionValue())); + } + } + + private static void setQueryParameters(final Query query, final Map queryParameters) { + for (final Map.Entry queryParameter : queryParameters.entrySet()) { + query.setParameter(queryParameter.getKey(), queryParameter.getValue()); + } + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepository.java new file mode 100755 index 0000000000..8edc3f2311 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepository.java @@ -0,0 +1,140 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2021-2023 Nordix Foundation. + * Modifications Copyright (C) 2020-2021 Bell Canada. + * Modifications Copyright (C) 2020-2021 Pantheon.tech. + * Modifications Copyright (C) 2023 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.FragmentEntity; +import org.onap.cps.ri.utils.EscapeUtils; +import org.onap.cps.spi.exceptions.DataNodeNotFoundException; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Modifying; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; +import org.springframework.stereotype.Repository; + +@Repository +public interface FragmentRepository extends JpaRepository, FragmentRepositoryCpsPathQuery, + FragmentPrefetchRepository { + + Optional findByAnchorAndXpath(AnchorEntity anchorEntity, String xpath); + + default FragmentEntity getByAnchorAndXpath(final AnchorEntity anchorEntity, final String xpath) { + return findByAnchorAndXpath(anchorEntity, xpath).orElseThrow(() -> + new DataNodeNotFoundException(anchorEntity.getDataspace().getName(), anchorEntity.getName(), xpath)); + } + + @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)", + nativeQuery = true) + List findByAnchorIdAndXpathIn(@Param("anchorId") long anchorId, + @Param("xpaths") String[] xpaths); + + default List findByAnchorAndXpathIn(final AnchorEntity anchorEntity, + final Collection xpaths) { + return findByAnchorIdAndXpathIn(anchorEntity.getId(), xpaths.toArray(new String[0])); + } + + @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId \n" + + "AND xpath LIKE :escapedXpath||'[@%]' AND xpath NOT LIKE :escapedXpath||'[@%]/%[@%]'", + nativeQuery = true) + List findListByAnchorIdAndEscapedXpath(@Param("anchorId") long anchorId, + @Param("escapedXpath") String escapedXpath); + + default List findListByAnchorAndXpath(final AnchorEntity anchorEntity, final String xpath) { + final String escapedXpath = EscapeUtils.escapeForSqlLike(xpath); + return findListByAnchorIdAndEscapedXpath(anchorEntity.getId(), escapedXpath); + } + + @Query(value = "SELECT fragment.* FROM fragment JOIN anchor ON anchor.id = fragment.anchor_id " + + "WHERE dataspace_id = :dataspaceId AND xpath = ANY (:xpaths)", nativeQuery = true) + List findByDataspaceIdAndXpathIn(@Param("dataspaceId") int dataspaceId, + @Param("xpaths") String[] xpaths); + + default List findByDataspaceAndXpathIn(final DataspaceEntity dataspaceEntity, + final Collection xpaths) { + return findByDataspaceIdAndXpathIn(dataspaceEntity.getId(), xpaths.toArray(new String[0])); + } + + @Query(value = "SELECT * FROM fragment WHERE anchor_id IN (:anchorIds)" + + " AND xpath = ANY (:xpaths)", nativeQuery = true) + List findByAnchorIdsAndXpathIn(@Param("anchorIds") Long[] anchorIds, + @Param("xpaths") String[] xpaths); + + @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId LIMIT 1", nativeQuery = true) + Optional findOneByAnchorId(@Param("anchorId") long anchorId); + + @Modifying + @Query(value = "DELETE FROM fragment WHERE anchor_id = ANY (:anchorIds)", nativeQuery = true) + void deleteByAnchorIdIn(@Param("anchorIds") long[] anchorIds); + + default void deleteByAnchorIn(final Collection anchorEntities) { + deleteByAnchorIdIn(anchorEntities.stream().map(AnchorEntity::getId).mapToLong(id -> id).toArray()); + } + + @Modifying + @Query(value = 
"DELETE FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)", nativeQuery = true) + void deleteByAnchorIdAndXpaths(@Param("anchorId") long anchorId, @Param("xpaths") String[] xpaths); + + default void deleteByAnchorIdAndXpaths(final long anchorId, final Collection xpaths) { + deleteByAnchorIdAndXpaths(anchorId, xpaths.toArray(new String[0])); + } + + @Modifying + @Query(value = "DELETE FROM fragment f WHERE anchor_id = :anchorId AND xpath LIKE ANY (:xpathPatterns)", + nativeQuery = true) + void deleteByAnchorIdAndXpathLikeAny(@Param("anchorId") long anchorId, + @Param("xpathPatterns") String[] xpathPatterns); + + default void deleteListsByAnchorIdAndXpaths(long anchorId, Collection xpaths) { + deleteByAnchorIdAndXpathLikeAny(anchorId, + xpaths.stream().map(xpath -> EscapeUtils.escapeForSqlLike(xpath) + "[@%").toArray(String[]::new)); + } + + @Query(value = "SELECT xpath FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)", + nativeQuery = true) + List findAllXpathByAnchorIdAndXpathIn(@Param("anchorId") long anchorId, + @Param("xpaths") String[] xpaths); + + default List findAllXpathByAnchorAndXpathIn(final AnchorEntity anchorEntity, + final Collection xpaths) { + return findAllXpathByAnchorIdAndXpathIn(anchorEntity.getId(), xpaths.toArray(new String[0])); + } + + @Query(value = "SELECT EXISTS(SELECT 1 FROM fragment WHERE anchor_id = :anchorId" + + " AND xpath LIKE :xpathPattern LIMIT 1)", nativeQuery = true) + boolean existsByAnchorIdAndParentXpathAndXpathLike(@Param("anchorId") long anchorId, + @Param("xpathPattern") String xpathPattern); + + default boolean existsByAnchorAndXpathStartsWith(final AnchorEntity anchorEntity, final String xpath) { + return existsByAnchorIdAndParentXpathAndXpathLike(anchorEntity.getId(), + EscapeUtils.escapeForSqlLike(xpath) + "%"); + } + + @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId AND parent_id IS NULL", nativeQuery = true) + List findRootsByAnchorId(@Param("anchorId") long anchorId); + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQuery.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQuery.java new file mode 100644 index 0000000000..49c8e76ab5 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQuery.java @@ -0,0 +1,40 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2021-2023 Nordix Foundation. + * Modifications Copyright (C) 2023 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.List; +import org.onap.cps.cpspath.parser.CpsPathQuery; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.FragmentEntity; +import org.onap.cps.spi.PaginationOption; + +public interface FragmentRepositoryCpsPathQuery { + List findByAnchorAndCpsPath(AnchorEntity anchorEntity, CpsPathQuery cpsPathQuery); + + List findByDataspaceAndCpsPath(DataspaceEntity dataspaceEntity, + CpsPathQuery cpsPathQuery, List anchorIds); + + List findAnchorIdsForPagination(DataspaceEntity dataspaceEntity, CpsPathQuery cpsPathQuery, + PaginationOption paginationOption); + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQueryImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQueryImpl.java new file mode 100644 index 0000000000..01b2813cd3 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/FragmentRepositoryCpsPathQueryImpl.java @@ -0,0 +1,71 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2021-2024 Nordix Foundation. + * Modifications Copyright (C) 2023 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import jakarta.persistence.Query; +import jakarta.transaction.Transactional; +import java.util.List; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.cpspath.parser.CpsPathQuery; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.FragmentEntity; +import org.onap.cps.spi.PaginationOption; + +@RequiredArgsConstructor +@Slf4j +public class FragmentRepositoryCpsPathQueryImpl implements FragmentRepositoryCpsPathQuery { + + private final FragmentQueryBuilder fragmentQueryBuilder; + + @Override + @Transactional + public List findByAnchorAndCpsPath(final AnchorEntity anchorEntity, + final CpsPathQuery cpsPathQuery) { + final Query query = fragmentQueryBuilder.getQueryForAnchorAndCpsPath(anchorEntity, cpsPathQuery); + final List fragmentEntities = query.getResultList(); + log.debug("Fetched {} fragment entities by anchor and cps path.", fragmentEntities.size()); + return fragmentEntities; + } + + @Override + @Transactional + public List findByDataspaceAndCpsPath(final DataspaceEntity dataspaceEntity, + final CpsPathQuery cpsPathQuery, final List anchorIds) { + final Query query = fragmentQueryBuilder.getQueryForDataspaceAndCpsPath( + dataspaceEntity, cpsPathQuery, anchorIds); + final List fragmentEntities = query.getResultList(); + log.debug("Fetched {} fragment entities by cps path across all anchors.", fragmentEntities.size()); + return fragmentEntities; + } + + @Override + @Transactional + public List findAnchorIdsForPagination(final DataspaceEntity dataspaceEntity, final CpsPathQuery cpsPathQuery, + final PaginationOption paginationOption) { + final Query query = fragmentQueryBuilder.getQueryForAnchorIdsForPagination( + dataspaceEntity, cpsPathQuery, paginationOption); + return query.getResultList(); + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceQuery.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceQuery.java new file mode 100644 index 0000000000..ad0f9c5c61 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceQuery.java @@ -0,0 +1,37 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2024 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.Collection; +import java.util.Map; +import org.onap.cps.spi.model.ModuleReference; + +/** + * This interface is used in conjunction with {@link ModuleReferenceRepository} to create native sql queries. 
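+ * <p>Illustrative usage sketch only; the module name and revision below are example values:
+ * <pre>{@code
+ * final Collection<ModuleReference> candidateModuleReferences =
+ *         List.of(new ModuleReference("example-module", "2024-01-01"));
+ * final Collection<ModuleReference> newModuleReferences =
+ *         moduleReferenceRepository.identifyNewModuleReferences(candidateModuleReferences);
+ * }</pre>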
+ */ +public interface ModuleReferenceQuery { + + Collection identifyNewModuleReferences(final Collection moduleReferencesToCheck); + + Collection findModuleReferences(final String dataspaceName, final String anchorName, + final Map parentAttributes, + final Map childAttributes); +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceRepository.java new file mode 100644 index 0000000000..e9b866cc19 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceRepository.java @@ -0,0 +1,29 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import org.onap.cps.ri.models.YangResourceEntity; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.stereotype.Repository; + +@Repository +public interface ModuleReferenceRepository extends JpaRepository, ModuleReferenceQuery {} + diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceRepositoryImpl.java new file mode 100644 index 0000000000..c160fb1e38 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/ModuleReferenceRepositoryImpl.java @@ -0,0 +1,179 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.spi.model.ModuleReference; +import org.springframework.transaction.annotation.Transactional; + +@Slf4j +@Transactional +@RequiredArgsConstructor +public class ModuleReferenceRepositoryImpl implements ModuleReferenceQuery { + + @PersistenceContext + private EntityManager entityManager; + + private final TempTableCreator tempTableCreator; + + @Override + @SneakyThrows + public Collection identifyNewModuleReferences( + final Collection moduleReferencesToCheck) { + + if (moduleReferencesToCheck == null || moduleReferencesToCheck.isEmpty()) { + return Collections.emptyList(); + } + + final Collection> sqlData = new HashSet<>(moduleReferencesToCheck.size()); + for (final ModuleReference moduleReference : moduleReferencesToCheck) { + final List row = new ArrayList<>(2); + row.add(moduleReference.getModuleName()); + row.add(moduleReference.getRevision()); + sqlData.add(row); + } + + final String tempTableName = tempTableCreator.createTemporaryTable( + "moduleReferencesToCheckTemp", sqlData, "module_name", "revision"); + + return identifyNewModuleReferencesForCmHandle(tempTableName); + } + + /** + * Finds module references based on specified dataspace, anchor, and attribute filters. + * This method constructs and executes a SQL query to retrieve module references. The query applies filters to + * parent and child fragments using the provided attribute maps. The `parentAttributes` are used to filter + * parent fragments, while `childAttributes` filter child fragments. + * + * @param dataspaceName the name of the dataspace to filter on. + * @param anchorName the name of the anchor to filter on. + * @param parentAttributes a map of attributes for filtering parent fragments. + * @param childAttributes a map of attributes for filtering child fragments. + * @return a collection of {@link ModuleReference} objects that match the specified filters. 
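+     * <p>Illustrative call only; all literal values below are example data:
+     * <pre>{@code
+     * final Collection<ModuleReference> moduleReferences = moduleReferenceRepository.findModuleReferences(
+     *         "example-dataspace", "example-anchor",
+     *         Map.of("parent-attribute-name", "parent-attribute-value"),
+     *         Map.of("child-attribute-name", "child-attribute-value"));
+     * }</pre>
+     * Each attribute map is expected to hold at least one entry, as only the first entry of each map is bound as a
+     * query parameter.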
+ */ + @Transactional + @SuppressWarnings("unchecked") + @Override + public Collection findModuleReferences(final String dataspaceName, final String anchorName, + final Map parentAttributes, + final Map childAttributes) { + + final String parentFragmentWhereClause = buildWhereClause(childAttributes, "parentFragment"); + final String childFragmentWhereClause = buildWhereClause(parentAttributes, "childFragment"); + + final String moduleReferencesSqlQuery = buildModuleReferencesSqlQuery(parentFragmentWhereClause, + childFragmentWhereClause); + + final Query query = entityManager.createNativeQuery(moduleReferencesSqlQuery); + setQueryParameters(query, parentAttributes, childAttributes, anchorName, dataspaceName); + return processQueryResults(query.getResultList()); + } + + private String buildWhereClause(final Map attributes, final String alias) { + return attributes.keySet().stream() + .map(attributeName -> String.format("%s.attributes->>'%s' = ?", alias, attributeName)) + .collect(Collectors.joining(" AND ")); + } + + private void setQueryParameters(final Query query, final Map parentAttributes, + final Map childAttributes, final String anchorName, + final String dataspaceName) { + final String childAttributeValue = childAttributes.entrySet().iterator().next().getValue(); + query.setParameter(1, childAttributeValue); + + final String parentAttributeValue = parentAttributes.entrySet().iterator().next().getValue(); + query.setParameter(2, parentAttributeValue); + + query.setParameter(3, anchorName); + query.setParameter(4, dataspaceName); + } + + private String buildModuleReferencesSqlQuery(final String parentFragmentClause, final String childFragmentClause) { + return """ + WITH Fragment AS ( + SELECT childFragment.attributes->>'id' AS schema_set_name + FROM fragment parentFragment + JOIN fragment childFragment ON parentFragment.parent_id = childFragment.id + JOIN anchor anchorInfo ON parentFragment.anchor_id = anchorInfo.id + JOIN dataspace dataspaceInfo ON anchorInfo.dataspace_id = dataspaceInfo.id + WHERE %s + AND %s + AND anchorInfo.name = ? + AND dataspaceInfo.name = ? 
+ LIMIT 1 + ), + SchemaSet AS ( + SELECT id + FROM schema_set + WHERE name = (SELECT schema_set_name FROM Fragment) + ) + SELECT yangResource.module_name, yangResource.revision + FROM yang_resource yangResource + JOIN schema_set_yang_resources schemaSetYangResources + ON yangResource.id = schemaSetYangResources.yang_resource_id + WHERE schemaSetYangResources.schema_set_id = (SELECT id FROM SchemaSet); + """.formatted(parentFragmentClause, childFragmentClause); + } + + private Collection processQueryResults(final List queryResults) { + if (queryResults.isEmpty()) { + log.info("No module references found for the provided attributes."); + return Collections.emptyList(); + } + return queryResults.stream() + .map(queryResult -> { + final String name = (String) queryResult[0]; + final String revision = (String) queryResult[1]; + return new ModuleReference(name, revision); + }) + .collect(Collectors.toList()); + } + + private Collection identifyNewModuleReferencesForCmHandle(final String tempTableName) { + final String sql = String.format( + "SELECT %1$s.module_name, %1$s.revision" + + " FROM %1$s LEFT JOIN yang_resource" + + " ON yang_resource.module_name=%1$s.module_name" + + " AND yang_resource.revision=%1$s.revision" + + " WHERE yang_resource.module_name IS NULL;", tempTableName); + + @SuppressWarnings("unchecked") + final List resultsAsObjects = entityManager.createNativeQuery(sql).getResultList(); + + final List resultsAsModuleReferences = new ArrayList<>(resultsAsObjects.size()); + for (final Object[] row : resultsAsObjects) { + resultsAsModuleReferences.add(new ModuleReference((String) row[0], (String) row[1])); + } + return resultsAsModuleReferences; + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetRepository.java new file mode 100644 index 0000000000..9357a5c6a7 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetRepository.java @@ -0,0 +1,79 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2020 Pantheon.tech + * Modifications Copyright (C) 2022 TechMahindra Ltd. + * Modifications Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.SchemaSetEntity; +import org.onap.cps.spi.exceptions.SchemaSetNotFoundException; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Modifying; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; +import org.springframework.stereotype.Repository; + +@Repository +public interface SchemaSetRepository extends JpaRepository { + + Optional findByDataspaceAndName(DataspaceEntity dataspaceEntity, String schemaSetName); + + /** + * Gets schema sets by dataspace. + * @param dataspaceEntity dataspace entity + * @return list of schema set entity + */ + List findByDataspace(DataspaceEntity dataspaceEntity); + + Integer countByDataspace(DataspaceEntity dataspaceEntity); + + /** + * Gets a schema set by dataspace and schema set name. + * + * @param dataspaceEntity dataspace entity + * @param schemaSetName schema set name + * @return schema set entity + * @throws SchemaSetNotFoundException if SchemaSet not found + */ + default SchemaSetEntity getByDataspaceAndName(final DataspaceEntity dataspaceEntity, final String schemaSetName) { + return findByDataspaceAndName(dataspaceEntity, schemaSetName) + .orElseThrow(() -> new SchemaSetNotFoundException(dataspaceEntity.getName(), schemaSetName)); + } + + @Modifying + @Query(value = "DELETE FROM schema_set WHERE dataspace_id = :dataspaceId AND name = ANY (:schemaSetNames)", + nativeQuery = true) + void deleteByDataspaceIdAndNameIn(@Param("dataspaceId") final int dataspaceId, + @Param("schemaSetNames") final String[] schemaSetNames); + + /** + * Delete multiple schema sets in a given dataspace. + * @param dataspaceEntity dataspace entity + * @param schemaSetNames schema set names + */ + default void deleteByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity, + final Collection schemaSetNames) { + deleteByDataspaceIdAndNameIn(dataspaceEntity.getId(), schemaSetNames.toArray(new String[0])); + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepository.java new file mode 100644 index 0000000000..8350d5728c --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepository.java @@ -0,0 +1,29 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2021-2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.List; + +public interface SchemaSetYangResourceRepository { + + void insertSchemaSetIdYangResourceId(final Integer schemaSetId, final List yangResourceIds); + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepositoryImpl.java new file mode 100644 index 0000000000..287bcda01b --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/SchemaSetYangResourceRepositoryImpl.java @@ -0,0 +1,59 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2021-2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import java.sql.PreparedStatement; +import java.util.List; +import org.hibernate.Session; +import org.springframework.transaction.annotation.Transactional; + + +@Transactional +public class SchemaSetYangResourceRepositoryImpl implements SchemaSetYangResourceRepository { + + private static final int MAX_INSERT_BATCH_SIZE = 100; + + @PersistenceContext + private EntityManager entityManager; + + @Override + public void insertSchemaSetIdYangResourceId(final Integer schemaSetId, final List yangResourceIds) { + final Session session = entityManager.unwrap(Session.class); + session.doWork(connection -> { + try (PreparedStatement preparedStatement = connection.prepareStatement( + "INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES ( ?, ?)")) { + int sqlQueryCount = 1; + for (final int yangResourceId : yangResourceIds) { + preparedStatement.setInt(1, schemaSetId); + preparedStatement.setInt(2, yangResourceId); + preparedStatement.addBatch(); + if (sqlQueryCount % MAX_INSERT_BATCH_SIZE == 0 || sqlQueryCount == yangResourceIds.size()) { + preparedStatement.executeBatch(); + } + sqlQueryCount++; + } + } + }); + } +} + diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/TempTableCreator.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/TempTableCreator.java new file mode 100644 index 0000000000..cc83ab7d94 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/TempTableCreator.java @@ -0,0 +1,102 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation. 
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.ri.repository;
+
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.onap.cps.ri.utils.EscapeUtils;
+import org.springframework.stereotype.Component;
+import org.springframework.transaction.annotation.Transactional;
+
+@Slf4j
+@Transactional
+@AllArgsConstructor
+@Component
+public class TempTableCreator {
+
+    @PersistenceContext
+    private EntityManager entityManager;
+
+    /**
+     * Create a uniquely named temporary table.
+     *
+     * @param prefix prefix for the table name (so you can recognize it)
+     * @param sqlData data to insert (strings only); each inner List represents a row of data
+     * @param columnNames column names (in same order as data in rows in sqlData)
+     * @return a unique temporary table name with given prefix
+     */
+    public String createTemporaryTable(final String prefix,
+                                       final Collection<List<String>> sqlData,
+                                       final String... columnNames) {
+        final String tempTableName = prefix + UUID.randomUUID().toString().replace("-", "");
+        final StringBuilder sqlStringBuilder = new StringBuilder("CREATE TEMPORARY TABLE ");
+        sqlStringBuilder.append(tempTableName);
+        defineColumns(sqlStringBuilder, columnNames);
+        sqlStringBuilder.append(" ON COMMIT DROP;");
+        insertData(sqlStringBuilder, tempTableName, columnNames, sqlData);
+        entityManager.createNativeQuery(sqlStringBuilder.toString()).executeUpdate();
+        return tempTableName;
+    }
+
+    private static void defineColumns(final StringBuilder sqlStringBuilder, final String[] columnNames) {
+        sqlStringBuilder.append('(');
+        final Iterator it = Arrays.stream(columnNames).iterator();
+        while (it.hasNext()) {
+            final String columnName = it.next();
+            sqlStringBuilder.append(" ");
+            sqlStringBuilder.append(columnName);
+            sqlStringBuilder.append(" varchar NOT NULL");
+            if (it.hasNext()) {
+                sqlStringBuilder.append(",");
+            }
+        }
+        sqlStringBuilder.append(")");
+    }
+
+    private static void insertData(final StringBuilder sqlStringBuilder,
+                                   final String tempTableName,
+                                   final String[] columnNames,
+                                   final Collection<List<String>> sqlData) {
+        final Collection sqlInserts = new HashSet<>(sqlData.size());
+        for (final Collection rowValues : sqlData) {
+            final Collection escapedValues =
+                rowValues.stream().map(EscapeUtils::escapeForSqlStringLiteral).collect(Collectors.toList());
+            sqlInserts.add("('" + String.join("','", escapedValues) + "')");
+        }
+        sqlStringBuilder.append("INSERT INTO ");
+        sqlStringBuilder.append(tempTableName);
+        sqlStringBuilder.append(" (");
+        sqlStringBuilder.append(String.join(",", columnNames));
+        sqlStringBuilder.append(") VALUES ");
+        sqlStringBuilder.append(String.join(",", sqlInserts));
+        sqlStringBuilder.append(";");
+    }
+
+}
diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepository.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepository.java
new file mode 100644
index 0000000000..ef7b12dc9c
--- /dev/null
+++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepository.java
@@ -0,0 +1,31 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2022-2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.Collection; +import java.util.List; +import org.onap.cps.spi.model.ModuleReference; + +public interface YangResourceNativeRepository { + + List getResourceIdsByModuleReferences(Collection moduleReferences); + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepositoryImpl.java new file mode 100644 index 0000000000..c65ab7d6fa --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceNativeRepositoryImpl.java @@ -0,0 +1,69 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.StringJoiner; +import lombok.extern.slf4j.Slf4j; +import org.hibernate.type.StandardBasicTypes; +import org.onap.cps.spi.model.ModuleReference; +import org.springframework.stereotype.Repository; +import org.springframework.transaction.annotation.Transactional; + +@Slf4j +@Repository +public class YangResourceNativeRepositoryImpl implements YangResourceNativeRepository { + + @PersistenceContext + private EntityManager entityManager; + + @Override + @Transactional + public List getResourceIdsByModuleReferences(final Collection moduleReferences) { + if (moduleReferences.isEmpty()) { + return Collections.emptyList(); + } + final Query query = entityManager.createNativeQuery(getCombinedSelectSqlQuery(moduleReferences)) + .unwrap(org.hibernate.query.NativeQuery.class) + .addScalar("id", StandardBasicTypes.INTEGER); + final List yangResourceIds = query.getResultList(); + if (yangResourceIds.size() != moduleReferences.size()) { + log.warn("ModuleReferences size : {} and QueryResult size : {}", moduleReferences.size(), + yangResourceIds.size()); + } + return yangResourceIds; + } + + private String getCombinedSelectSqlQuery(final Collection moduleReferences) { + final StringJoiner sqlQueryJoiner = new StringJoiner(" UNION ALL "); + moduleReferences.forEach(moduleReference -> + sqlQueryJoiner.add(String.format("SELECT id FROM yang_resource WHERE module_name='%s' and revision='%s'", + moduleReference.getModuleName(), + moduleReference.getRevision())) + ); + return sqlQueryJoiner.toString(); + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceRepository.java 
b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceRepository.java new file mode 100644 index 0000000000..9a11592310 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/repository/YangResourceRepository.java @@ -0,0 +1,102 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2020 Pantheon.tech + * Modifications Copyright (C) 2021-2024 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.repository; + +import java.util.Collection; +import java.util.List; +import java.util.Set; +import org.onap.cps.ri.models.YangResourceEntity; +import org.onap.cps.ri.models.YangResourceModuleReference; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Modifying; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; +import org.springframework.stereotype.Repository; + +@Repository +public interface YangResourceRepository extends JpaRepository, + YangResourceNativeRepository, SchemaSetYangResourceRepository { + + List findAllByChecksumIn(String[] checksums); + + default List findAllByChecksumIn(final Collection checksums) { + return findAllByChecksumIn(checksums.toArray(new String[0])); + } + + @Query(value = """ + SELECT DISTINCT + yang_resource.module_name AS module_name, + yang_resource.revision AS revision + FROM + dataspace + JOIN schema_set ON schema_set.dataspace_id = dataspace.id + JOIN schema_set_yang_resources ON schema_set_yang_resources.schema_set_id = schema_set.id + JOIN yang_resource ON yang_resource.id = schema_set_yang_resources.yang_resource_id + WHERE + dataspace.name = :dataspaceName + """, nativeQuery = true) + Set findAllModuleReferencesByDataspace(@Param("dataspaceName") String dataspaceName); + + @Query(value = """ + SELECT DISTINCT + yang_resource.module_name AS module_name, + yang_resource.revision AS revision + FROM + dataspace + JOIN anchor ON anchor.dataspace_id = dataspace.id + JOIN schema_set ON schema_set.id = anchor.schema_set_id + JOIN schema_set_yang_resources ON schema_set_yang_resources.schema_set_id = schema_set.id + JOIN yang_resource ON yang_resource.id = schema_set_yang_resources.yang_resource_id + WHERE + dataspace.name = :dataspaceName + AND anchor.name = :anchorName + """, nativeQuery = true) + Set findAllModuleReferencesByDataspaceAndAnchor( + @Param("dataspaceName") String dataspaceName, @Param("anchorName") String anchorName); + + @Query(value = """ + SELECT DISTINCT + yang_resource.* + FROM + dataspace + JOIN anchor ON anchor.dataspace_id = dataspace.id + JOIN schema_set ON schema_set.id = anchor.schema_set_id + JOIN schema_set_yang_resources ON schema_set_yang_resources.schema_set_id = schema_set.id + JOIN 
yang_resource ON yang_resource.id = schema_set_yang_resources.yang_resource_id + WHERE + dataspace.name = :dataspaceName + AND anchor.name = :anchorName + AND (:moduleName IS NULL OR yang_resource.module_name = :moduleName) + AND (:revision IS NULL OR yang_resource.revision = :revision) + """, nativeQuery = true) + Set findAllModuleDefinitionsByDataspaceAndAnchorAndModule( + @Param("dataspaceName") String dataspaceName, @Param("anchorName") String anchorName, + @Param("moduleName") String moduleName, @Param("revision") String revision); + + @Modifying + @Query(value = "DELETE FROM schema_set_yang_resources WHERE schema_set_id = :schemaSetId", nativeQuery = true) + void deleteSchemaSetYangResourceForSchemaSetId(@Param("schemaSetId") int schemaSetId); + + @Modifying + @Query(value = "DELETE FROM yang_resource yr WHERE NOT EXISTS " + + "(SELECT 1 FROM schema_set_yang_resources ssyr WHERE ssyr.yang_resource_id = yr.id)", nativeQuery = true) + void deleteOrphans(); +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsSessionFactory.java b/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsSessionFactory.java new file mode 100644 index 0000000000..c0291176f4 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsSessionFactory.java @@ -0,0 +1,70 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.utils; + +import org.hibernate.HibernateException; +import org.hibernate.Session; +import org.hibernate.SessionFactory; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.models.SchemaSetEntity; +import org.onap.cps.ri.models.YangResourceEntity; +import org.springframework.beans.factory.config.ConfigurableBeanFactory; +import org.springframework.context.annotation.Scope; +import org.springframework.stereotype.Component; + +@Component +@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON) +public class CpsSessionFactory { + + private SessionFactory sessionFactory = null; + + /** + * Open a session from session factory. + * + * @return session + * @throws HibernateException hibernate exception + */ + public Session openSession() throws HibernateException { + return getSessionFactory().openSession(); + } + + /** + * Close session factory. 
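+     * <p>Illustrative lifecycle sketch only: callers open and close individual sessions, while the factory itself
+     * is closed once, at shutdown (as done by {@code SessionManager}):
+     * <pre>{@code
+     * final Session session = cpsSessionFactory.openSession();
+     * try {
+     *     session.beginTransaction();
+     *     // ... persistence work ...
+     *     session.getTransaction().commit();
+     * } finally {
+     *     session.close();
+     * }
+     * // once, at application shutdown:
+     * cpsSessionFactory.closeSessionFactory();
+     * }</pre>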
+ * + * @throws HibernateException hibernate exception + */ + public void closeSessionFactory() throws HibernateException { + getSessionFactory().close(); + } + + private SessionFactory getSessionFactory() { + if (sessionFactory == null) { + sessionFactory = new org.hibernate.cfg.Configuration().configure("hibernate.cfg.xml") + .addAnnotatedClass(AnchorEntity.class) + .addAnnotatedClass(DataspaceEntity.class) + .addAnnotatedClass(SchemaSetEntity.class) + .addAnnotatedClass(YangResourceEntity.class) + .buildSessionFactory(); + } + return sessionFactory; + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsValidatorImpl.java b/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsValidatorImpl.java new file mode 100644 index 0000000000..4f942a37ea --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/utils/CpsValidatorImpl.java @@ -0,0 +1,70 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.utils; + +import com.google.common.collect.Lists; +import java.util.Arrays; +import java.util.Collection; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.impl.utils.CpsValidator; +import org.onap.cps.spi.PaginationOption; +import org.onap.cps.spi.exceptions.DataValidationException; +import org.springframework.stereotype.Component; + +@Slf4j +@Component +@RequiredArgsConstructor +public class CpsValidatorImpl implements CpsValidator { + + private static final char[] UNSUPPORTED_NAME_CHARACTERS = "!\" #$%&'()*+,./\\:;<=>?@[]^`{|}~".toCharArray(); + + @Override + public void validateNameCharacters(final String... 
names) { + validateNameCharacters(Arrays.asList(names)); + } + + @Override + public void validateNameCharacters(final Iterable names) { + for (final String name : names) { + final Collection charactersOfName = Lists.charactersOf(name); + for (final char unsupportedCharacter : UNSUPPORTED_NAME_CHARACTERS) { + if (charactersOfName.contains(unsupportedCharacter)) { + throw new DataValidationException("Name or ID Validation Error.", + name + " invalid token encountered at position " + + (name.indexOf(unsupportedCharacter) + 1)); + } + } + } + } + + @Override + public void validatePaginationOption(final PaginationOption paginationOption) { + if (PaginationOption.NO_PAGINATION == paginationOption) { + return; + } + + if (!paginationOption.isValidPaginationOption()) { + throw new DataValidationException("Pagination validation error.", + "Invalid page index or size"); + } + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/utils/EscapeUtils.java b/cps-ri/src/main/java/org/onap/cps/ri/utils/EscapeUtils.java new file mode 100644 index 0000000000..5323ae6bc9 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/utils/EscapeUtils.java @@ -0,0 +1,37 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.utils; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class EscapeUtils { + + public static String escapeForSqlLike(final String value) { + return value.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_"); + } + + public static String escapeForSqlStringLiteral(final String value) { + return value.replace("'", "''"); + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/utils/SessionManager.java b/cps-ri/src/main/java/org/onap/cps/ri/utils/SessionManager.java new file mode 100644 index 0000000000..b81a0bd39d --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/utils/SessionManager.java @@ -0,0 +1,180 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.utils; + +import com.google.common.util.concurrent.TimeLimiter; +import com.google.common.util.concurrent.UncheckedExecutionException; +import jakarta.annotation.PostConstruct; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import lombok.RequiredArgsConstructor; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.hibernate.HibernateException; +import org.hibernate.LockMode; +import org.hibernate.Session; +import org.onap.cps.ri.models.AnchorEntity; +import org.onap.cps.ri.models.DataspaceEntity; +import org.onap.cps.ri.repository.AnchorRepository; +import org.onap.cps.ri.repository.DataspaceRepository; +import org.onap.cps.spi.exceptions.SessionManagerException; +import org.onap.cps.spi.exceptions.SessionTimeoutException; +import org.springframework.beans.factory.config.ConfigurableBeanFactory; +import org.springframework.context.annotation.Scope; +import org.springframework.stereotype.Component; + +@RequiredArgsConstructor +@Slf4j +@Component +@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON) +public class SessionManager { + + private final CpsSessionFactory cpsSessionFactory; + private final TimeLimiterProvider timeLimiterProvider; + private final DataspaceRepository dataspaceRepository; + private final AnchorRepository anchorRepository; + private final ConcurrentHashMap sessionMap = new ConcurrentHashMap<>(); + public static final boolean WITH_COMMIT = true; + public static final boolean WITH_ROLLBACK = false; + + @PostConstruct + private void postConstruct() { + final Thread shutdownHook = new Thread(this::closeAllSessionsInShutdown); + Runtime.getRuntime().addShutdownHook(shutdownHook); + } + + private void closeAllSessionsInShutdown() { + for (final String sessionId : sessionMap.keySet()) { + try { + closeSession(sessionId, WITH_ROLLBACK); + log.info("Session with session ID {} rolled back and closed", sessionId); + } catch (final Exception e) { + log.warn("Session with session ID {} failed to close", sessionId); + } + } + cpsSessionFactory.closeSessionFactory(); + } + + /** + * Starts a session which allows use of locks and batch interaction with the persistence service. + * + * @return Session ID string + */ + public String startSession() { + final Session session = cpsSessionFactory.openSession(); + final String sessionId = UUID.randomUUID().toString(); + sessionMap.put(sessionId, session); + session.beginTransaction(); + return sessionId; + } + + /** + * Close session. + * Changes are committed when commit boolean is set to true. + * Rollback will execute when commit boolean is set to false. 
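+     * <p>Typical flow, for illustration only; the dataspace name, anchor name and timeout are example values:
+     * <pre>{@code
+     * final String sessionId = sessionManager.startSession();
+     * boolean commit = SessionManager.WITH_ROLLBACK;
+     * try {
+     *     sessionManager.lockAnchor(sessionId, "example-dataspace", "example-anchor", 500L);
+     *     // ... batch operations against the locked anchor ...
+     *     commit = SessionManager.WITH_COMMIT;
+     * } finally {
+     *     sessionManager.closeSession(sessionId, commit);
+     * }
+     * }</pre>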
+ * + * @param sessionId session ID + * @param commit indicator whether session will commit or rollback + */ + public void closeSession(final String sessionId, final boolean commit) { + try { + final Session session = getSession(sessionId); + if (commit) { + session.getTransaction().commit(); + } else { + session.getTransaction().rollback(); + } + session.close(); + } catch (final HibernateException e) { + throw new SessionManagerException("Cannot close session", + String.format("Unable to close session with session ID '%s'", sessionId), e); + } finally { + sessionMap.remove(sessionId); + } + } + + /** + * Lock Anchor. + * To release lock(s), the session holding the lock(s) must be closed. + * + * @param sessionId session ID + * @param dataspaceName dataspace name + * @param anchorName anchor name + * @param timeoutInMilliseconds lock attempt timeout in milliseconds + */ + @SneakyThrows + public void lockAnchor(final String sessionId, final String dataspaceName, + final String anchorName, final Long timeoutInMilliseconds) { + final ExecutorService executorService = Executors.newSingleThreadExecutor(); + final TimeLimiter timeLimiter = timeLimiterProvider.getTimeLimiter(executorService); + + try { + timeLimiter.callWithTimeout(() -> { + applyPessimisticWriteLockOnAnchor(sessionId, dataspaceName, anchorName); + return null; + }, timeoutInMilliseconds, TimeUnit.MILLISECONDS); + } catch (final TimeoutException e) { + throw new SessionTimeoutException( + "Timeout: Anchor locking failed", + "The error could be caused by another session holding a lock on the specified table. " + + "Retrying the request could be required.", e); + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + throw new SessionManagerException("Operation interrupted", "This thread was interrupted.", e); + } catch (final ExecutionException | UncheckedExecutionException e) { + if (e.getCause() != null) { + throw e.getCause(); + } + throw new SessionManagerException( + "Operation Aborted", + "The transaction request was aborted. 
" + + "Retrying and checking all details are correct could be required", e); + } finally { + executorService.shutdownNow(); + } + } + + private void applyPessimisticWriteLockOnAnchor(final String sessionId, final String dataspaceName, + final String anchorName) { + final Session session = getSession(sessionId); + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); + final long anchorId = anchorEntity.getId(); + log.debug("Attempting to lock anchor {} for session {}", anchorName, sessionId); + session.get(AnchorEntity.class, anchorId, LockMode.PESSIMISTIC_WRITE); + log.info("Anchor {} successfully locked", anchorName); + } + + private Session getSession(final String sessionId) { + final Session session = sessionMap.get(sessionId); + if (session == null) { + throw new SessionManagerException("Session not found", + String.format("Session with ID %s does not exist", sessionId)); + } + return session; + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/ri/utils/TimeLimiterProvider.java b/cps-ri/src/main/java/org/onap/cps/ri/utils/TimeLimiterProvider.java new file mode 100644 index 0000000000..10031c0b28 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/ri/utils/TimeLimiterProvider.java @@ -0,0 +1,33 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.utils; + +import com.google.common.util.concurrent.SimpleTimeLimiter; +import com.google.common.util.concurrent.TimeLimiter; +import java.util.concurrent.ExecutorService; +import org.springframework.stereotype.Component; + +@Component +public class TimeLimiterProvider { + public TimeLimiter getTimeLimiter(final ExecutorService executorService) { + return SimpleTimeLimiter.create(executorService); + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/config/CpsSessionFactory.java b/cps-ri/src/main/java/org/onap/cps/spi/config/CpsSessionFactory.java deleted file mode 100644 index 5241ea0096..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/config/CpsSessionFactory.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.config; - -import org.hibernate.HibernateException; -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.SchemaSetEntity; -import org.onap.cps.spi.entities.YangResourceEntity; -import org.springframework.beans.factory.config.ConfigurableBeanFactory; -import org.springframework.context.annotation.Scope; -import org.springframework.stereotype.Component; - -@Component -@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON) -public class CpsSessionFactory { - - private SessionFactory sessionFactory = null; - - /** - * Open a session from session factory. - * - * @return session - * @throws HibernateException hibernate exception - */ - public Session openSession() throws HibernateException { - return getSessionFactory().openSession(); - } - - /** - * Close session factory. - * - * @throws HibernateException hibernate exception - */ - public void closeSessionFactory() throws HibernateException { - getSessionFactory().close(); - } - - private SessionFactory getSessionFactory() { - if (sessionFactory == null) { - sessionFactory = new org.hibernate.cfg.Configuration().configure("hibernate.cfg.xml") - .addAnnotatedClass(AnchorEntity.class) - .addAnnotatedClass(DataspaceEntity.class) - .addAnnotatedClass(SchemaSetEntity.class) - .addAnnotatedClass(YangResourceEntity.class) - .buildSessionFactory(); - } - return sessionFactory; - } -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/AnchorEntity.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/AnchorEntity.java deleted file mode 100644 index ac06b0b5a4..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/entities/AnchorEntity.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.entities; - - -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.ManyToOne; -import jakarta.persistence.Table; -import jakarta.validation.constraints.NotNull; -import java.io.Serializable; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -/** - * Entity to store an anchor. - */ -@Getter -@Setter -@NoArgsConstructor -@AllArgsConstructor -@Builder -@Entity -@Table(name = "anchor") -@EqualsAndHashCode(onlyExplicitlyIncluded = true) -public class AnchorEntity implements Serializable { - - private static final long serialVersionUID = -8049987915308262518L; - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Long id; - - @NotNull - @Column - @EqualsAndHashCode.Include - private String name; - - @NotNull - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "schema_set_id") - private SchemaSetEntity schemaSet; - - @NotNull - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "dataspace_id") - @EqualsAndHashCode.Include - private DataspaceEntity dataspace; -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/DataspaceEntity.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/DataspaceEntity.java deleted file mode 100644 index ddfb09c942..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/entities/DataspaceEntity.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020-2023 Nordix Foundation. - * Modifications Copyright (C) 2020-2021 Pantheon.tech - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.entities; - -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.Table; -import jakarta.validation.constraints.NotNull; -import java.io.Serializable; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - - -/** - * Entity to store a dataspace. 
- */ -@Getter -@Setter -@Entity -@AllArgsConstructor -@NoArgsConstructor -@Table(name = "dataspace") -@EqualsAndHashCode(onlyExplicitlyIncluded = true) -public class DataspaceEntity implements Serializable { - - private static final long serialVersionUID = 8395254649813051882L; - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - - @NotNull - @Column(columnDefinition = "text") - @EqualsAndHashCode.Include - private String name; - - /** - * Initialize a Dataspace . - * - * @param name the Dataspace name. - */ - public DataspaceEntity(final String name) { - this.name = name; - } -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntity.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntity.java deleted file mode 100644 index c763f61f8f..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntity.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020-2024 Nordix Foundation. - * Modifications Copyright (C) 2021 Pantheon.tech - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.entities; - -import jakarta.persistence.CascadeType; -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.ManyToOne; -import jakarta.persistence.OneToMany; -import jakarta.persistence.SequenceGenerator; -import jakarta.persistence.Table; -import jakarta.validation.constraints.NotNull; -import java.io.Serializable; -import java.util.Set; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Data; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; -import lombok.ToString; -import org.hibernate.annotations.JdbcTypeCode; -import org.hibernate.type.SqlTypes; - -/** - * Entity to store a fragment. 
- */ -@Data -@Getter -@Setter -@AllArgsConstructor -@NoArgsConstructor -@Builder -@Entity -@Table(name = "fragment") -@EqualsAndHashCode(onlyExplicitlyIncluded = true) -public class FragmentEntity implements Serializable { - - private static final long serialVersionUID = 7737669789097119667L; - - @Id - @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "fragment_id_seq_generator") - @SequenceGenerator(name = "fragment_id_seq_generator", sequenceName = "fragment_id_seq", allocationSize = 100) - private Long id; - - @NotNull - @Column(columnDefinition = "text") - @EqualsAndHashCode.Include - private String xpath; - - @Column(name = "parent_id") - private Long parentId; - - @JdbcTypeCode(SqlTypes.JSON) - @Column(columnDefinition = "jsonb") - private String attributes; - - @NotNull - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "anchor_id") - @EqualsAndHashCode.Include - private AnchorEntity anchor; - - @ToString.Exclude - @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.LAZY) - @JoinColumn(name = "parent_id") - private Set childFragments; -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/SchemaSetEntity.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/SchemaSetEntity.java deleted file mode 100644 index e07f766ed0..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/entities/SchemaSetEntity.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020 Pantheon.tech - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.entities; - -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.FetchType; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.JoinColumn; -import jakarta.persistence.JoinTable; -import jakarta.persistence.ManyToMany; -import jakarta.persistence.ManyToOne; -import jakarta.persistence.Table; -import jakarta.validation.constraints.NotNull; -import java.io.Serializable; -import java.util.Set; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -/** - * Entity to store a Schema Set. 
- */ -@Getter -@Setter -@NoArgsConstructor -@Entity -@Table(name = "schema_set") -public class SchemaSetEntity implements Serializable { - - private static final long serialVersionUID = 6665056955069047269L; - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - - @NotNull - @Column - private String name; - - @NotNull - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "dataspace_id", referencedColumnName = "ID") - private DataspaceEntity dataspace; - - @NotNull - @ManyToMany(fetch = FetchType.LAZY) - @JoinTable(name = "schema_set_yang_resources", - joinColumns = @JoinColumn(name = "schema_set_id"), - inverseJoinColumns = @JoinColumn(name = "yang_resource_id")) - private Set yangResources; -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/YangResourceEntity.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/YangResourceEntity.java deleted file mode 100644 index 0c54baa4df..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/entities/YangResourceEntity.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020 Pantheon.tech - * Modifications Copyright (C) 2021-2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.entities; - -import jakarta.persistence.Column; -import jakarta.persistence.Entity; -import jakarta.persistence.GeneratedValue; -import jakarta.persistence.GenerationType; -import jakarta.persistence.Id; -import jakarta.persistence.ManyToMany; -import jakarta.persistence.Table; -import jakarta.validation.constraints.NotNull; -import java.io.Serializable; -import java.util.Set; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -/** - * Entity to store a Yang files. 
- */ -@Getter -@Setter -@NoArgsConstructor -@Entity -@Table(name = "yang_resource") -public class YangResourceEntity implements Serializable { - - private static final long serialVersionUID = -4496883162142106774L; - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Integer id; - - @NotNull - @Column - private String checksum; - - @NotNull - @Column - private String fileName; - - @NotNull - @Column - private String content; - - @NotNull - @Column - private String moduleName; - - @Column - private String revision; - - @ManyToMany(mappedBy = "yangResources") - private Set schemaSets; - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/YangResourceModuleReference.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/YangResourceModuleReference.java deleted file mode 100644 index 3c39c6baac..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/entities/YangResourceModuleReference.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2021 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.entities; - -import org.springframework.beans.factory.annotation.Value; - -public interface YangResourceModuleReference { - - @Value("#{target.module_name}") - String getModuleName(); - - String getRevision(); -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java deleted file mode 100755 index 13710dbec0..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsAdminPersistenceServiceImpl.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020-2024 Nordix Foundation. - * Modifications Copyright (C) 2020-2022 Bell Canada. - * Modifications Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2022 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.impl; - -import jakarta.transaction.Transactional; -import java.util.Collection; -import java.util.stream.Collectors; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.onap.cps.spi.CpsAdminPersistenceService; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.SchemaSetEntity; -import org.onap.cps.spi.exceptions.AlreadyDefinedException; -import org.onap.cps.spi.exceptions.DataspaceInUseException; -import org.onap.cps.spi.model.Anchor; -import org.onap.cps.spi.model.Dataspace; -import org.onap.cps.spi.repository.AnchorRepository; -import org.onap.cps.spi.repository.DataspaceRepository; -import org.onap.cps.spi.repository.SchemaSetRepository; -import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.stereotype.Component; - -@Slf4j -@Component -@RequiredArgsConstructor -public class CpsAdminPersistenceServiceImpl implements CpsAdminPersistenceService { - - private final DataspaceRepository dataspaceRepository; - private final AnchorRepository anchorRepository; - private final SchemaSetRepository schemaSetRepository; - - @Override - public void createDataspace(final String dataspaceName) { - try { - dataspaceRepository.save(new DataspaceEntity(dataspaceName)); - } catch (final DataIntegrityViolationException e) { - throw AlreadyDefinedException.forDataspace(dataspaceName, e); - } - } - - @Override - public void deleteDataspace(final String dataspaceName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final int numberOfAssociatedAnchors = anchorRepository.countByDataspace(dataspaceEntity); - if (numberOfAssociatedAnchors != 0) { - throw new DataspaceInUseException(dataspaceName, - String.format("Dataspace contains %d anchor(s)", numberOfAssociatedAnchors)); - } - final int numberOfAssociatedSchemaSets = schemaSetRepository.countByDataspace(dataspaceEntity); - if (numberOfAssociatedSchemaSets != 0) { - throw new DataspaceInUseException(dataspaceName, - String.format("Dataspace contains %d schemaset(s)", numberOfAssociatedSchemaSets)); - } - dataspaceRepository.delete(dataspaceEntity); - } - - @Override - public Dataspace getDataspace(final String dataspaceName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - return toDataspace(dataspaceEntity); - } - - @Override - public Collection getAllDataspaces() { - final Collection dataspaceEntities = dataspaceRepository.findAll(); - return dataspaceEntities.stream().map(CpsAdminPersistenceServiceImpl::toDataspace) - .collect(Collectors.toSet()); - } - - @Override - public void createAnchor(final String dataspaceName, final String schemaSetName, final String anchorName) { - final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final var schemaSetEntity = - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); - final var anchorEntity = AnchorEntity.builder() - .name(anchorName) - .dataspace(dataspaceEntity) - .schemaSet(schemaSetEntity) - .build(); - try { - anchorRepository.save(anchorEntity); - } catch (final DataIntegrityViolationException e) { - throw AlreadyDefinedException.forAnchor(anchorName, dataspaceName, e); - } - } - - @Override - public Anchor getAnchor(final String dataspaceName, final String anchorName) 
{ - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - return toAnchor(anchorEntity); - } - - @Override - public Collection getAnchors(final String dataspaceName) { - final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final Collection anchorEntities = anchorRepository.findAllByDataspace(dataspaceEntity); - return anchorEntities.stream().map(CpsAdminPersistenceServiceImpl::toAnchor).collect(Collectors.toSet()); - } - - @Override - public Collection getAnchors(final String dataspaceName, final Collection anchorNames) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - return anchorRepository.findAllByDataspaceAndNameIn(dataspaceEntity, anchorNames) - .stream().map(CpsAdminPersistenceServiceImpl::toAnchor).collect(Collectors.toSet()); - } - - @Override - public Collection getAnchorsBySchemaSetName(final String dataspaceName, final String schemaSetName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final SchemaSetEntity schemaSetEntity = schemaSetRepository.getByDataspaceAndName( - dataspaceEntity, schemaSetName); - return anchorRepository.findAllBySchemaSet(schemaSetEntity) - .stream().map(CpsAdminPersistenceServiceImpl::toAnchor) - .collect(Collectors.toSet()); - } - - @Override - public Collection getAnchorsBySchemaSetNames(final String dataspaceName, - final Collection schemaSetNames) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - return anchorRepository.findAllByDataspaceAndSchemaSetNameIn(dataspaceEntity, schemaSetNames) - .stream().map(CpsAdminPersistenceServiceImpl::toAnchor).collect(Collectors.toSet()); - } - - @Override - public Collection queryAnchorNames(final String dataspaceName, final Collection inputModuleNames) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - return anchorRepository.getAnchorNamesByDataspaceIdAndModuleNames(dataspaceEntity.getId(), inputModuleNames, - inputModuleNames.size()); - } - - @Transactional - @Override - public void deleteAnchor(final String dataspaceName, final String anchorName) { - final var anchorEntity = getAnchorEntity(dataspaceName, anchorName); - anchorRepository.delete(anchorEntity); - } - - @Transactional - @Override - public void deleteAnchors(final String dataspaceName, final Collection anchorNames) { - final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - anchorRepository.deleteAllByDataspaceAndNameIn(dataspaceEntity, anchorNames); - } - - @Transactional - @Override - public void updateAnchorSchemaSet(final String dataspaceName, - final String anchorName, - final String schemaSetName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); - final SchemaSetEntity schemaSetEntity = schemaSetRepository - .getByDataspaceAndName(dataspaceEntity, schemaSetName); - anchorRepository.updateAnchorSchemaSetId(schemaSetEntity.getId(), anchorEntity.getId()); - } - - private AnchorEntity getAnchorEntity(final String dataspaceName, final String anchorName) { - final var dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - return anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); - } - - private static Anchor toAnchor(final AnchorEntity anchorEntity) { - return Anchor.builder() - .name(anchorEntity.getName()) - 
.dataspaceName(anchorEntity.getDataspace().getName()) - .schemaSetName(anchorEntity.getSchemaSet().getName()) - .build(); - } - - private static Dataspace toDataspace(final DataspaceEntity dataspaceEntity) { - return Dataspace.builder().name(dataspaceEntity.getName()).build(); - } -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java deleted file mode 100644 index fd47793a7a..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java +++ /dev/null @@ -1,722 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation - * Modifications Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2020-2022 Bell Canada. - * Modifications Copyright (C) 2022-2023 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.impl; - -import static org.onap.cps.spi.PaginationOption.NO_PAGINATION; - -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.ImmutableSet.Builder; -import io.micrometer.core.annotation.Timed; -import jakarta.transaction.Transactional; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.hibernate.StaleStateException; -import org.onap.cps.cpspath.parser.CpsPathQuery; -import org.onap.cps.cpspath.parser.CpsPathUtil; -import org.onap.cps.cpspath.parser.PathParsingException; -import org.onap.cps.spi.CpsDataPersistenceService; -import org.onap.cps.spi.FetchDescendantsOption; -import org.onap.cps.spi.PaginationOption; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.FragmentEntity; -import org.onap.cps.spi.exceptions.AlreadyDefinedException; -import org.onap.cps.spi.exceptions.ConcurrencyException; -import org.onap.cps.spi.exceptions.CpsAdminException; -import org.onap.cps.spi.exceptions.CpsPathException; -import org.onap.cps.spi.exceptions.DataNodeNotFoundException; -import org.onap.cps.spi.exceptions.DataNodeNotFoundExceptionBatch; -import org.onap.cps.spi.model.DataNode; -import org.onap.cps.spi.model.DataNodeBuilder; -import org.onap.cps.spi.repository.AnchorRepository; -import org.onap.cps.spi.repository.DataspaceRepository; -import 
org.onap.cps.spi.repository.FragmentRepository; -import org.onap.cps.spi.utils.SessionManager; -import org.onap.cps.utils.JsonObjectMapper; -import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.stereotype.Service; - -@Service -@Slf4j -@RequiredArgsConstructor -public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService { - - private final DataspaceRepository dataspaceRepository; - private final AnchorRepository anchorRepository; - private final FragmentRepository fragmentRepository; - private final JsonObjectMapper jsonObjectMapper; - private final SessionManager sessionManager; - - private static final String REG_EX_FOR_OPTIONAL_LIST_INDEX = "(\\[@.+?])?)"; - - @Override - public void storeDataNodes(final String dataspaceName, final String anchorName, - final Collection dataNodes) { - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - final List fragmentEntities = new ArrayList<>(dataNodes.size()); - try { - for (final DataNode dataNode: dataNodes) { - final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(anchorEntity, dataNode); - fragmentEntities.add(fragmentEntity); - } - fragmentRepository.saveAll(fragmentEntities); - } catch (final DataIntegrityViolationException exception) { - log.warn("Exception occurred : {} , While saving : {} data nodes, Retrying saving data nodes individually", - exception, dataNodes.size()); - storeDataNodesIndividually(anchorEntity, dataNodes); - } - } - - private void storeDataNodesIndividually(final AnchorEntity anchorEntity, final Collection dataNodes) { - final Collection failedXpaths = new HashSet<>(); - for (final DataNode dataNode: dataNodes) { - try { - final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(anchorEntity, dataNode); - fragmentRepository.save(fragmentEntity); - } catch (final DataIntegrityViolationException dataIntegrityViolationException) { - failedXpaths.add(dataNode.getXpath()); - } - } - if (!failedXpaths.isEmpty()) { - throw AlreadyDefinedException.forDataNodes(failedXpaths, anchorEntity.getName()); - } - } - - /** - * Convert DataNode object into Fragment and places the result in the fragments placeholder. Performs same action - * for all DataNode children recursively. 
- * - * @param anchorEntity anchorEntity - * @param dataNodeToBeConverted dataNode - * @return a Fragment built from current DataNode - */ - private FragmentEntity convertToFragmentWithAllDescendants(final AnchorEntity anchorEntity, - final DataNode dataNodeToBeConverted) { - final FragmentEntity parentFragment = toFragmentEntity(anchorEntity, dataNodeToBeConverted); - final Builder childFragmentsImmutableSetBuilder = ImmutableSet.builder(); - for (final DataNode childDataNode : dataNodeToBeConverted.getChildDataNodes()) { - final FragmentEntity childFragment = convertToFragmentWithAllDescendants(anchorEntity, childDataNode); - childFragmentsImmutableSetBuilder.add(childFragment); - } - parentFragment.setChildFragments(childFragmentsImmutableSetBuilder.build()); - return parentFragment; - } - - @Override - public void addListElements(final String dataspaceName, final String anchorName, final String parentNodeXpath, - final Collection newListElements) { - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - addChildrenDataNodes(anchorEntity, parentNodeXpath, newListElements); - } - - @Override - public void addChildDataNodes(final String dataspaceName, final String anchorName, - final String parentNodeXpath, final Collection dataNodes) { - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - addChildrenDataNodes(anchorEntity, parentNodeXpath, dataNodes); - } - - private void addChildrenDataNodes(final AnchorEntity anchorEntity, final String parentNodeXpath, - final Collection newChildren) { - final FragmentEntity parentFragmentEntity = getFragmentEntity(anchorEntity, parentNodeXpath); - final List fragmentEntities = new ArrayList<>(newChildren.size()); - try { - for (final DataNode newChildAsDataNode : newChildren) { - final FragmentEntity newChildAsFragmentEntity = - convertToFragmentWithAllDescendants(anchorEntity, newChildAsDataNode); - newChildAsFragmentEntity.setParentId(parentFragmentEntity.getId()); - fragmentEntities.add(newChildAsFragmentEntity); - } - fragmentRepository.saveAll(fragmentEntities); - } catch (final DataIntegrityViolationException dataIntegrityViolationException) { - log.warn("Exception occurred : {} , While saving : {} children, retrying using individual save operations", - dataIntegrityViolationException, fragmentEntities.size()); - retrySavingEachChildIndividually(anchorEntity, parentNodeXpath, newChildren); - } - } - - private void addNewChildDataNode(final AnchorEntity anchorEntity, final String parentNodeXpath, - final DataNode newChild) { - final FragmentEntity parentFragmentEntity = getFragmentEntity(anchorEntity, parentNodeXpath); - final FragmentEntity newChildAsFragmentEntity = convertToFragmentWithAllDescendants(anchorEntity, newChild); - newChildAsFragmentEntity.setParentId(parentFragmentEntity.getId()); - try { - fragmentRepository.save(newChildAsFragmentEntity); - } catch (final DataIntegrityViolationException dataIntegrityViolationException) { - throw AlreadyDefinedException.forDataNodes(Collections.singletonList(newChild.getXpath()), - anchorEntity.getName()); - } - } - - private void retrySavingEachChildIndividually(final AnchorEntity anchorEntity, final String parentNodeXpath, - final Collection newChildren) { - final Collection failedXpaths = new HashSet<>(); - for (final DataNode newChild : newChildren) { - try { - addNewChildDataNode(anchorEntity, parentNodeXpath, newChild); - } catch (final AlreadyDefinedException alreadyDefinedException) { - failedXpaths.add(newChild.getXpath()); - } - } - 
if (!failedXpaths.isEmpty()) { - throw AlreadyDefinedException.forDataNodes(failedXpaths, anchorEntity.getName()); - } - } - - @Override - public void batchUpdateDataLeaves(final String dataspaceName, final String anchorName, - final Map> updatedLeavesPerXPath) { - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - - final Collection xpathsOfUpdatedLeaves = updatedLeavesPerXPath.keySet(); - final Collection fragmentEntities = getFragmentEntities(anchorEntity, xpathsOfUpdatedLeaves); - - for (final FragmentEntity fragmentEntity : fragmentEntities) { - final Map updatedLeaves = updatedLeavesPerXPath.get(fragmentEntity.getXpath()); - final String mergedLeaves = mergeLeaves(updatedLeaves, fragmentEntity.getAttributes()); - fragmentEntity.setAttributes(mergedLeaves); - } - - try { - fragmentRepository.saveAll(fragmentEntities); - } catch (final StaleStateException staleStateException) { - retryUpdateDataNodesIndividually(anchorEntity, fragmentEntities); - } - } - - @Override - public void updateDataNodesAndDescendants(final String dataspaceName, final String anchorName, - final Collection updatedDataNodes) { - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - - final Map xpathToUpdatedDataNode = updatedDataNodes.stream() - .collect(Collectors.toMap(DataNode::getXpath, dataNode -> dataNode)); - - final Collection xpaths = xpathToUpdatedDataNode.keySet(); - Collection existingFragmentEntities = getFragmentEntities(anchorEntity, xpaths); - existingFragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities( - FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS, existingFragmentEntities); - - for (final FragmentEntity existingFragmentEntity : existingFragmentEntities) { - final DataNode updatedDataNode = xpathToUpdatedDataNode.get(existingFragmentEntity.getXpath()); - updateFragmentEntityAndDescendantsWithDataNode(existingFragmentEntity, updatedDataNode); - } - - try { - fragmentRepository.saveAll(existingFragmentEntities); - } catch (final StaleStateException staleStateException) { - retryUpdateDataNodesIndividually(anchorEntity, existingFragmentEntities); - } - } - - private void retryUpdateDataNodesIndividually(final AnchorEntity anchorEntity, - final Collection fragmentEntities) { - final Collection failedXpaths = new HashSet<>(); - for (final FragmentEntity dataNodeFragment : fragmentEntities) { - try { - fragmentRepository.save(dataNodeFragment); - } catch (final StaleStateException staleStateException) { - failedXpaths.add(dataNodeFragment.getXpath()); - } - } - if (!failedXpaths.isEmpty()) { - final String failedXpathsConcatenated = String.join(",", failedXpaths); - throw new ConcurrencyException("Concurrent Transactions", String.format( - "DataNodes : %s in Dataspace :'%s' with Anchor : '%s' are updated by another transaction.", - failedXpathsConcatenated, anchorEntity.getDataspace().getName(), anchorEntity.getName())); - } - } - - private void updateFragmentEntityAndDescendantsWithDataNode(final FragmentEntity existingFragmentEntity, - final DataNode newDataNode) { - copyAttributesFromNewDataNode(existingFragmentEntity, newDataNode); - - final Map existingChildrenByXpath = existingFragmentEntity.getChildFragments().stream() - .collect(Collectors.toMap(FragmentEntity::getXpath, childFragmentEntity -> childFragmentEntity)); - - final Collection updatedChildFragments = new HashSet<>(); - for (final DataNode newDataNodeChild : newDataNode.getChildDataNodes()) { - final FragmentEntity childFragment; - if 
(isNewDataNode(newDataNodeChild, existingChildrenByXpath)) { - childFragment = convertToFragmentWithAllDescendants(existingFragmentEntity.getAnchor(), - newDataNodeChild); - } else { - childFragment = existingChildrenByXpath.get(newDataNodeChild.getXpath()); - updateFragmentEntityAndDescendantsWithDataNode(childFragment, newDataNodeChild); - } - updatedChildFragments.add(childFragment); - } - - existingFragmentEntity.getChildFragments().clear(); - existingFragmentEntity.getChildFragments().addAll(updatedChildFragments); - } - - @Override - @Timed(value = "cps.data.persistence.service.datanode.query", - description = "Time taken to query data nodes") - public List queryDataNodes(final String dataspaceName, final String anchorName, final String cpsPath, - final FetchDescendantsOption fetchDescendantsOption) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); - final CpsPathQuery cpsPathQuery; - try { - cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath); - } catch (final PathParsingException pathParsingException) { - throw new CpsPathException(pathParsingException.getMessage()); - } - - Collection fragmentEntities; - fragmentEntities = fragmentRepository.findByAnchorAndCpsPath(anchorEntity, cpsPathQuery); - if (cpsPathQuery.hasAncestorAxis()) { - final Collection ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery); - fragmentEntities = fragmentRepository.findByAnchorAndXpathIn(anchorEntity, ancestorXpaths); - } - fragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, - fragmentEntities); - return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); - } - - @Override - @Timed(value = "cps.data.persistence.service.datanode.query.anchors", - description = "Time taken to query data nodes across all anchors or list of anchors") - public List queryDataNodesAcrossAnchors(final String dataspaceName, final String cpsPath, - final FetchDescendantsOption fetchDescendantsOption, - final PaginationOption paginationOption) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final CpsPathQuery cpsPathQuery; - try { - cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath); - } catch (final PathParsingException e) { - throw new CpsPathException(e.getMessage()); - } - - final List anchorIds; - if (paginationOption == NO_PAGINATION) { - anchorIds = Collections.emptyList(); - } else { - anchorIds = getAnchorIdsForPagination(dataspaceEntity, cpsPathQuery, paginationOption); - if (anchorIds.isEmpty()) { - return Collections.emptyList(); - } - } - Collection fragmentEntities = - fragmentRepository.findByDataspaceAndCpsPath(dataspaceEntity, cpsPathQuery, anchorIds); - - if (cpsPathQuery.hasAncestorAxis()) { - final Collection ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery); - if (anchorIds.isEmpty()) { - fragmentEntities = fragmentRepository.findByDataspaceAndXpathIn(dataspaceEntity, ancestorXpaths); - } else { - fragmentEntities = fragmentRepository.findByAnchorIdsAndXpathIn( - anchorIds.toArray(new Long[0]), ancestorXpaths.toArray(new String[0])); - } - - } - fragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, - fragmentEntities); - return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); - } - - private List createDataNodesFromFragmentEntities(final 
FetchDescendantsOption fetchDescendantsOption, - final Collection fragmentEntities) { - final List dataNodes = new ArrayList<>(fragmentEntities.size()); - for (final FragmentEntity fragmentEntity : fragmentEntities) { - dataNodes.add(toDataNode(fragmentEntity, fetchDescendantsOption)); - } - return Collections.unmodifiableList(dataNodes); - } - - @Override - public String startSession() { - return sessionManager.startSession(); - } - - @Override - public void closeSession(final String sessionId) { - sessionManager.closeSession(sessionId, SessionManager.WITH_COMMIT); - } - - @Override - public void lockAnchor(final String sessionId, final String dataspaceName, - final String anchorName, final Long timeoutInMilliseconds) { - sessionManager.lockAnchor(sessionId, dataspaceName, anchorName, timeoutInMilliseconds); - } - - @Override - public Integer countAnchorsForDataspaceAndCpsPath(final String dataspaceName, final String cpsPath) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final CpsPathQuery cpsPathQuery; - try { - cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath); - } catch (final PathParsingException e) { - throw new CpsPathException(e.getMessage()); - } - final List anchorIdList = getAnchorIdsForPagination(dataspaceEntity, cpsPathQuery, NO_PAGINATION); - return anchorIdList.size(); - } - - private DataNode toDataNode(final FragmentEntity fragmentEntity, - final FetchDescendantsOption fetchDescendantsOption) { - final List childDataNodes = getChildDataNodes(fragmentEntity, fetchDescendantsOption); - Map leaves = new HashMap<>(); - if (fragmentEntity.getAttributes() != null) { - leaves = jsonObjectMapper.convertJsonString(fragmentEntity.getAttributes(), Map.class); - } - return new DataNodeBuilder() - .withXpath(fragmentEntity.getXpath()) - .withLeaves(leaves) - .withDataspace(fragmentEntity.getAnchor().getDataspace().getName()) - .withAnchor(fragmentEntity.getAnchor().getName()) - .withChildDataNodes(childDataNodes).build(); - } - - private FragmentEntity toFragmentEntity(final AnchorEntity anchorEntity, final DataNode dataNode) { - return FragmentEntity.builder() - .anchor(anchorEntity) - .xpath(dataNode.getXpath()) - .attributes(jsonObjectMapper.asJsonString(dataNode.getLeaves())) - .build(); - } - - - - @Override - @Transactional - public void replaceListContent(final String dataspaceName, final String anchorName, final String parentNodeXpath, - final Collection newListElements) { - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - final FragmentEntity parentEntity = getFragmentEntity(anchorEntity, parentNodeXpath); - final String listElementXpathPrefix = getListElementXpathPrefix(newListElements); - final Map existingListElementFragmentEntitiesByXPath = - extractListElementFragmentEntitiesByXPath(parentEntity.getChildFragments(), listElementXpathPrefix); - parentEntity.getChildFragments().removeAll(existingListElementFragmentEntitiesByXPath.values()); - final Set updatedChildFragmentEntities = new HashSet<>(); - for (final DataNode newListElement : newListElements) { - final FragmentEntity existingListElementEntity = - existingListElementFragmentEntitiesByXPath.get(newListElement.getXpath()); - final FragmentEntity entityToBeAdded = getFragmentForReplacement(parentEntity, newListElement, - existingListElementEntity); - updatedChildFragmentEntities.add(entityToBeAdded); - } - parentEntity.getChildFragments().addAll(updatedChildFragmentEntities); - fragmentRepository.save(parentEntity); - } - - @Override - 
@Transactional - public void deleteDataNodes(final String dataspaceName, final String anchorName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - anchorRepository.findByDataspaceAndName(dataspaceEntity, anchorName) - .ifPresent(anchorEntity -> fragmentRepository.deleteByAnchorIn(Collections.singletonList(anchorEntity))); - } - - @Override - @Transactional - public void deleteDataNodes(final String dataspaceName, final Collection anchorNames) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final Collection anchorEntities = - anchorRepository.findAllByDataspaceAndNameIn(dataspaceEntity, anchorNames); - fragmentRepository.deleteByAnchorIn(anchorEntities); - } - - @Override - @Transactional - public void deleteDataNodes(final String dataspaceName, final String anchorName, - final Collection xpathsToDelete) { - deleteDataNodes(dataspaceName, anchorName, xpathsToDelete, false); - } - - private void deleteDataNodes(final String dataspaceName, final String anchorName, - final Collection xpathsToDelete, final boolean onlySupportListDeletion) { - final boolean haveRootXpath = xpathsToDelete.stream().anyMatch(CpsDataPersistenceServiceImpl::isRootXpath); - if (haveRootXpath) { - deleteDataNodes(dataspaceName, anchorName); - return; - } - - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - - final Collection deleteChecklist = getNormalizedXpaths(xpathsToDelete); - final Collection xpathsToExistingContainers = - fragmentRepository.findAllXpathByAnchorAndXpathIn(anchorEntity, deleteChecklist); - if (onlySupportListDeletion) { - final Collection xpathsToExistingListElements = xpathsToExistingContainers.stream() - .filter(CpsPathUtil::isPathToListElement).collect(Collectors.toList()); - deleteChecklist.removeAll(xpathsToExistingListElements); - } else { - deleteChecklist.removeAll(xpathsToExistingContainers); - } - - final Collection xpathsToExistingLists = deleteChecklist.stream() - .filter(xpath -> fragmentRepository.existsByAnchorAndXpathStartsWith(anchorEntity, xpath + "[")) - .collect(Collectors.toList()); - deleteChecklist.removeAll(xpathsToExistingLists); - - if (!deleteChecklist.isEmpty()) { - throw new DataNodeNotFoundExceptionBatch(dataspaceName, anchorName, deleteChecklist); - } - - fragmentRepository.deleteByAnchorIdAndXpaths(anchorEntity.getId(), xpathsToExistingContainers); - fragmentRepository.deleteListsByAnchorIdAndXpaths(anchorEntity.getId(), xpathsToExistingLists); - } - - @Override - @Transactional - public void deleteListDataNode(final String dataspaceName, final String anchorName, - final String targetXpath) { - deleteDataNode(dataspaceName, anchorName, targetXpath, true); - } - - @Override - @Transactional - public void deleteDataNode(final String dataspaceName, final String anchorName, final String targetXpath) { - deleteDataNode(dataspaceName, anchorName, targetXpath, false); - } - - private void deleteDataNode(final String dataspaceName, final String anchorName, final String targetXpath, - final boolean onlySupportListNodeDeletion) { - final String normalizedXpath = getNormalizedXpath(targetXpath); - try { - deleteDataNodes(dataspaceName, anchorName, Collections.singletonList(normalizedXpath), - onlySupportListNodeDeletion); - } catch (final DataNodeNotFoundExceptionBatch dataNodeNotFoundExceptionBatch) { - throw new DataNodeNotFoundException(dataspaceName, anchorName, targetXpath); - } - } - - @Override - @Timed(value = "cps.data.persistence.service.datanode.get", 
- description = "Time taken to get a data node") - public Collection getDataNodes(final String dataspaceName, final String anchorName, - final String xpath, - final FetchDescendantsOption fetchDescendantsOption) { - final String targetXpath = getNormalizedXpath(xpath); - final Collection dataNodes = getDataNodesForMultipleXpaths(dataspaceName, anchorName, - Collections.singletonList(targetXpath), fetchDescendantsOption); - if (dataNodes.isEmpty()) { - throw new DataNodeNotFoundException(dataspaceName, anchorName, xpath); - } - return dataNodes; - } - - @Override - @Timed(value = "cps.data.persistence.service.datanode.batch.get", - description = "Time taken to get data nodes") - public Collection getDataNodesForMultipleXpaths(final String dataspaceName, final String anchorName, - final Collection xpaths, - final FetchDescendantsOption fetchDescendantsOption) { - final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); - Collection fragmentEntities = getFragmentEntities(anchorEntity, xpaths); - fragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, - fragmentEntities); - return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); - } - - private List getChildDataNodes(final FragmentEntity fragmentEntity, - final FetchDescendantsOption fetchDescendantsOption) { - if (fetchDescendantsOption.hasNext()) { - return fragmentEntity.getChildFragments().stream() - .map(childFragmentEntity -> toDataNode(childFragmentEntity, fetchDescendantsOption.next())) - .collect(Collectors.toList()); - } - return Collections.emptyList(); - } - - private AnchorEntity getAnchorEntity(final String dataspaceName, final String anchorName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - return anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); - } - - private List getAnchorIdsForPagination(final DataspaceEntity dataspaceEntity, final CpsPathQuery cpsPathQuery, - final PaginationOption paginationOption) { - return fragmentRepository.findAnchorIdsForPagination(dataspaceEntity, cpsPathQuery, paginationOption); - } - - private static String getNormalizedXpath(final String xpathSource) { - if (isRootXpath(xpathSource)) { - return xpathSource; - } - try { - return CpsPathUtil.getNormalizedXpath(xpathSource); - } catch (final PathParsingException pathParsingException) { - throw new CpsPathException(pathParsingException.getMessage()); - } - } - - private static Collection getNormalizedXpaths(final Collection xpaths) { - final Collection normalizedXpaths = new HashSet<>(xpaths.size()); - for (final String xpath : xpaths) { - try { - normalizedXpaths.add(getNormalizedXpath(xpath)); - } catch (final CpsPathException cpsPathException) { - log.warn("Error parsing xpath \"{}\": {}", xpath, cpsPathException.getMessage()); - } - } - return normalizedXpaths; - } - - private FragmentEntity getFragmentEntity(final AnchorEntity anchorEntity, final String xpath) { - final FragmentEntity fragmentEntity; - if (isRootXpath(xpath)) { - fragmentEntity = fragmentRepository.findOneByAnchorId(anchorEntity.getId()).orElse(null); - } else { - fragmentEntity = fragmentRepository.getByAnchorAndXpath(anchorEntity, getNormalizedXpath(xpath)); - } - if (fragmentEntity == null) { - throw new DataNodeNotFoundException(anchorEntity.getDataspace().getName(), anchorEntity.getName(), xpath); - } - return fragmentEntity; - } - - private Collection getFragmentEntities(final AnchorEntity anchorEntity, - final 
Collection xpaths) { - final Collection normalizedXpaths = getNormalizedXpaths(xpaths); - - final boolean haveRootXpath = normalizedXpaths.removeIf(CpsDataPersistenceServiceImpl::isRootXpath); - - final List fragmentEntities = fragmentRepository.findByAnchorAndXpathIn(anchorEntity, - normalizedXpaths); - - for (final FragmentEntity fragmentEntity : fragmentEntities) { - normalizedXpaths.remove(fragmentEntity.getXpath()); - } - - for (final String xpath : normalizedXpaths) { - if (!CpsPathUtil.isPathToListElement(xpath)) { - fragmentEntities.addAll(fragmentRepository.findListByAnchorAndXpath(anchorEntity, xpath)); - } - } - - if (haveRootXpath) { - fragmentEntities.addAll(fragmentRepository.findRootsByAnchorId(anchorEntity.getId())); - } - - return fragmentEntities; - } - - private static String getListElementXpathPrefix(final Collection newListElements) { - if (newListElements.isEmpty()) { - throw new CpsAdminException("Invalid list replacement", - "Cannot replace list elements with empty collection"); - } - final String firstChildNodeXpath = newListElements.iterator().next().getXpath(); - return firstChildNodeXpath.substring(0, firstChildNodeXpath.lastIndexOf('[') + 1); - } - - private FragmentEntity getFragmentForReplacement(final FragmentEntity parentEntity, - final DataNode newListElement, - final FragmentEntity existingListElementEntity) { - if (existingListElementEntity == null) { - return convertToFragmentWithAllDescendants(parentEntity.getAnchor(), newListElement); - } - if (newListElement.getChildDataNodes().isEmpty()) { - copyAttributesFromNewDataNode(existingListElementEntity, newListElement); - existingListElementEntity.getChildFragments().clear(); - } else { - updateFragmentEntityAndDescendantsWithDataNode(existingListElementEntity, newListElement); - } - return existingListElementEntity; - } - - private String getOrderedLeavesAsJson(final Map currentLeaves) { - final Map sortedLeaves = new TreeMap<>(String::compareTo); - sortedLeaves.putAll(currentLeaves); - return jsonObjectMapper.asJsonString(sortedLeaves); - } - - private String getOrderedLeavesAsJson(final String currentLeavesAsString) { - if (currentLeavesAsString == null) { - return "{}"; - } - final Map sortedLeaves = jsonObjectMapper.convertJsonString(currentLeavesAsString, - TreeMap.class); - return jsonObjectMapper.asJsonString(sortedLeaves); - } - - private static Map extractListElementFragmentEntitiesByXPath( - final Set childEntities, final String listElementXpathPrefix) { - return childEntities.stream() - .filter(fragmentEntity -> fragmentEntity.getXpath().startsWith(listElementXpathPrefix)) - .collect(Collectors.toMap(FragmentEntity::getXpath, fragmentEntity -> fragmentEntity)); - } - - private static Set processAncestorXpath(final Collection fragmentEntities, - final CpsPathQuery cpsPathQuery) { - final Set ancestorXpath = new HashSet<>(); - final Pattern pattern = - Pattern.compile("(.*/" + Pattern.quote(cpsPathQuery.getAncestorSchemaNodeIdentifier()) - + REG_EX_FOR_OPTIONAL_LIST_INDEX + "/.*"); - for (final FragmentEntity fragmentEntity : fragmentEntities) { - final Matcher matcher = pattern.matcher(fragmentEntity.getXpath()); - if (matcher.matches()) { - ancestorXpath.add(matcher.group(1)); - } - } - return ancestorXpath; - } - - private static boolean isRootXpath(final String xpath) { - return "/".equals(xpath) || "".equals(xpath); - } - - private static boolean isNewDataNode(final DataNode replacementDataNode, - final Map existingListElementsByXpath) { - return 
!existingListElementsByXpath.containsKey(replacementDataNode.getXpath()); - } - - private void copyAttributesFromNewDataNode(final FragmentEntity existingFragmentEntity, - final DataNode newDataNode) { - final String oldOrderedLeavesAsJson = getOrderedLeavesAsJson(existingFragmentEntity.getAttributes()); - final String newOrderedLeavesAsJson = getOrderedLeavesAsJson(newDataNode.getLeaves()); - if (!oldOrderedLeavesAsJson.equals(newOrderedLeavesAsJson)) { - existingFragmentEntity.setAttributes(jsonObjectMapper.asJsonString(newDataNode.getLeaves())); - } - } - - private String mergeLeaves(final Map updateLeaves, final String currentLeavesAsString) { - Map currentLeavesAsMap = new HashMap<>(); - if (currentLeavesAsString != null) { - currentLeavesAsMap = jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class); - currentLeavesAsMap.putAll(updateLeaves); - } - - if (currentLeavesAsMap.isEmpty()) { - return ""; - } - return jsonObjectMapper.asJsonString(currentLeavesAsMap); - } -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java deleted file mode 100755 index 2c4cc7486b..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java +++ /dev/null @@ -1,432 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020-2024 Nordix Foundation - * Modifications Copyright (C) 2020-2022 Bell Canada. - * Modifications Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2022 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
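For reference, the mergeLeaves(...) helper removed in this hunk overlays updated leaf values on the stored attributes JSON and returns the merged document (or an empty string when nothing was stored). A minimal stand-alone sketch of the same merge semantics, using plain Jackson rather than the project's JsonObjectMapper wrapper; the class and method names below are illustrative only:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.HashMap;
    import java.util.Map;

    class LeafMergeSketch {

        private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

        // Overlays the updated leaves onto the currently stored JSON attributes.
        // As in the removed helper, a null stored document yields an empty string.
        static String mergeLeaves(final Map<String, Object> updateLeaves,
                                  final String currentLeavesAsJson) throws Exception {
            Map<String, Object> currentLeaves = new HashMap<>();
            if (currentLeavesAsJson != null) {
                currentLeaves = OBJECT_MAPPER.readValue(currentLeavesAsJson, Map.class);
                currentLeaves.putAll(updateLeaves);
            }
            return currentLeaves.isEmpty() ? "" : OBJECT_MAPPER.writeValueAsString(currentLeaves);
        }
    }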
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.impl; - -import static com.google.common.base.Preconditions.checkNotNull; - -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableSet; -import jakarta.transaction.Transactional; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.codec.digest.DigestUtils; -import org.apache.commons.lang3.StringUtils; -import org.hibernate.exception.ConstraintViolationException; -import org.onap.cps.spi.CpsModulePersistenceService; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.SchemaSetEntity; -import org.onap.cps.spi.entities.YangResourceEntity; -import org.onap.cps.spi.entities.YangResourceModuleReference; -import org.onap.cps.spi.exceptions.AlreadyDefinedException; -import org.onap.cps.spi.exceptions.DuplicatedYangResourceException; -import org.onap.cps.spi.exceptions.ModelValidationException; -import org.onap.cps.spi.model.ModuleDefinition; -import org.onap.cps.spi.model.ModuleReference; -import org.onap.cps.spi.model.SchemaSet; -import org.onap.cps.spi.repository.DataspaceRepository; -import org.onap.cps.spi.repository.ModuleReferenceRepository; -import org.onap.cps.spi.repository.SchemaSetRepository; -import org.onap.cps.spi.repository.YangResourceRepository; -import org.opendaylight.yangtools.yang.common.Revision; -import org.opendaylight.yangtools.yang.model.repo.api.RevisionSourceIdentifier; -import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource; -import org.opendaylight.yangtools.yang.parser.api.YangSyntaxErrorException; -import org.opendaylight.yangtools.yang.parser.rfc7950.repo.YangModelDependencyInfo; -import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.retry.RetryContext; -import org.springframework.retry.annotation.Backoff; -import org.springframework.retry.annotation.Retryable; -import org.springframework.retry.support.RetrySynchronizationManager; -import org.springframework.stereotype.Component; - -@Slf4j -@Component -@RequiredArgsConstructor -public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceService { - - private static final String YANG_RESOURCE_CHECKSUM_CONSTRAINT_NAME = "yang_resource_checksum_key"; - private static final String NO_MODULE_NAME_FILTER = null; - private static final String NO_MODULE_REVISION = null; - private static final Pattern CHECKSUM_EXCEPTION_PATTERN = Pattern.compile(".*\\(checksum\\)=\\((\\w+)\\).*"); - private static final Pattern RFC6020_RECOMMENDED_FILENAME_PATTERN = Pattern - .compile("([\\w-]+)@(\\d{4}-\\d{2}-\\d{2})(?:\\.yang)?", Pattern.CASE_INSENSITIVE); - - private final YangResourceRepository yangResourceRepository; - - private final SchemaSetRepository schemaSetRepository; - - private final DataspaceRepository dataspaceRepository; - - private final ModuleReferenceRepository moduleReferenceRepository; - - @Override - public Map 
getYangSchemaResources(final String dataspaceName, final String schemaSetName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final SchemaSetEntity schemaSetEntity = - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); - return schemaSetEntity.getYangResources().stream().collect( - Collectors.toMap(YangResourceEntity::getFileName, YangResourceEntity::getContent)); - } - - @Override - public Collection getYangResourceModuleReferences(final String dataspaceName) { - final Set yangResourceModuleReferenceList = - yangResourceRepository.findAllModuleReferencesByDataspace(dataspaceName); - return yangResourceModuleReferenceList.stream().map(CpsModulePersistenceServiceImpl::toModuleReference) - .collect(Collectors.toList()); - } - - @Override - public Collection getYangResourceModuleReferences(final String dataspaceName, - final String anchorName) { - final Set yangResourceModuleReferenceList = - yangResourceRepository - .findAllModuleReferencesByDataspaceAndAnchor(dataspaceName, anchorName); - return yangResourceModuleReferenceList.stream().map(CpsModulePersistenceServiceImpl::toModuleReference) - .collect(Collectors.toList()); - } - - @Override - public Collection getYangResourceDefinitions(final String dataspaceName, - final String anchorName) { - final Set yangResourceEntities = - yangResourceRepository.findAllModuleDefinitionsByDataspaceAndAnchorAndModule(dataspaceName, anchorName, - NO_MODULE_NAME_FILTER, NO_MODULE_REVISION); - return convertYangResourceEntityToModuleDefinition(yangResourceEntities); - } - - @Override - public Collection getYangResourceDefinitionsByAnchorAndModule(final String dataspaceName, - final String anchorName, - final String moduleName, - final String moduleRevision) { - final Set yangResourceEntities = - yangResourceRepository.findAllModuleDefinitionsByDataspaceAndAnchorAndModule(dataspaceName, anchorName, - moduleName, moduleRevision); - return convertYangResourceEntityToModuleDefinition(yangResourceEntities); - } - - private List convertYangResourceEntityToModuleDefinition(final Set - yangResourceEntities) { - final List resultModuleDefinitions = new ArrayList<>(yangResourceEntities.size()); - for (final YangResourceEntity yangResourceEntity: yangResourceEntities) { - resultModuleDefinitions.add(toModuleDefinition(yangResourceEntity)); - } - return resultModuleDefinitions; - } - - @Override - @Transactional - // A retry is made to store the schema set if it fails because of duplicated yang resource exception that - // can occur in case of specific concurrent requests. 
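The retry comment above refers to the Spring Retry annotation that follows it: up to 5 attempts with a randomised delay that starts around 200 ms, doubles per attempt and is capped at 2 s. A hedged, stand-alone illustration of the same backoff parameters on a hypothetical bean (it assumes @EnableRetry is configured on the application and is not part of this patch):

    import org.springframework.retry.annotation.Backoff;
    import org.springframework.retry.annotation.Retryable;
    import org.springframework.stereotype.Component;

    @Component
    class RetryingStoreSketch {

        // Attempt 1 fails -> wait ~200 ms, attempt 2 -> ~400 ms, ... capped at 2000 ms; 5 attempts in total.
        // 'random = true' jitters each delay so concurrent callers do not retry in lock-step.
        @Retryable(retryFor = IllegalStateException.class, maxAttempts = 5,
                backoff = @Backoff(random = true, delay = 200, maxDelay = 2000, multiplier = 2))
        void store(final String payload) {
            // persistence call that may fail transiently
        }
    }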
- @Retryable(retryFor = DuplicatedYangResourceException.class, maxAttempts = 5, backoff = - @Backoff(random = true, delay = 200, maxDelay = 2000, multiplier = 2)) - public void storeSchemaSet(final String dataspaceName, final String schemaSetName, - final Map moduleReferenceNameToContentMap) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final Set yangResourceEntities = synchronizeYangResources(moduleReferenceNameToContentMap); - final SchemaSetEntity schemaSetEntity = new SchemaSetEntity(); - schemaSetEntity.setName(schemaSetName); - schemaSetEntity.setDataspace(dataspaceEntity); - schemaSetEntity.setYangResources(yangResourceEntities); - try { - schemaSetRepository.save(schemaSetEntity); - } catch (final DataIntegrityViolationException e) { - throw AlreadyDefinedException.forSchemaSet(schemaSetName, dataspaceName, e); - } - } - - @Override - public Collection getSchemaSetsByDataspaceName(final String dataspaceName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final List schemaSetEntities = schemaSetRepository.findByDataspace(dataspaceEntity); - return schemaSetEntities.stream() - .map(CpsModulePersistenceServiceImpl::toSchemaSet).collect(Collectors.toList()); - } - - @Override - @Transactional - // A retry is made to store the schema set if it fails because of duplicated yang resource exception that - // can occur in case of specific concurrent requests. - @Retryable(retryFor = DuplicatedYangResourceException.class, maxAttempts = 5, backoff = - @Backoff(random = true, delay = 200, maxDelay = 2000, multiplier = 2)) - public void storeSchemaSetFromModules(final String dataspaceName, final String schemaSetName, - final Map newModuleNameToContentMap, - final Collection allModuleReferences) { - storeSchemaSet(dataspaceName, schemaSetName, newModuleNameToContentMap); - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final SchemaSetEntity schemaSetEntity = - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); - final List allYangResourceIds = - yangResourceRepository.getResourceIdsByModuleReferences(allModuleReferences); - yangResourceRepository.insertSchemaSetIdYangResourceId(schemaSetEntity.getId(), allYangResourceIds); - } - - @Override - @Transactional - public void deleteSchemaSet(final String dataspaceName, final String schemaSetName) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final SchemaSetEntity schemaSetEntity = - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); - schemaSetRepository.delete(schemaSetEntity); - } - - @Override - @Transactional - public void deleteSchemaSets(final String dataspaceName, final Collection schemaSetNames) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - schemaSetRepository.deleteByDataspaceAndNameIn(dataspaceEntity, schemaSetNames); - } - - - @Override - @Transactional - public void updateSchemaSetFromModules(final String dataspaceName, final String schemaSetName, - final Map newModuleNameToContentMap, - final Collection allModuleReferences) { - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final SchemaSetEntity schemaSetEntity = - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, schemaSetName); - storeAndLinkNewModules(newModuleNameToContentMap, schemaSetEntity); - updateAllModuleReferences(allModuleReferences, schemaSetEntity.getId()); 
- } - - - - @Override - @Transactional - public void deleteUnusedYangResourceModules() { - yangResourceRepository.deleteOrphans(); - } - - @Override - public Collection identifyNewModuleReferences( - final Collection moduleReferencesToCheck) { - return moduleReferenceRepository.identifyNewModuleReferences(moduleReferencesToCheck); - } - - @Override - public Collection getModuleReferencesByAttribute(final String dataspaceName, - final String anchorName, - final Map parentAttributes, - final Map childAttributes) { - return moduleReferenceRepository.findModuleReferences(dataspaceName, anchorName, parentAttributes, - childAttributes); - } - - private Set synchronizeYangResources( - final Map moduleReferenceNameToContentMap) { - final Map checksumToEntityMap = moduleReferenceNameToContentMap.entrySet().stream() - .map(entry -> { - final String checksum = DigestUtils.sha256Hex(entry.getValue().getBytes(StandardCharsets.UTF_8)); - final Map moduleNameAndRevisionMap = createModuleNameAndRevisionMap(entry.getKey(), - entry.getValue()); - final YangResourceEntity yangResourceEntity = new YangResourceEntity(); - yangResourceEntity.setFileName(entry.getKey()); - yangResourceEntity.setContent(entry.getValue()); - yangResourceEntity.setModuleName(moduleNameAndRevisionMap.get("moduleName")); - yangResourceEntity.setRevision(moduleNameAndRevisionMap.get("revision")); - yangResourceEntity.setChecksum(checksum); - return yangResourceEntity; - }) - .collect(Collectors.toMap( - YangResourceEntity::getChecksum, - entity -> entity - )); - - final List existingYangResourceEntities = - yangResourceRepository.findAllByChecksumIn(checksumToEntityMap.keySet()); - existingYangResourceEntities.forEach(yangFile -> checksumToEntityMap.remove(yangFile.getChecksum())); - - final Collection newYangResourceEntities = checksumToEntityMap.values(); - if (!newYangResourceEntities.isEmpty()) { - try { - yangResourceRepository.saveAll(newYangResourceEntities); - } catch (final DataIntegrityViolationException dataIntegrityViolationException) { - // Throw a CPS duplicated Yang resource exception if the cause of the error is a yang checksum - // database constraint violation. - // If it is not, then throw the original exception - final Optional convertedException = - convertToDuplicatedYangResourceException( - dataIntegrityViolationException, newYangResourceEntities); - convertedException.ifPresent( - e -> { - final RetryContext retryContext = RetrySynchronizationManager.getContext(); - int retryCount = retryContext == null ? 0 : retryContext.getRetryCount(); - log.warn("Cannot persist duplicated yang resource. System will attempt this method " - + "up to 5 times. Current retry count : {}", ++retryCount, e); - }); - throw convertedException.isPresent() ? 
convertedException.get() : dataIntegrityViolationException; - } - } - - return ImmutableSet.builder() - .addAll(existingYangResourceEntities) - .addAll(newYangResourceEntities) - .build(); - } - - private static Map createModuleNameAndRevisionMap(final String sourceName, final String source) { - final Map metaDataMap = new HashMap<>(); - final RevisionSourceIdentifier revisionSourceIdentifier = - createIdentifierFromSourceName(checkNotNull(sourceName)); - - final YangTextSchemaSource tempYangTextSchemaSource = new YangTextSchemaSource(revisionSourceIdentifier) { - @Override - public Optional getSymbolicName() { - return Optional.empty(); - } - - @Override - protected MoreObjects.ToStringHelper addToStringAttributes( - final MoreObjects.ToStringHelper toStringHelper) { - return toStringHelper; - } - - @Override - public InputStream openStream() { - return new ByteArrayInputStream(source.getBytes(StandardCharsets.UTF_8)); - } - }; - try { - final YangModelDependencyInfo yangModelDependencyInfo - = YangModelDependencyInfo.forYangText(tempYangTextSchemaSource); - metaDataMap.put("moduleName", yangModelDependencyInfo.getName()); - metaDataMap.put("revision", yangModelDependencyInfo.getFormattedRevision()); - } catch (final YangSyntaxErrorException | IOException e) { - throw new ModelValidationException("Yang resource is invalid.", - String.format("Yang syntax validation failed for resource %s:%n%s", sourceName, e.getMessage()), e); - } - return metaDataMap; - } - - private static RevisionSourceIdentifier createIdentifierFromSourceName(final String sourceName) { - final Matcher matcher = RFC6020_RECOMMENDED_FILENAME_PATTERN.matcher(sourceName); - if (matcher.matches()) { - return RevisionSourceIdentifier.create(matcher.group(1), Revision.of(matcher.group(2))); - } - return RevisionSourceIdentifier.create(sourceName); - } - - /** - * Convert the specified data integrity violation exception into a CPS duplicated Yang resource exception - * if the cause of the error is a yang checksum database constraint violation. - * - * @param originalException the original db exception. - * @param yangResourceEntities the collection of Yang resources involved in the db failure. - * @return an optional converted CPS duplicated Yang resource exception. The optional is empty if the original - * cause of the error is not a yang checksum database constraint violation. 
- */ - private Optional convertToDuplicatedYangResourceException( - final DataIntegrityViolationException originalException, - final Collection yangResourceEntities) { - - // The exception result - DuplicatedYangResourceException duplicatedYangResourceException = null; - - final Throwable cause = originalException.getCause(); - if (cause instanceof ConstraintViolationException) { - final ConstraintViolationException constraintException = (ConstraintViolationException) cause; - if (YANG_RESOURCE_CHECKSUM_CONSTRAINT_NAME.equals(constraintException.getConstraintName())) { - // Db constraint related to yang resource checksum uniqueness is not respected - final String checksumInError = getDuplicatedChecksumFromException(constraintException); - final String nameInError = getNameForChecksum(checksumInError, yangResourceEntities); - duplicatedYangResourceException = - new DuplicatedYangResourceException(nameInError, checksumInError, constraintException); - } - } - - return Optional.ofNullable(duplicatedYangResourceException); - - } - - private String getNameForChecksum(final String checksum, - final Collection yangResourceEntities) { - final Optional optionalFileName = yangResourceEntities.stream() - .filter(entity -> StringUtils.equals(checksum, (entity.getChecksum()))) - .findFirst() - .map(YangResourceEntity::getFileName); - return optionalFileName.orElse("no filename"); - } - - private String getDuplicatedChecksumFromException(final ConstraintViolationException exception) { - final Matcher matcher = CHECKSUM_EXCEPTION_PATTERN.matcher(exception.getSQLException().getMessage()); - if (matcher.find()) { - return matcher.group(1); - } - return "no checksum found"; - } - - private static ModuleReference toModuleReference( - final YangResourceModuleReference yangResourceModuleReference) { - return ModuleReference.builder() - .moduleName(yangResourceModuleReference.getModuleName()) - .revision(yangResourceModuleReference.getRevision()) - .build(); - } - - private static ModuleDefinition toModuleDefinition(final YangResourceEntity yangResourceEntity) { - return new ModuleDefinition( - yangResourceEntity.getModuleName(), - yangResourceEntity.getRevision(), - yangResourceEntity.getContent()); - } - - private static SchemaSet toSchemaSet(final SchemaSetEntity schemaSetEntity) { - return SchemaSet.builder().name(schemaSetEntity.getName()) - .dataspaceName(schemaSetEntity.getDataspace().getName()).build(); - } - - private void storeAndLinkNewModules(final Map newModuleNameToContentMap, - final SchemaSetEntity schemaSetEntity) { - final Set yangResourceEntities - = new HashSet<>(synchronizeYangResources(newModuleNameToContentMap)); - schemaSetEntity.setYangResources(yangResourceEntities); - schemaSetRepository.save(schemaSetEntity); - } - - private void updateAllModuleReferences(final Collection allModuleReferences, - final Integer schemaSetEntityId) { - yangResourceRepository.deleteSchemaSetYangResourceForSchemaSetId(schemaSetEntityId); - final List allYangResourceIds = - yangResourceRepository.getResourceIdsByModuleReferences(allModuleReferences); - yangResourceRepository.insertSchemaSetIdYangResourceId(schemaSetEntityId, allYangResourceIds); - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/utils/CpsValidatorImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/utils/CpsValidatorImpl.java deleted file mode 100644 index c727388b25..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/impl/utils/CpsValidatorImpl.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * 
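The conversion method above works by unwrapping Hibernate's ConstraintViolationException from Spring's DataIntegrityViolationException and comparing the violated constraint name against the yang-resource checksum constraint. A compact sketch of that unwrapping pattern with a hypothetical result type (the exception classes are the real Spring/Hibernate ones; everything else is illustrative):

    import java.util.Optional;
    import org.hibernate.exception.ConstraintViolationException;
    import org.springframework.dao.DataIntegrityViolationException;

    class ConstraintMappingSketch {

        private static final String CHECKSUM_CONSTRAINT_NAME = "yang_resource_checksum_key";

        // Only a checksum-uniqueness violation is translated; anything else stays with the caller.
        static Optional<String> describeChecksumViolation(final DataIntegrityViolationException exception) {
            if (exception.getCause() instanceof ConstraintViolationException constraintViolationException
                    && CHECKSUM_CONSTRAINT_NAME.equals(constraintViolationException.getConstraintName())) {
                return Optional.of("duplicated yang resource, driver message: "
                        + constraintViolationException.getSQLException().getMessage());
            }
            return Optional.empty();
        }
    }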
============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.impl.utils; - -import com.google.common.collect.Lists; -import java.util.Arrays; -import java.util.Collection; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.onap.cps.spi.PaginationOption; -import org.onap.cps.spi.exceptions.DataValidationException; -import org.onap.cps.spi.utils.CpsValidator; -import org.springframework.stereotype.Component; - -@Slf4j -@Component -@RequiredArgsConstructor -public class CpsValidatorImpl implements CpsValidator { - - private static final char[] UNSUPPORTED_NAME_CHARACTERS = "!\" #$%&'()*+,./\\:;<=>?@[]^`{|}~".toCharArray(); - - @Override - public void validateNameCharacters(final String... names) { - validateNameCharacters(Arrays.asList(names)); - } - - @Override - public void validateNameCharacters(final Iterable names) { - for (final String name : names) { - final Collection charactersOfName = Lists.charactersOf(name); - for (final char unsupportedCharacter : UNSUPPORTED_NAME_CHARACTERS) { - if (charactersOfName.contains(unsupportedCharacter)) { - throw new DataValidationException("Name or ID Validation Error.", - name + " invalid token encountered at position " - + (name.indexOf(unsupportedCharacter) + 1)); - } - } - } - } - - @Override - public void validatePaginationOption(final PaginationOption paginationOption) { - if (PaginationOption.NO_PAGINATION == paginationOption) { - return; - } - - if (!paginationOption.isValidPaginationOption()) { - throw new DataValidationException("Pagination validation error.", - "Invalid page index or size"); - } - } -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java deleted file mode 100755 index d78a016c2e..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2021-2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
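The validator shown above rejects a fixed set of characters in names and checks pagination options. A short usage sketch against the (pre-move) interfaces it implements; the caller class below is hypothetical:

    import org.onap.cps.spi.exceptions.DataValidationException;
    import org.onap.cps.spi.utils.CpsValidator;

    class ValidatorUsageSketch {

        static void example(final CpsValidator cpsValidator) {
            cpsValidator.validateNameCharacters("bookstore-anchor");   // '-' is not in the unsupported set, passes
            try {
                cpsValidator.validateNameCharacters("book#store");     // '#' is unsupported
            } catch (final DataValidationException dataValidationException) {
                // message reports the 1-based position of the first unsupported character (5 here)
            }
        }
    }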
- * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.Collection; -import java.util.Optional; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.SchemaSetEntity; -import org.onap.cps.spi.exceptions.AnchorNotFoundException; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Modifying; -import org.springframework.data.jpa.repository.Query; -import org.springframework.data.repository.query.Param; -import org.springframework.stereotype.Repository; - -@Repository -public interface AnchorRepository extends JpaRepository { - - Optional findByDataspaceAndName(DataspaceEntity dataspaceEntity, String name); - - default AnchorEntity getByDataspaceAndName(DataspaceEntity dataspace, String anchorName) { - return findByDataspaceAndName(dataspace, anchorName) - .orElseThrow(() -> new AnchorNotFoundException(anchorName, dataspace.getName())); - } - - Collection findAllByDataspace(DataspaceEntity dataspaceEntity); - - Collection findAllBySchemaSet(SchemaSetEntity schemaSetEntity); - - @Query(value = "SELECT * FROM anchor WHERE dataspace_id = :dataspaceId AND name = ANY (:anchorNames)", - nativeQuery = true) - Collection findAllByDataspaceIdAndNameIn(@Param("dataspaceId") int dataspaceId, - @Param("anchorNames") String[] anchorNames); - - default Collection findAllByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity, - final Collection anchorNames) { - return findAllByDataspaceIdAndNameIn(dataspaceEntity.getId(), anchorNames.toArray(new String[0])); - } - - @Query(value = "SELECT a.* FROM anchor a" - + " LEFT OUTER JOIN schema_set s ON a.schema_set_id = s.id" - + " WHERE a.dataspace_id = :dataspaceId AND s.name = ANY (:schemaSetNames)", - nativeQuery = true) - Collection findAllByDataspaceIdAndSchemaSetNameIn(@Param("dataspaceId") int dataspaceId, - @Param("schemaSetNames") String[] schemaSetNames); - - default Collection findAllByDataspaceAndSchemaSetNameIn(final DataspaceEntity dataspaceEntity, - final Collection schemaSetNames) { - return findAllByDataspaceIdAndSchemaSetNameIn(dataspaceEntity.getId(), schemaSetNames.toArray(new String[0])); - } - - Integer countByDataspace(DataspaceEntity dataspaceEntity); - - @Query(value = """ - SELECT - anchor.name - FROM - yang_resource - JOIN schema_set_yang_resources ON schema_set_yang_resources.yang_resource_id = yang_resource.id - JOIN schema_set ON schema_set.id = schema_set_yang_resources.schema_set_id - JOIN anchor ON anchor.schema_set_id = schema_set.id - WHERE - schema_set.dataspace_id = :dataspaceId - AND module_name = ANY ( :moduleNames ) - GROUP BY - anchor.id, - anchor.name, - anchor.dataspace_id, - anchor.schema_set_id - HAVING - COUNT(DISTINCT module_name) = :sizeOfModuleNames - """, nativeQuery = true) - Collection getAnchorNamesByDataspaceIdAndModuleNames(@Param("dataspaceId") int dataspaceId, - @Param("moduleNames") String[] moduleNames, - @Param("sizeOfModuleNames") int sizeOfModuleNames); - - default Collection getAnchorNamesByDataspaceIdAndModuleNames(final int dataspaceId, - final Collection moduleNames, - final int sizeOfModuleNames) { - final String[] moduleNamesArray = moduleNames.toArray(new String[0]); - return 
getAnchorNamesByDataspaceIdAndModuleNames(dataspaceId, moduleNamesArray, sizeOfModuleNames); - } - - @Modifying - @Query(value = "DELETE FROM anchor WHERE dataspace_id = :dataspaceId AND name = ANY (:anchorNames)", - nativeQuery = true) - void deleteAllByDataspaceIdAndNameIn(@Param("dataspaceId") int dataspaceId, - @Param("anchorNames") String[] anchorNames); - - default void deleteAllByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity, - final Collection anchorNames) { - deleteAllByDataspaceIdAndNameIn(dataspaceEntity.getId(), anchorNames.toArray(new String[0])); - } - - @Modifying - @Query(value = "UPDATE anchor SET schema_set_id =:schemaSetId WHERE id = :anchorId ", nativeQuery = true) - void updateAnchorSchemaSetId(@Param("schemaSetId") int schemaSetId, @Param("anchorId") long anchorId); - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/DataspaceRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/DataspaceRepository.java deleted file mode 100755 index b1ce127c4a..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/DataspaceRepository.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020 Bell Canada. All rights reserved. - * Modifications Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.Optional; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.exceptions.DataspaceNotFoundException; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface DataspaceRepository extends JpaRepository { - - Optional findByName(String name); - - /** - * Get a dataspace by name. - * throws a DataspaceNotFoundException if it does not exist - * - * @param name the name of the dataspace - * @return the Dataspace found - */ - default DataspaceEntity getByName(final String name) { - return findByName(name).orElseThrow(() -> new DataspaceNotFoundException(name)); - } -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java deleted file mode 100644 index 2460db869a..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation. 
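AnchorRepository and the other repositories in this hunk repeat one idiom: a native query bound to a String[] via PostgreSQL's "= ANY (...)", wrapped by a default method that converts a Collection into that array. A generic sketch of the idiom on a hypothetical entity and repository (none of these types are part of the patch):

    import jakarta.persistence.Entity;
    import jakarta.persistence.Id;
    import jakarta.persistence.Table;
    import java.util.Collection;
    import java.util.List;
    import org.springframework.data.jpa.repository.JpaRepository;
    import org.springframework.data.jpa.repository.Query;
    import org.springframework.data.repository.query.Param;

    @Entity
    @Table(name = "book")
    class BookEntity {
        @Id
        Long id;
        String name;
    }

    interface BookRepositorySketch extends JpaRepository<BookEntity, Long> {

        // '= ANY (:names)' lets PostgreSQL match a whole bound array in a single round trip.
        @Query(value = "SELECT * FROM book WHERE name = ANY (:names)", nativeQuery = true)
        List<BookEntity> findAllByNameIn(@Param("names") String[] names);

        // The default method keeps a Collection-based API while the native query stays array-based.
        default List<BookEntity> findAllByNameIn(final Collection<String> names) {
            return findAllByNameIn(names.toArray(new String[0]));
        }
    }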
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.Collection; -import org.onap.cps.spi.FetchDescendantsOption; -import org.onap.cps.spi.entities.FragmentEntity; - -public interface FragmentPrefetchRepository { - Collection prefetchDescendantsOfFragmentEntities( - final FetchDescendantsOption fetchDescendantsOption, - final Collection proxiedFragmentEntities); -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java deleted file mode 100644 index c187f20ea9..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.sql.Connection; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.RequiredArgsConstructor; -import org.onap.cps.spi.FetchDescendantsOption; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.FragmentEntity; -import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.PreparedStatementSetter; -import org.springframework.jdbc.core.RowMapper; -import org.springframework.stereotype.Repository; - -@Repository -@RequiredArgsConstructor -public class FragmentPrefetchRepositoryImpl implements FragmentPrefetchRepository { - - private final JdbcTemplate jdbcTemplate; - - @Override - public Collection prefetchDescendantsOfFragmentEntities( - final FetchDescendantsOption fetchDescendantsOption, - final Collection proxiedFragmentEntities) { - - if (FetchDescendantsOption.OMIT_DESCENDANTS.equals(fetchDescendantsOption)) { - return proxiedFragmentEntities; - } - - final List fragmentEntityIds = proxiedFragmentEntities.stream() - .map(FragmentEntity::getId).collect(Collectors.toList()); - - final Map anchorEntityPerId = proxiedFragmentEntities.stream() - .map(FragmentEntity::getAnchor) - .collect(Collectors.toMap(AnchorEntity::getId, anchor -> anchor, (anchor1, anchor2) -> anchor1)); - - final int maxDepth = fetchDescendantsOption.equals(FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - ? Integer.MAX_VALUE - : fetchDescendantsOption.getDepth(); - return findFragmentEntitiesWithDescendantsByIds(fragmentEntityIds, anchorEntityPerId, maxDepth); - } - - private Collection findFragmentEntitiesWithDescendantsByIds( - final Collection fragmentEntityIds, - final Map anchorEntityPerId, - final int maxDepth) { - final String sql - = "WITH RECURSIVE parent_search AS (" - + " SELECT id, 0 AS depth " - + " FROM fragment " - + " WHERE id = ANY (?) " - + " UNION " - + " SELECT child.id, depth + 1 " - + " FROM fragment child INNER JOIN parent_search parent ON child.parent_id = parent.id" - + " WHERE depth < ?" 
- + ") " - + "SELECT fragment.id, anchor_id AS anchorId, xpath, parent_id AS parentId, " - + " CAST(attributes AS TEXT) AS attributes " - + "FROM fragment INNER JOIN parent_search ON fragment.id = parent_search.id"; - - final PreparedStatementSetter preparedStatementSetter = preparedStatement -> { - final Connection connection = preparedStatement.getConnection(); - final java.sql.Array idArray = connection.createArrayOf("bigint", fragmentEntityIds.toArray()); - preparedStatement.setArray(1, idArray); - preparedStatement.setInt(2, maxDepth); - }; - - final RowMapper fragmentEntityRowMapper = (resultSet, rowNum) -> { - final FragmentEntity fragmentEntity = new FragmentEntity(); - fragmentEntity.setId(resultSet.getLong("id")); - fragmentEntity.setXpath(resultSet.getString("xpath")); - fragmentEntity.setParentId(resultSet.getObject("parentId", Long.class)); - fragmentEntity.setAttributes(resultSet.getString("attributes")); - fragmentEntity.setAnchor(anchorEntityPerId.get(resultSet.getLong("anchorId"))); - fragmentEntity.setChildFragments(new HashSet<>()); - return fragmentEntity; - }; - - final Map fragmentEntityPerId; - try (final Stream fragmentEntityStream = jdbcTemplate.queryForStream(sql, - preparedStatementSetter, fragmentEntityRowMapper)) { - fragmentEntityPerId = fragmentEntityStream.collect( - Collectors.toMap(FragmentEntity::getId, Function.identity())); - } - return reuniteChildrenWithTheirParents(fragmentEntityPerId); - } - - private static Collection reuniteChildrenWithTheirParents( - final Map fragmentEntityPerId) { - final Collection fragmentEntitiesWithoutParent = new HashSet<>(); - for (final FragmentEntity fragmentEntity : fragmentEntityPerId.values()) { - final FragmentEntity parentFragmentEntity = fragmentEntityPerId.get(fragmentEntity.getParentId()); - if (parentFragmentEntity == null) { - fragmentEntitiesWithoutParent.add(fragmentEntity); - } else { - parentFragmentEntity.getChildFragments().add(fragmentEntity); - } - } - return fragmentEntitiesWithoutParent; - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentQueryBuilder.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentQueryBuilder.java deleted file mode 100644 index eb61d56632..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentQueryBuilder.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation - * Modifications Copyright (C) 2023 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; -import jakarta.persistence.Query; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Queue; -import lombok.RequiredArgsConstructor; -import org.onap.cps.cpspath.parser.CpsPathPrefixType; -import org.onap.cps.cpspath.parser.CpsPathQuery; -import org.onap.cps.spi.PaginationOption; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.FragmentEntity; -import org.onap.cps.spi.exceptions.CpsPathException; -import org.onap.cps.spi.utils.EscapeUtils; -import org.springframework.stereotype.Component; - -@RequiredArgsConstructor -@Component -public class FragmentQueryBuilder { - - @PersistenceContext - private EntityManager entityManager; - - /** - * Create a sql query to retrieve by anchor(id) and cps path. - * - * @param anchorEntity the anchor - * @param cpsPathQuery the cps path query to be transformed into a sql query - * @return a executable query object - */ - public Query getQueryForAnchorAndCpsPath(final AnchorEntity anchorEntity, final CpsPathQuery cpsPathQuery) { - final StringBuilder sqlStringBuilder = new StringBuilder(); - final Map queryParameters = new HashMap<>(); - - sqlStringBuilder.append("SELECT fragment.* FROM fragment"); - addWhereClauseForAnchor(anchorEntity, sqlStringBuilder, queryParameters); - addNodeSearchConditions(cpsPathQuery, sqlStringBuilder, queryParameters, false); - - return getQuery(sqlStringBuilder.toString(), queryParameters, FragmentEntity.class); - } - - /** - * Create a sql query to retrieve by cps path. - * - * @param dataspaceEntity the dataspace - * @param cpsPathQuery the cps path query to be transformed into a sql query - * @return a executable query object - */ - public Query getQueryForDataspaceAndCpsPath(final DataspaceEntity dataspaceEntity, - final CpsPathQuery cpsPathQuery, - final List anchorIdsForPagination) { - final StringBuilder sqlStringBuilder = new StringBuilder(); - final Map queryParameters = new HashMap<>(); - - sqlStringBuilder.append("SELECT fragment.* FROM fragment"); - if (anchorIdsForPagination.isEmpty()) { - addWhereClauseForDataspace(dataspaceEntity, sqlStringBuilder, queryParameters); - } else { - addWhereClauseForAnchorIds(anchorIdsForPagination, sqlStringBuilder, queryParameters); - } - addNodeSearchConditions(cpsPathQuery, sqlStringBuilder, queryParameters, true); - - return getQuery(sqlStringBuilder.toString(), queryParameters, FragmentEntity.class); - } - - /** - * Get query for dataspace, cps path, page index and page size. 
- * @param dataspaceEntity data space entity - * @param cpsPathQuery cps path query - * @param paginationOption pagination option - * @return query for given dataspace, cps path and pagination parameters - */ - public Query getQueryForAnchorIdsForPagination(final DataspaceEntity dataspaceEntity, - final CpsPathQuery cpsPathQuery, - final PaginationOption paginationOption) { - final StringBuilder sqlStringBuilder = new StringBuilder(); - final Map queryParameters = new HashMap<>(); - - sqlStringBuilder.append("SELECT distinct(fragment.anchor_id) FROM fragment"); - addWhereClauseForDataspace(dataspaceEntity, sqlStringBuilder, queryParameters); - addNodeSearchConditions(cpsPathQuery, sqlStringBuilder, queryParameters, true); - sqlStringBuilder.append(" ORDER BY fragment.anchor_id"); - addPaginationCondition(sqlStringBuilder, queryParameters, paginationOption); - - return getQuery(sqlStringBuilder.toString(), queryParameters, Long.class); - } - - private Query getQuery(final String sql, final Map queryParameters, final Class returnType) { - final Query query = entityManager.createNativeQuery(sql, returnType); - setQueryParameters(query, queryParameters); - return query; - } - - private static void addWhereClauseForAnchor(final AnchorEntity anchorEntity, - final StringBuilder sqlStringBuilder, - final Map queryParameters) { - sqlStringBuilder.append(" WHERE anchor_id = :anchorId"); - queryParameters.put("anchorId", anchorEntity.getId()); - } - - private static void addWhereClauseForAnchorIds(final List anchorIdsForPagination, - final StringBuilder sqlStringBuilder, - final Map queryParameters) { - sqlStringBuilder.append(" WHERE anchor_id IN (:anchorIdsForPagination)"); - queryParameters.put("anchorIdsForPagination", anchorIdsForPagination); - } - - private static void addWhereClauseForDataspace(final DataspaceEntity dataspaceEntity, - final StringBuilder sqlStringBuilder, - final Map queryParameters) { - sqlStringBuilder.append(" JOIN anchor ON anchor.id = fragment.anchor_id WHERE dataspace_id = :dataspaceId"); - queryParameters.put("dataspaceId", dataspaceEntity.getId()); - } - - private static void addNodeSearchConditions(final CpsPathQuery cpsPathQuery, - final StringBuilder sqlStringBuilder, - final Map queryParameters, - final boolean acrossAnchors) { - addAbsoluteParentXpathSearchCondition(cpsPathQuery, sqlStringBuilder, queryParameters, acrossAnchors); - addXpathSearchCondition(cpsPathQuery, sqlStringBuilder, queryParameters); - addLeafConditions(cpsPathQuery, sqlStringBuilder); - addTextFunctionCondition(cpsPathQuery, sqlStringBuilder, queryParameters); - addContainsFunctionCondition(cpsPathQuery, sqlStringBuilder, queryParameters); - } - - private static void addXpathSearchCondition(final CpsPathQuery cpsPathQuery, - final StringBuilder sqlStringBuilder, - final Map queryParameters) { - sqlStringBuilder.append(" AND (xpath LIKE :escapedXpath OR " - + "(xpath LIKE :escapedXpath||'[@%]' AND xpath NOT LIKE :escapedXpath||'[@%]/%[@%]'))"); - if (CpsPathPrefixType.ABSOLUTE.equals(cpsPathQuery.getCpsPathPrefixType())) { - queryParameters.put("escapedXpath", EscapeUtils.escapeForSqlLike(cpsPathQuery.getXpathPrefix())); - } else { - queryParameters.put("escapedXpath", "%/" + EscapeUtils.escapeForSqlLike(cpsPathQuery.getDescendantName())); - } - } - - private static void addAbsoluteParentXpathSearchCondition(final CpsPathQuery cpsPathQuery, - final StringBuilder sqlStringBuilder, - final Map queryParameters, - final boolean acrossAnchors) { - if 
(CpsPathPrefixType.ABSOLUTE.equals(cpsPathQuery.getCpsPathPrefixType())) { - if (cpsPathQuery.getNormalizedParentPath().isEmpty()) { - sqlStringBuilder.append(" AND parent_id IS NULL"); - } else { - if (acrossAnchors) { - sqlStringBuilder.append(" AND parent_id IN (SELECT id FROM fragment WHERE xpath = :parentXpath)"); - } else { - sqlStringBuilder.append(" AND parent_id = (SELECT id FROM fragment WHERE xpath = :parentXpath" - + " AND anchor_id = :anchorId)"); - } - queryParameters.put("parentXpath", cpsPathQuery.getNormalizedParentPath()); - } - } - } - - private static void addPaginationCondition(final StringBuilder sqlStringBuilder, - final Map queryParameters, - final PaginationOption paginationOption) { - if (PaginationOption.NO_PAGINATION != paginationOption) { - final Integer offset = (paginationOption.getPageIndex() - 1) * paginationOption.getPageSize(); - sqlStringBuilder.append(" LIMIT :pageSize OFFSET :offset"); - queryParameters.put("pageSize", paginationOption.getPageSize()); - queryParameters.put("offset", offset); - } - } - - private static Integer getTextValueAsInt(final CpsPathQuery cpsPathQuery) { - try { - return Integer.parseInt(cpsPathQuery.getTextFunctionConditionValue()); - } catch (final NumberFormatException e) { - return null; - } - } - - private static void addLeafConditions(final CpsPathQuery cpsPathQuery, final StringBuilder sqlStringBuilder) { - if (cpsPathQuery.hasLeafConditions()) { - sqlStringBuilder.append(" AND ("); - final Queue booleanOperatorsQueue = new LinkedList<>(cpsPathQuery.getBooleanOperators()); - cpsPathQuery.getLeafConditions().forEach(leafCondition -> { - if (leafCondition.value() instanceof Integer) { - sqlStringBuilder.append("(attributes ->> '").append(leafCondition.name()).append("')\\:\\:int"); - sqlStringBuilder.append(leafCondition.operator()); - sqlStringBuilder.append(leafCondition.value()); - } else { - if ("=".equals(leafCondition.operator())) { - final String leafValueAsText = leafCondition.value().toString(); - sqlStringBuilder.append("attributes ->> '").append(leafCondition.name()).append("'"); - sqlStringBuilder.append(" = '"); - sqlStringBuilder.append(EscapeUtils.escapeForSqlStringLiteral(leafValueAsText)); - sqlStringBuilder.append("'"); - } else { - throw new CpsPathException(" can use only " + leafCondition.operator() + " with integer "); - } - } - if (!booleanOperatorsQueue.isEmpty()) { - sqlStringBuilder.append(" "); - sqlStringBuilder.append(booleanOperatorsQueue.poll()); - sqlStringBuilder.append(" "); - } - }); - sqlStringBuilder.append(")"); - } - } - - private static void addTextFunctionCondition(final CpsPathQuery cpsPathQuery, - final StringBuilder sqlStringBuilder, - final Map queryParameters) { - if (cpsPathQuery.hasTextFunctionCondition()) { - sqlStringBuilder.append(" AND ("); - sqlStringBuilder.append("attributes @> jsonb_build_object(:textLeafName, :textValue)"); - sqlStringBuilder - .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValue))"); - queryParameters.put("textLeafName", cpsPathQuery.getTextFunctionConditionLeafName()); - queryParameters.put("textValue", cpsPathQuery.getTextFunctionConditionValue()); - final Integer textValueAsInt = getTextValueAsInt(cpsPathQuery); - if (textValueAsInt != null) { - sqlStringBuilder.append(" OR attributes @> jsonb_build_object(:textLeafName, :textValueAsInt)"); - sqlStringBuilder - .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValueAsInt))"); - queryParameters.put("textValueAsInt", 
textValueAsInt); - } - sqlStringBuilder.append(")"); - } - } - - private static void addContainsFunctionCondition(final CpsPathQuery cpsPathQuery, - final StringBuilder sqlStringBuilder, - final Map queryParameters) { - if (cpsPathQuery.hasContainsFunctionCondition()) { - sqlStringBuilder.append(" AND attributes ->> :containsLeafName LIKE CONCAT('%',:containsValue,'%') "); - queryParameters.put("containsLeafName", cpsPathQuery.getContainsFunctionConditionLeafName()); - queryParameters.put("containsValue", - EscapeUtils.escapeForSqlLike(cpsPathQuery.getContainsFunctionConditionValue())); - } - } - - private static void setQueryParameters(final Query query, final Map queryParameters) { - for (final Map.Entry queryParameter : queryParameters.entrySet()) { - query.setParameter(queryParameter.getKey(), queryParameter.getValue()); - } - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java deleted file mode 100755 index 1a31d2b499..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2021-2023 Nordix Foundation. - * Modifications Copyright (C) 2020-2021 Bell Canada. - * Modifications Copyright (C) 2020-2021 Pantheon.tech. - * Modifications Copyright (C) 2023 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
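In the query builder above, pagination is appended as a LIMIT/OFFSET clause with the offset derived from a 1-based page index. A small stand-alone sketch of that step with a hypothetical pagination holder (the project's PaginationOption is not reproduced here):

    import java.util.HashMap;
    import java.util.Map;

    class PaginationSqlSketch {

        // pageIndex is 1-based, as in the deleted builder.
        record PageRequest(int pageIndex, int pageSize) { }

        static void addPaginationCondition(final StringBuilder sql,
                                           final Map<String, Object> queryParameters,
                                           final PageRequest pageRequest) {
            final int offset = (pageRequest.pageIndex() - 1) * pageRequest.pageSize();
            sql.append(" LIMIT :pageSize OFFSET :offset");
            queryParameters.put("pageSize", pageRequest.pageSize());
            queryParameters.put("offset", offset);
        }
    }

For example, page 3 with a page size of 20 skips the first 40 anchor ids.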
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.FragmentEntity; -import org.onap.cps.spi.exceptions.DataNodeNotFoundException; -import org.onap.cps.spi.utils.EscapeUtils; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Modifying; -import org.springframework.data.jpa.repository.Query; -import org.springframework.data.repository.query.Param; -import org.springframework.stereotype.Repository; - -@Repository -public interface FragmentRepository extends JpaRepository, FragmentRepositoryCpsPathQuery, - FragmentPrefetchRepository { - - Optional findByAnchorAndXpath(AnchorEntity anchorEntity, String xpath); - - default FragmentEntity getByAnchorAndXpath(final AnchorEntity anchorEntity, final String xpath) { - return findByAnchorAndXpath(anchorEntity, xpath).orElseThrow(() -> - new DataNodeNotFoundException(anchorEntity.getDataspace().getName(), anchorEntity.getName(), xpath)); - } - - @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)", - nativeQuery = true) - List findByAnchorIdAndXpathIn(@Param("anchorId") long anchorId, - @Param("xpaths") String[] xpaths); - - default List findByAnchorAndXpathIn(final AnchorEntity anchorEntity, - final Collection xpaths) { - return findByAnchorIdAndXpathIn(anchorEntity.getId(), xpaths.toArray(new String[0])); - } - - @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId \n" - + "AND xpath LIKE :escapedXpath||'[@%]' AND xpath NOT LIKE :escapedXpath||'[@%]/%[@%]'", - nativeQuery = true) - List findListByAnchorIdAndEscapedXpath(@Param("anchorId") long anchorId, - @Param("escapedXpath") String escapedXpath); - - default List findListByAnchorAndXpath(final AnchorEntity anchorEntity, final String xpath) { - final String escapedXpath = EscapeUtils.escapeForSqlLike(xpath); - return findListByAnchorIdAndEscapedXpath(anchorEntity.getId(), escapedXpath); - } - - @Query(value = "SELECT fragment.* FROM fragment JOIN anchor ON anchor.id = fragment.anchor_id " - + "WHERE dataspace_id = :dataspaceId AND xpath = ANY (:xpaths)", nativeQuery = true) - List findByDataspaceIdAndXpathIn(@Param("dataspaceId") int dataspaceId, - @Param("xpaths") String[] xpaths); - - default List findByDataspaceAndXpathIn(final DataspaceEntity dataspaceEntity, - final Collection xpaths) { - return findByDataspaceIdAndXpathIn(dataspaceEntity.getId(), xpaths.toArray(new String[0])); - } - - @Query(value = "SELECT * FROM fragment WHERE anchor_id IN (:anchorIds)" - + " AND xpath = ANY (:xpaths)", nativeQuery = true) - List findByAnchorIdsAndXpathIn(@Param("anchorIds") Long[] anchorIds, - @Param("xpaths") String[] xpaths); - - @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId LIMIT 1", nativeQuery = true) - Optional findOneByAnchorId(@Param("anchorId") long anchorId); - - @Modifying - @Query(value = "DELETE FROM fragment WHERE anchor_id = ANY (:anchorIds)", nativeQuery = true) - void deleteByAnchorIdIn(@Param("anchorIds") long[] anchorIds); - - default void deleteByAnchorIn(final Collection anchorEntities) { - deleteByAnchorIdIn(anchorEntities.stream().map(AnchorEntity::getId).mapToLong(id -> id).toArray()); - } - - @Modifying - 
@Query(value = "DELETE FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)", nativeQuery = true) - void deleteByAnchorIdAndXpaths(@Param("anchorId") long anchorId, @Param("xpaths") String[] xpaths); - - default void deleteByAnchorIdAndXpaths(final long anchorId, final Collection xpaths) { - deleteByAnchorIdAndXpaths(anchorId, xpaths.toArray(new String[0])); - } - - @Modifying - @Query(value = "DELETE FROM fragment f WHERE anchor_id = :anchorId AND xpath LIKE ANY (:xpathPatterns)", - nativeQuery = true) - void deleteByAnchorIdAndXpathLikeAny(@Param("anchorId") long anchorId, - @Param("xpathPatterns") String[] xpathPatterns); - - default void deleteListsByAnchorIdAndXpaths(long anchorId, Collection xpaths) { - deleteByAnchorIdAndXpathLikeAny(anchorId, - xpaths.stream().map(xpath -> EscapeUtils.escapeForSqlLike(xpath) + "[@%").toArray(String[]::new)); - } - - @Query(value = "SELECT xpath FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)", - nativeQuery = true) - List findAllXpathByAnchorIdAndXpathIn(@Param("anchorId") long anchorId, - @Param("xpaths") String[] xpaths); - - default List findAllXpathByAnchorAndXpathIn(final AnchorEntity anchorEntity, - final Collection xpaths) { - return findAllXpathByAnchorIdAndXpathIn(anchorEntity.getId(), xpaths.toArray(new String[0])); - } - - @Query(value = "SELECT EXISTS(SELECT 1 FROM fragment WHERE anchor_id = :anchorId" - + " AND xpath LIKE :xpathPattern LIMIT 1)", nativeQuery = true) - boolean existsByAnchorIdAndParentXpathAndXpathLike(@Param("anchorId") long anchorId, - @Param("xpathPattern") String xpathPattern); - - default boolean existsByAnchorAndXpathStartsWith(final AnchorEntity anchorEntity, final String xpath) { - return existsByAnchorIdAndParentXpathAndXpathLike(anchorEntity.getId(), - EscapeUtils.escapeForSqlLike(xpath) + "%"); - } - - @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId AND parent_id IS NULL", nativeQuery = true) - List findRootsByAnchorId(@Param("anchorId") long anchorId); - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java deleted file mode 100644 index 9c279618b0..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java +++ /dev/null @@ -1,40 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2021-2023 Nordix Foundation. - * Modifications Copyright (C) 2023 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.List; -import org.onap.cps.cpspath.parser.CpsPathQuery; -import org.onap.cps.spi.PaginationOption; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.FragmentEntity; - -public interface FragmentRepositoryCpsPathQuery { - List<FragmentEntity> findByAnchorAndCpsPath(AnchorEntity anchorEntity, CpsPathQuery cpsPathQuery); - - List<FragmentEntity> findByDataspaceAndCpsPath(DataspaceEntity dataspaceEntity, - CpsPathQuery cpsPathQuery, List<Long> anchorIds); - - List<Long> findAnchorIdsForPagination(DataspaceEntity dataspaceEntity, CpsPathQuery cpsPathQuery, - PaginationOption paginationOption); - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java deleted file mode 100644 index 9c98f7f7d9..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java +++ /dev/null @@ -1,71 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2021-2024 Nordix Foundation. - * Modifications Copyright (C) 2023 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import jakarta.persistence.Query; -import jakarta.transaction.Transactional; -import java.util.List; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.onap.cps.cpspath.parser.CpsPathQuery; -import org.onap.cps.spi.PaginationOption; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.FragmentEntity; - -@RequiredArgsConstructor -@Slf4j -public class FragmentRepositoryCpsPathQueryImpl implements FragmentRepositoryCpsPathQuery { - - private final FragmentQueryBuilder fragmentQueryBuilder; - - @Override - @Transactional - public List<FragmentEntity> findByAnchorAndCpsPath(final AnchorEntity anchorEntity, - final CpsPathQuery cpsPathQuery) { - final Query query = fragmentQueryBuilder.getQueryForAnchorAndCpsPath(anchorEntity, cpsPathQuery); - final List<FragmentEntity> fragmentEntities = query.getResultList(); - log.debug("Fetched {} fragment entities by anchor and cps path.", fragmentEntities.size()); - return fragmentEntities; - } - - @Override - @Transactional - public List<FragmentEntity> findByDataspaceAndCpsPath(final DataspaceEntity dataspaceEntity, - final CpsPathQuery cpsPathQuery, final List<Long> anchorIds) { - final Query query = fragmentQueryBuilder.getQueryForDataspaceAndCpsPath( - dataspaceEntity, cpsPathQuery, anchorIds); - final List<FragmentEntity> fragmentEntities = query.getResultList(); - log.debug("Fetched {} fragment entities by cps path across all anchors.", fragmentEntities.size()); - return fragmentEntities; - } - - @Override - @Transactional - public List<Long> findAnchorIdsForPagination(final DataspaceEntity dataspaceEntity, final CpsPathQuery cpsPathQuery, - final PaginationOption paginationOption) { - final Query query = fragmentQueryBuilder.getQueryForAnchorIdsForPagination( - dataspaceEntity, cpsPathQuery, paginationOption); - return query.getResultList(); - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java deleted file mode 100644 index 4082307384..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceQuery.java +++ /dev/null @@ -1,37 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2024 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.Collection; -import java.util.Map; -import org.onap.cps.spi.model.ModuleReference; - -/** - * This interface is used in conjunction with {@link ModuleReferenceRepository} to create native sql queries.
- */ -public interface ModuleReferenceQuery { - - Collection<ModuleReference> identifyNewModuleReferences(final Collection<ModuleReference> moduleReferencesToCheck); - - Collection<ModuleReference> findModuleReferences(final String dataspaceName, final String anchorName, - final Map<String, String> parentAttributes, - final Map<String, String> childAttributes); -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java deleted file mode 100644 index 15ffa372f9..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepository.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import org.onap.cps.spi.entities.YangResourceEntity; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.stereotype.Repository; - -@Repository -public interface ModuleReferenceRepository extends JpaRepository<YangResourceEntity, Integer>, ModuleReferenceQuery {} - diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java deleted file mode 100644 index 6cc8234c90..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/ModuleReferenceRepositoryImpl.java +++ /dev/null @@ -1,179 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
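As a usage sketch only (the caller, module names and revisions below are hypothetical, not part of this change, and the imports assume the refactored org.onap.cps.ri.repository package), identifyNewModuleReferences is handed every module reference a node reports and returns just the ones that have no matching yang_resource row yet:

    import java.util.Collection;
    import java.util.List;
    import org.onap.cps.ri.repository.ModuleReferenceRepository;
    import org.onap.cps.spi.model.ModuleReference;

    // Hypothetical caller; moduleReferenceRepository would be an injected Spring bean.
    class ModuleSyncSketch {

        private final ModuleReferenceRepository moduleReferenceRepository;

        ModuleSyncSketch(final ModuleReferenceRepository moduleReferenceRepository) {
            this.moduleReferenceRepository = moduleReferenceRepository;
        }

        Collection<ModuleReference> findUnknownModules() {
            // Module names and revisions are made up for illustration.
            final Collection<ModuleReference> reportedModules = List.of(
                    new ModuleReference("ietf-interfaces", "2018-02-20"),
                    new ModuleReference("my-vendor-module", "2024-01-15"));
            // Only references without a matching yang_resource row come back.
            return moduleReferenceRepository.identifyNewModuleReferences(reportedModules);
        }
    }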
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; -import jakarta.persistence.Query; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.RequiredArgsConstructor; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; -import org.onap.cps.spi.model.ModuleReference; -import org.springframework.transaction.annotation.Transactional; - -@Slf4j -@Transactional -@RequiredArgsConstructor -public class ModuleReferenceRepositoryImpl implements ModuleReferenceQuery { - - @PersistenceContext - private EntityManager entityManager; - - private final TempTableCreator tempTableCreator; - - @Override - @SneakyThrows - public Collection identifyNewModuleReferences( - final Collection moduleReferencesToCheck) { - - if (moduleReferencesToCheck == null || moduleReferencesToCheck.isEmpty()) { - return Collections.emptyList(); - } - - final Collection> sqlData = new HashSet<>(moduleReferencesToCheck.size()); - for (final ModuleReference moduleReference : moduleReferencesToCheck) { - final List row = new ArrayList<>(2); - row.add(moduleReference.getModuleName()); - row.add(moduleReference.getRevision()); - sqlData.add(row); - } - - final String tempTableName = tempTableCreator.createTemporaryTable( - "moduleReferencesToCheckTemp", sqlData, "module_name", "revision"); - - return identifyNewModuleReferencesForCmHandle(tempTableName); - } - - /** - * Finds module references based on specified dataspace, anchor, and attribute filters. - * This method constructs and executes a SQL query to retrieve module references. The query applies filters to - * parent and child fragments using the provided attribute maps. The `parentAttributes` are used to filter - * parent fragments, while `childAttributes` filter child fragments. - * - * @param dataspaceName the name of the dataspace to filter on. - * @param anchorName the name of the anchor to filter on. - * @param parentAttributes a map of attributes for filtering parent fragments. - * @param childAttributes a map of attributes for filtering child fragments. - * @return a collection of {@link ModuleReference} objects that match the specified filters. 
- */ - @Transactional - @SuppressWarnings("unchecked") - @Override - public Collection findModuleReferences(final String dataspaceName, final String anchorName, - final Map parentAttributes, - final Map childAttributes) { - - final String parentFragmentWhereClause = buildWhereClause(childAttributes, "parentFragment"); - final String childFragmentWhereClause = buildWhereClause(parentAttributes, "childFragment"); - - final String moduleReferencesSqlQuery = buildModuleReferencesSqlQuery(parentFragmentWhereClause, - childFragmentWhereClause); - - final Query query = entityManager.createNativeQuery(moduleReferencesSqlQuery); - setQueryParameters(query, parentAttributes, childAttributes, anchorName, dataspaceName); - return processQueryResults(query.getResultList()); - } - - private String buildWhereClause(final Map attributes, final String alias) { - return attributes.keySet().stream() - .map(attributeName -> String.format("%s.attributes->>'%s' = ?", alias, attributeName)) - .collect(Collectors.joining(" AND ")); - } - - private void setQueryParameters(final Query query, final Map parentAttributes, - final Map childAttributes, final String anchorName, - final String dataspaceName) { - final String childAttributeValue = childAttributes.entrySet().iterator().next().getValue(); - query.setParameter(1, childAttributeValue); - - final String parentAttributeValue = parentAttributes.entrySet().iterator().next().getValue(); - query.setParameter(2, parentAttributeValue); - - query.setParameter(3, anchorName); - query.setParameter(4, dataspaceName); - } - - private String buildModuleReferencesSqlQuery(final String parentFragmentClause, final String childFragmentClause) { - return """ - WITH Fragment AS ( - SELECT childFragment.attributes->>'id' AS schema_set_name - FROM fragment parentFragment - JOIN fragment childFragment ON parentFragment.parent_id = childFragment.id - JOIN anchor anchorInfo ON parentFragment.anchor_id = anchorInfo.id - JOIN dataspace dataspaceInfo ON anchorInfo.dataspace_id = dataspaceInfo.id - WHERE %s - AND %s - AND anchorInfo.name = ? - AND dataspaceInfo.name = ? 
- LIMIT 1 - ), - SchemaSet AS ( - SELECT id - FROM schema_set - WHERE name = (SELECT schema_set_name FROM Fragment) - ) - SELECT yangResource.module_name, yangResource.revision - FROM yang_resource yangResource - JOIN schema_set_yang_resources schemaSetYangResources - ON yangResource.id = schemaSetYangResources.yang_resource_id - WHERE schemaSetYangResources.schema_set_id = (SELECT id FROM SchemaSet); - """.formatted(parentFragmentClause, childFragmentClause); - } - - private Collection processQueryResults(final List queryResults) { - if (queryResults.isEmpty()) { - log.info("No module references found for the provided attributes."); - return Collections.emptyList(); - } - return queryResults.stream() - .map(queryResult -> { - final String name = (String) queryResult[0]; - final String revision = (String) queryResult[1]; - return new ModuleReference(name, revision); - }) - .collect(Collectors.toList()); - } - - private Collection identifyNewModuleReferencesForCmHandle(final String tempTableName) { - final String sql = String.format( - "SELECT %1$s.module_name, %1$s.revision" - + " FROM %1$s LEFT JOIN yang_resource" - + " ON yang_resource.module_name=%1$s.module_name" - + " AND yang_resource.revision=%1$s.revision" - + " WHERE yang_resource.module_name IS NULL;", tempTableName); - - @SuppressWarnings("unchecked") - final List resultsAsObjects = entityManager.createNativeQuery(sql).getResultList(); - - final List resultsAsModuleReferences = new ArrayList<>(resultsAsObjects.size()); - for (final Object[] row : resultsAsObjects) { - resultsAsModuleReferences.add(new ModuleReference((String) row[0], (String) row[1])); - } - return resultsAsModuleReferences; - } -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java deleted file mode 100644 index 3c5f973cb0..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020 Pantheon.tech - * Modifications Copyright (C) 2022 TechMahindra Ltd. - * Modifications Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.entities.SchemaSetEntity; -import org.onap.cps.spi.exceptions.SchemaSetNotFoundException; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Modifying; -import org.springframework.data.jpa.repository.Query; -import org.springframework.data.repository.query.Param; -import org.springframework.stereotype.Repository; - -@Repository -public interface SchemaSetRepository extends JpaRepository { - - Optional findByDataspaceAndName(DataspaceEntity dataspaceEntity, String schemaSetName); - - /** - * Gets schema sets by dataspace. - * @param dataspaceEntity dataspace entity - * @return list of schema set entity - */ - List findByDataspace(DataspaceEntity dataspaceEntity); - - Integer countByDataspace(DataspaceEntity dataspaceEntity); - - /** - * Gets a schema set by dataspace and schema set name. - * - * @param dataspaceEntity dataspace entity - * @param schemaSetName schema set name - * @return schema set entity - * @throws SchemaSetNotFoundException if SchemaSet not found - */ - default SchemaSetEntity getByDataspaceAndName(final DataspaceEntity dataspaceEntity, final String schemaSetName) { - return findByDataspaceAndName(dataspaceEntity, schemaSetName) - .orElseThrow(() -> new SchemaSetNotFoundException(dataspaceEntity.getName(), schemaSetName)); - } - - @Modifying - @Query(value = "DELETE FROM schema_set WHERE dataspace_id = :dataspaceId AND name = ANY (:schemaSetNames)", - nativeQuery = true) - void deleteByDataspaceIdAndNameIn(@Param("dataspaceId") final int dataspaceId, - @Param("schemaSetNames") final String[] schemaSetNames); - - /** - * Delete multiple schema sets in a given dataspace. - * @param dataspaceEntity dataspace entity - * @param schemaSetNames schema set names - */ - default void deleteByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity, - final Collection schemaSetNames) { - deleteByDataspaceIdAndNameIn(dataspaceEntity.getId(), schemaSetNames.toArray(new String[0])); - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetYangResourceRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetYangResourceRepository.java deleted file mode 100644 index aacebd63b3..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetYangResourceRepository.java +++ /dev/null @@ -1,29 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2021-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.List; - -public interface SchemaSetYangResourceRepository { - - void insertSchemaSetIdYangResourceId(final Integer schemaSetId, final List yangResourceIds); - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetYangResourceRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetYangResourceRepositoryImpl.java deleted file mode 100644 index c786a62d0c..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetYangResourceRepositoryImpl.java +++ /dev/null @@ -1,59 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2021-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; -import java.sql.PreparedStatement; -import java.util.List; -import org.hibernate.Session; -import org.springframework.transaction.annotation.Transactional; - - -@Transactional -public class SchemaSetYangResourceRepositoryImpl implements SchemaSetYangResourceRepository { - - private static final int MAX_INSERT_BATCH_SIZE = 100; - - @PersistenceContext - private EntityManager entityManager; - - @Override - public void insertSchemaSetIdYangResourceId(final Integer schemaSetId, final List yangResourceIds) { - final Session session = entityManager.unwrap(Session.class); - session.doWork(connection -> { - try (PreparedStatement preparedStatement = connection.prepareStatement( - "INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES ( ?, ?)")) { - int sqlQueryCount = 1; - for (final int yangResourceId : yangResourceIds) { - preparedStatement.setInt(1, schemaSetId); - preparedStatement.setInt(2, yangResourceId); - preparedStatement.addBatch(); - if (sqlQueryCount % MAX_INSERT_BATCH_SIZE == 0 || sqlQueryCount == yangResourceIds.size()) { - preparedStatement.executeBatch(); - } - sqlQueryCount++; - } - } - }); - } -} - diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java deleted file mode 100644 index 5804b2dc5d..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java +++ /dev/null @@ -1,102 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.UUID; -import java.util.stream.Collectors; -import lombok.AllArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.onap.cps.spi.utils.EscapeUtils; -import org.springframework.stereotype.Component; -import org.springframework.transaction.annotation.Transactional; - -@Slf4j -@Transactional -@AllArgsConstructor -@Component -public class TempTableCreator { - - @PersistenceContext - private EntityManager entityManager; - - /** - * Create a uniquely named temporary table. - * - * @param prefix prefix for the table name (so you can recognize it) - * @param sqlData data to insert (strings only) the inner List present a row of data - * @param columnNames column names (in same order as data in rows in sqlData) - * @return a unique temporary table name with given prefix - */ - public String createTemporaryTable(final String prefix, - final Collection> sqlData, - final String... 
columnNames) { - final String tempTableName = prefix + UUID.randomUUID().toString().replace("-", ""); - final StringBuilder sqlStringBuilder = new StringBuilder("CREATE TEMPORARY TABLE "); - sqlStringBuilder.append(tempTableName); - defineColumns(sqlStringBuilder, columnNames); - sqlStringBuilder.append(" ON COMMIT DROP;"); - insertData(sqlStringBuilder, tempTableName, columnNames, sqlData); - entityManager.createNativeQuery(sqlStringBuilder.toString()).executeUpdate(); - return tempTableName; - } - - private static void defineColumns(final StringBuilder sqlStringBuilder, final String[] columnNames) { - sqlStringBuilder.append('('); - final Iterator it = Arrays.stream(columnNames).iterator(); - while (it.hasNext()) { - final String columnName = it.next(); - sqlStringBuilder.append(" "); - sqlStringBuilder.append(columnName); - sqlStringBuilder.append(" varchar NOT NULL"); - if (it.hasNext()) { - sqlStringBuilder.append(","); - } - } - sqlStringBuilder.append(")"); - } - - private static void insertData(final StringBuilder sqlStringBuilder, - final String tempTableName, - final String[] columnNames, - final Collection> sqlData) { - final Collection sqlInserts = new HashSet<>(sqlData.size()); - for (final Collection rowValues : sqlData) { - final Collection escapedValues = - rowValues.stream().map(EscapeUtils::escapeForSqlStringLiteral).collect(Collectors.toList()); - sqlInserts.add("('" + String.join("','", escapedValues) + "')"); - } - sqlStringBuilder.append("INSERT INTO "); - sqlStringBuilder.append(tempTableName); - sqlStringBuilder.append(" ("); - sqlStringBuilder.append(String.join(",", columnNames)); - sqlStringBuilder.append(") VALUES "); - sqlStringBuilder.append(String.join(",", sqlInserts)); - sqlStringBuilder.append(";"); - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceNativeRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceNativeRepository.java deleted file mode 100644 index 9ae32b3e78..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceNativeRepository.java +++ /dev/null @@ -1,31 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
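For illustration only, a rough sketch of how the TempTableCreator above would be called and of the single statement it builds; the caller, row values and the org.onap.cps.ri.repository package are assumptions, and the table name suffix is a random UUID:

    import java.util.Collection;
    import java.util.List;
    import org.onap.cps.ri.repository.TempTableCreator;

    // Hypothetical usage; tempTableCreator would be an injected Spring bean.
    class TempTableSketch {

        private final TempTableCreator tempTableCreator;

        TempTableSketch(final TempTableCreator tempTableCreator) {
            this.tempTableCreator = tempTableCreator;
        }

        String createModuleReferenceCheckTable() {
            final Collection<Collection<String>> rows = List.of(
                    List.of("ietf-interfaces", "2018-02-20"),
                    List.of("my-vendor-module", "2024-01-15"));
            // Roughly builds: CREATE TEMPORARY TABLE moduleReferencesToCheckTemp<uuid>
            //   ( module_name varchar NOT NULL, revision varchar NOT NULL) ON COMMIT DROP;
            //   INSERT INTO moduleReferencesToCheckTemp<uuid> (module_name,revision)
            //   VALUES ('ietf-interfaces','2018-02-20'),('my-vendor-module','2024-01-15');
            return tempTableCreator.createTemporaryTable(
                    "moduleReferencesToCheckTemp", rows, "module_name", "revision");
        }
    }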
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.Collection; -import java.util.List; -import org.onap.cps.spi.model.ModuleReference; - -public interface YangResourceNativeRepository { - - List getResourceIdsByModuleReferences(Collection moduleReferences); - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceNativeRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceNativeRepositoryImpl.java deleted file mode 100644 index c84ff427e6..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceNativeRepositoryImpl.java +++ /dev/null @@ -1,69 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import jakarta.persistence.EntityManager; -import jakarta.persistence.PersistenceContext; -import jakarta.persistence.Query; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.StringJoiner; -import lombok.extern.slf4j.Slf4j; -import org.hibernate.type.StandardBasicTypes; -import org.onap.cps.spi.model.ModuleReference; -import org.springframework.stereotype.Repository; -import org.springframework.transaction.annotation.Transactional; - -@Slf4j -@Repository -public class YangResourceNativeRepositoryImpl implements YangResourceNativeRepository { - - @PersistenceContext - private EntityManager entityManager; - - @Override - @Transactional - public List getResourceIdsByModuleReferences(final Collection moduleReferences) { - if (moduleReferences.isEmpty()) { - return Collections.emptyList(); - } - final Query query = entityManager.createNativeQuery(getCombinedSelectSqlQuery(moduleReferences)) - .unwrap(org.hibernate.query.NativeQuery.class) - .addScalar("id", StandardBasicTypes.INTEGER); - final List yangResourceIds = query.getResultList(); - if (yangResourceIds.size() != moduleReferences.size()) { - log.warn("ModuleReferences size : {} and QueryResult size : {}", moduleReferences.size(), - yangResourceIds.size()); - } - return yangResourceIds; - } - - private String getCombinedSelectSqlQuery(final Collection moduleReferences) { - final StringJoiner sqlQueryJoiner = new StringJoiner(" UNION ALL "); - moduleReferences.forEach(moduleReference -> - sqlQueryJoiner.add(String.format("SELECT id FROM yang_resource WHERE module_name='%s' and revision='%s'", - moduleReference.getModuleName(), - moduleReference.getRevision())) - ); - return sqlQueryJoiner.toString(); - } -} diff --git 
a/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java deleted file mode 100644 index 8be0d9a33b..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020 Pantheon.tech - * Modifications Copyright (C) 2021-2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.repository; - -import java.util.Collection; -import java.util.List; -import java.util.Set; -import org.onap.cps.spi.entities.YangResourceEntity; -import org.onap.cps.spi.entities.YangResourceModuleReference; -import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Modifying; -import org.springframework.data.jpa.repository.Query; -import org.springframework.data.repository.query.Param; -import org.springframework.stereotype.Repository; - -@Repository -public interface YangResourceRepository extends JpaRepository, - YangResourceNativeRepository, SchemaSetYangResourceRepository { - - List findAllByChecksumIn(String[] checksums); - - default List findAllByChecksumIn(final Collection checksums) { - return findAllByChecksumIn(checksums.toArray(new String[0])); - } - - @Query(value = """ - SELECT DISTINCT - yang_resource.module_name AS module_name, - yang_resource.revision AS revision - FROM - dataspace - JOIN schema_set ON schema_set.dataspace_id = dataspace.id - JOIN schema_set_yang_resources ON schema_set_yang_resources.schema_set_id = schema_set.id - JOIN yang_resource ON yang_resource.id = schema_set_yang_resources.yang_resource_id - WHERE - dataspace.name = :dataspaceName - """, nativeQuery = true) - Set findAllModuleReferencesByDataspace(@Param("dataspaceName") String dataspaceName); - - @Query(value = """ - SELECT DISTINCT - yang_resource.module_name AS module_name, - yang_resource.revision AS revision - FROM - dataspace - JOIN anchor ON anchor.dataspace_id = dataspace.id - JOIN schema_set ON schema_set.id = anchor.schema_set_id - JOIN schema_set_yang_resources ON schema_set_yang_resources.schema_set_id = schema_set.id - JOIN yang_resource ON yang_resource.id = schema_set_yang_resources.yang_resource_id - WHERE - dataspace.name = :dataspaceName - AND anchor.name = :anchorName - """, nativeQuery = true) - Set findAllModuleReferencesByDataspaceAndAnchor( - @Param("dataspaceName") String dataspaceName, @Param("anchorName") String anchorName); - - @Query(value = """ - SELECT DISTINCT - yang_resource.* - FROM - dataspace - JOIN anchor ON anchor.dataspace_id = dataspace.id - JOIN schema_set ON schema_set.id = anchor.schema_set_id - JOIN 
schema_set_yang_resources ON schema_set_yang_resources.schema_set_id = schema_set.id - JOIN yang_resource ON yang_resource.id = schema_set_yang_resources.yang_resource_id - WHERE - dataspace.name = :dataspaceName - AND anchor.name = :anchorName - AND (:moduleName IS NULL OR yang_resource.module_name = :moduleName) - AND (:revision IS NULL OR yang_resource.revision = :revision) - """, nativeQuery = true) - Set findAllModuleDefinitionsByDataspaceAndAnchorAndModule( - @Param("dataspaceName") String dataspaceName, @Param("anchorName") String anchorName, - @Param("moduleName") String moduleName, @Param("revision") String revision); - - @Modifying - @Query(value = "DELETE FROM schema_set_yang_resources WHERE schema_set_id = :schemaSetId", nativeQuery = true) - void deleteSchemaSetYangResourceForSchemaSetId(@Param("schemaSetId") int schemaSetId); - - @Modifying - @Query(value = "DELETE FROM yang_resource yr WHERE NOT EXISTS " - + "(SELECT 1 FROM schema_set_yang_resources ssyr WHERE ssyr.yang_resource_id = yr.id)", nativeQuery = true) - void deleteOrphans(); -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/utils/EscapeUtils.java b/cps-ri/src/main/java/org/onap/cps/spi/utils/EscapeUtils.java deleted file mode 100644 index 2b61d39503..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/utils/EscapeUtils.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.utils; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; - -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public class EscapeUtils { - - public static String escapeForSqlLike(final String value) { - return value.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_"); - } - - public static String escapeForSqlStringLiteral(final String value) { - return value.replace("'", "''"); - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/utils/SessionManager.java b/cps-ri/src/main/java/org/onap/cps/spi/utils/SessionManager.java deleted file mode 100644 index 6150bf9dbe..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/utils/SessionManager.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
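A couple of concrete input/output pairs for the two escaping helpers above, as a small sketch with made-up values (import path assumes the refactored org.onap.cps.ri.utils package):

    import org.onap.cps.ri.utils.EscapeUtils;

    class EscapeUtilsSketch {
        public static void main(final String[] args) {
            // Illustrative values only.
            System.out.println(EscapeUtils.escapeForSqlLike("50%_discount"));      // 50\%\_discount
            System.out.println(EscapeUtils.escapeForSqlStringLiteral("O'Connor")); // O''Connor
        }
    }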
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.utils; - -import com.google.common.util.concurrent.TimeLimiter; -import com.google.common.util.concurrent.UncheckedExecutionException; -import jakarta.annotation.PostConstruct; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import lombok.RequiredArgsConstructor; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; -import org.hibernate.HibernateException; -import org.hibernate.LockMode; -import org.hibernate.Session; -import org.onap.cps.spi.config.CpsSessionFactory; -import org.onap.cps.spi.entities.AnchorEntity; -import org.onap.cps.spi.entities.DataspaceEntity; -import org.onap.cps.spi.exceptions.SessionManagerException; -import org.onap.cps.spi.exceptions.SessionTimeoutException; -import org.onap.cps.spi.repository.AnchorRepository; -import org.onap.cps.spi.repository.DataspaceRepository; -import org.springframework.beans.factory.config.ConfigurableBeanFactory; -import org.springframework.context.annotation.Scope; -import org.springframework.stereotype.Component; - -@RequiredArgsConstructor -@Slf4j -@Component -@Scope(ConfigurableBeanFactory.SCOPE_SINGLETON) -public class SessionManager { - - private final CpsSessionFactory cpsSessionFactory; - private final TimeLimiterProvider timeLimiterProvider; - private final DataspaceRepository dataspaceRepository; - private final AnchorRepository anchorRepository; - private final ConcurrentHashMap sessionMap = new ConcurrentHashMap<>(); - public static final boolean WITH_COMMIT = true; - public static final boolean WITH_ROLLBACK = false; - - @PostConstruct - private void postConstruct() { - final Thread shutdownHook = new Thread(this::closeAllSessionsInShutdown); - Runtime.getRuntime().addShutdownHook(shutdownHook); - } - - private void closeAllSessionsInShutdown() { - for (final String sessionId : sessionMap.keySet()) { - try { - closeSession(sessionId, WITH_ROLLBACK); - log.info("Session with session ID {} rolled back and closed", sessionId); - } catch (final Exception e) { - log.warn("Session with session ID {} failed to close", sessionId); - } - } - cpsSessionFactory.closeSessionFactory(); - } - - /** - * Starts a session which allows use of locks and batch interaction with the persistence service. - * - * @return Session ID string - */ - public String startSession() { - final Session session = cpsSessionFactory.openSession(); - final String sessionId = UUID.randomUUID().toString(); - sessionMap.put(sessionId, session); - session.beginTransaction(); - return sessionId; - } - - /** - * Close session. - * Changes are committed when commit boolean is set to true. - * Rollback will execute when commit boolean is set to false. 
- * - * @param sessionId session ID - * @param commit indicator whether session will commit or rollback - */ - public void closeSession(final String sessionId, final boolean commit) { - try { - final Session session = getSession(sessionId); - if (commit) { - session.getTransaction().commit(); - } else { - session.getTransaction().rollback(); - } - session.close(); - } catch (final HibernateException e) { - throw new SessionManagerException("Cannot close session", - String.format("Unable to close session with session ID '%s'", sessionId), e); - } finally { - sessionMap.remove(sessionId); - } - } - - /** - * Lock Anchor. - * To release locks(s), the session holding the lock(s) must be closed. - * - * @param sessionId session ID - * @param dataspaceName dataspace name - * @param anchorName anchor name - * @param timeoutInMilliseconds lock attempt timeout in milliseconds - */ - @SneakyThrows - public void lockAnchor(final String sessionId, final String dataspaceName, - final String anchorName, final Long timeoutInMilliseconds) { - final ExecutorService executorService = Executors.newSingleThreadExecutor(); - final TimeLimiter timeLimiter = timeLimiterProvider.getTimeLimiter(executorService); - - try { - timeLimiter.callWithTimeout(() -> { - applyPessimisticWriteLockOnAnchor(sessionId, dataspaceName, anchorName); - return null; - }, timeoutInMilliseconds, TimeUnit.MILLISECONDS); - } catch (final TimeoutException e) { - throw new SessionTimeoutException( - "Timeout: Anchor locking failed", - "The error could be caused by another session holding a lock on the specified table. " - + "Retrying the sending the request could be required.", e); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - throw new SessionManagerException("Operation interrupted", "This thread was interrupted.", e); - } catch (final ExecutionException | UncheckedExecutionException e) { - if (e.getCause() != null) { - throw e.getCause(); - } - throw new SessionManagerException( - "Operation Aborted", - "The transaction request was aborted. 
" - + "Retrying and checking all details are correct could be required", e); - } finally { - executorService.shutdownNow(); - } - } - - private void applyPessimisticWriteLockOnAnchor(final String sessionId, final String dataspaceName, - final String anchorName) { - final Session session = getSession(sessionId); - final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); - final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); - final long anchorId = anchorEntity.getId(); - log.debug("Attempting to lock anchor {} for session {}", anchorName, sessionId); - session.get(AnchorEntity.class, anchorId, LockMode.PESSIMISTIC_WRITE); - log.info("Anchor {} successfully locked", anchorName); - } - - private Session getSession(final String sessionId) { - final Session session = sessionMap.get(sessionId); - if (session == null) { - throw new SessionManagerException("Session not found", - String.format("Session with ID %s does not exist", sessionId)); - } - return session; - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/utils/TimeLimiterProvider.java b/cps-ri/src/main/java/org/onap/cps/spi/utils/TimeLimiterProvider.java deleted file mode 100644 index 2bd7ac3763..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/utils/TimeLimiterProvider.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.utils; - -import com.google.common.util.concurrent.SimpleTimeLimiter; -import com.google.common.util.concurrent.TimeLimiter; -import java.util.concurrent.ExecutorService; -import org.springframework.stereotype.Component; - -@Component -public class TimeLimiterProvider { - public TimeLimiter getTimeLimiter(final ExecutorService executorService) { - return SimpleTimeLimiter.create(executorService); - } -} diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsDataPersistenceServiceImplSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsDataPersistenceServiceImplSpec.groovy new file mode 100644 index 0000000000..36bf55e2db --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsDataPersistenceServiceImplSpec.groovy @@ -0,0 +1,281 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (c) 2021 Bell Canada. + * Modifications Copyright (C) 2021-2023 Nordix Foundation + * Modifications Copyright (C) 2022-2023 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= +*/ + +package org.onap.cps.ri + +import com.fasterxml.jackson.databind.ObjectMapper +import org.hibernate.StaleStateException +import org.onap.cps.ri.models.AnchorEntity +import org.onap.cps.ri.models.DataspaceEntity +import org.onap.cps.ri.models.FragmentEntity +import org.onap.cps.ri.repository.AnchorRepository +import org.onap.cps.ri.repository.DataspaceRepository +import org.onap.cps.ri.repository.FragmentRepository +import org.onap.cps.ri.utils.SessionManager +import org.onap.cps.spi.FetchDescendantsOption +import org.onap.cps.spi.exceptions.ConcurrencyException +import org.onap.cps.spi.exceptions.DataValidationException +import org.onap.cps.spi.model.DataNode +import org.onap.cps.spi.model.DataNodeBuilder +import org.onap.cps.utils.JsonObjectMapper +import org.springframework.dao.DataIntegrityViolationException +import spock.lang.Specification + +import java.util.stream.Collectors + +class CpsDataPersistenceServiceImplSpec extends Specification { + + def mockDataspaceRepository = Mock(DataspaceRepository) + def mockAnchorRepository = Mock(AnchorRepository) + def mockFragmentRepository = Mock(FragmentRepository) + def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) + def mockSessionManager = Mock(SessionManager) + + def objectUnderTest = Spy(new CpsDataPersistenceServiceImpl(mockDataspaceRepository, mockAnchorRepository, + mockFragmentRepository, jsonObjectMapper, mockSessionManager)) + + static def anchorEntity = new AnchorEntity(id: 123, dataspace: new DataspaceEntity(id: 1)) + + def setup() { + mockAnchorRepository.getByDataspaceAndName(_, _) >> anchorEntity + mockFragmentRepository.prefetchDescendantsOfFragmentEntities(_, _) >> { fetchDescendantsOption, fragmentEntities -> fragmentEntities } + mockFragmentRepository.findListByAnchorAndXpath(_, [] as Set) >> [] + } + + def 'Storing data nodes individually when batch operation fails'(){ + given: 'two data nodes and supporting repository mock behavior' + def dataNode1 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath1','OK') + def dataNode2 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath2','OK') + and: 'the batch store operation will fail' + mockFragmentRepository.saveAll(*_) >> { throw new DataIntegrityViolationException("Exception occurred") } + when: 'trying to store data nodes' + objectUnderTest.storeDataNodes('dataSpaceName', 'anchorName', [dataNode1, dataNode2]) + then: 'the two data nodes are saved individually' + 2 * mockFragmentRepository.save(_) + } + + def 'Handling of StaleStateException (caused by concurrent updates) during patch operation for data nodes.'() { + given: 'the system can update one datanode and has two more datanodes that throw an exception while updating' + def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([ + '/node1': 'OK', + '/node2': 'EXCEPTION', + '/node3': 'EXCEPTION']) + def updatedLeavesPerXPath = dataNodes.stream() + .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves)) + and: 'the batch update will therefore also fail' + 
mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") } + when: 'attempt batch update data nodes' + objectUnderTest.batchUpdateDataLeaves('some-dataspace', 'some-anchor', updatedLeavesPerXPath) + then: 'concurrency exception is thrown' + def thrown = thrown(ConcurrencyException) + assert thrown.message == 'Concurrent Transactions' + and: 'it does not contain the successful datanode' + assert !thrown.details.contains('/node1') + and: 'it contains the failed datanodes' + assert thrown.details.contains('/node2') + assert thrown.details.contains('/node3') + } + + def 'Batch update data node leaves and descendants: #scenario'(){ + given: 'the fragment repository returns fragment entities related to the xpath inputs' + mockFragmentRepository.findByAnchorAndXpathIn(_, [] as Set) >> [] + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath'] as Set) >> [ + new FragmentEntity(1, '/test/xpath', null, "{\"id\":\"testId\"}", anchorEntity, [] as Set) + ] + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath1', '/test/xpath2'] as Set) >> [ + new FragmentEntity(1, '/test/xpath1', null, "{\"id\":\"testId1\"}", anchorEntity, [] as Set), + new FragmentEntity(2, '/test/xpath2', null, "{\"id\":\"testId2\"}", anchorEntity, [] as Set) + ] + when: 'replace data node tree' + objectUnderTest.batchUpdateDataLeaves('dataspaceName', 'anchorName', + dataNodes.stream().collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves))) + then: 'call fragment repository save all method' + 1 * mockFragmentRepository.saveAll({fragmentEntities -> + assert fragmentEntities.sort() == expectedFragmentEntities.sort() + assert fragmentEntities.size() == expectedSize + }) + where: 'the following Data Type is passed' + scenario | dataNodes | expectedSize || expectedFragmentEntities + 'empty data node list' | [] | 0 || [] + 'one data node in list' | [new DataNode(xpath: '/test/xpath', leaves: ['id': 'testId'])] | 1 || [new FragmentEntity(xpath: '/test/xpath', attributes: '{"id":"testId"}', anchor: anchorEntity)] + 'multiple data nodes' | [new DataNode(xpath: '/test/xpath1', leaves: ['id': 'newTestId1']), new DataNode(xpath: '/test/xpath2', leaves: ['id': 'newTestId2'])] | 2 || [new FragmentEntity(xpath: '/test/xpath2', attributes: '{"id":"newTestId2"}', anchor: anchorEntity), new FragmentEntity(xpath: '/test/xpath1', attributes: '{"id":"newTestId1"}', anchor: anchorEntity)] + } + + def 'Handling of StaleStateException (caused by concurrent updates) during update data nodes and descendants.'() { + given: 'the system can update one datanode and has two more datanodes that throw an exception while updating' + def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([ + '/node1': 'OK', + '/node2': 'EXCEPTION', + '/node3': 'EXCEPTION']) + and: 'the batch update will therefore also fail' + mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") } + when: 'attempt batch update data nodes' + objectUnderTest.updateDataNodesAndDescendants('some-dataspace', 'some-anchor', dataNodes) + then: 'concurrency exception is thrown' + def thrown = thrown(ConcurrencyException) + assert thrown.message == 'Concurrent Transactions' + and: 'it does not contain the successful datanode' + assert !thrown.details.contains('/node1') + and: 'it contains the failed datanodes' + assert thrown.details.contains('/node2') + assert thrown.details.contains('/node3') + } + + def 'Retrieving a data node with a property JSON value of #scenario'() { + given: 'the db 
has a fragment with an attribute property JSON value of #scenario' + mockFragmentWithJson("{\"some attribute\": ${dataString}}") + when: 'getting the data node represented by this fragment' + def dataNode = objectUnderTest.getDataNodes('my-dataspace', 'my-anchor', + '/parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + then: 'the leaf is of the correct value and data type' + def attributeValue = dataNode[0].leaves.get('some attribute') + assert attributeValue == expectedValue + assert attributeValue.class == expectedDataClass + where: 'the following Data Type is passed' + scenario | dataString || expectedValue | expectedDataClass + 'just numbers' | '15174' || 15174 | Integer + 'number with dot' | '15174.32' || 15174.32 | Double + 'number with 0 value after dot' | '15174.0' || 15174.0 | Double + 'number with 0 value before dot' | '0.32' || 0.32 | Double + 'number higher than max int' | '2147483648' || 2147483648 | Long + 'just text' | '"Test"' || 'Test' | String + 'number with exponent' | '1.2345e5' || 1.2345e5 | Double + 'number higher than max int with dot' | '123456789101112.0' || 123456789101112.0 | Double + 'text and numbers' | '"String = \'1234\'"' || "String = '1234'" | String + 'number as String' | '"12345"' || '12345' | String + } + + def 'Retrieving a data node with invalid JSON'() { + given: 'a fragment with invalid JSON' + mockFragmentWithJson('{invalid json') + when: 'getting the data node represented by this fragment' + objectUnderTest.getDataNodes('my-dataspace', 'my-anchor', + '/parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + then: 'a data validation exception is thrown' + thrown(DataValidationException) + } + + def 'Retrieving multiple data nodes.'() { + given: 'fragment repository returns a collection of fragments' + mockFragmentRepository.findByAnchorAndXpathIn(anchorEntity, ['/xpath1', '/xpath2'] as Set) >> [ + new FragmentEntity(1, '/xpath1', null, null, anchorEntity, [] as Set), + new FragmentEntity(2, '/xpath2', null, null, anchorEntity, [] as Set) + ] + when: 'getting data nodes for 2 xpaths' + def result = objectUnderTest.getDataNodesForMultipleXpaths('some-dataspace', 'some-anchor', ['/xpath1', '/xpath2'], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + then: '2 data nodes are returned' + assert result.size() == 2 + } + + def 'start session'() { + when: 'start session' + objectUnderTest.startSession() + then: 'the session manager method to start session is invoked' + 1 * mockSessionManager.startSession() + } + + def 'close session'() { + given: 'session ID' + def someSessionId = 'someSessionId' + when: 'close session method is called with session ID as parameter' + objectUnderTest.closeSession(someSessionId) + then: 'the session manager method to close session is invoked with parameter' + 1 * mockSessionManager.closeSession(someSessionId, mockSessionManager.WITH_COMMIT) + } + + def 'Lock anchor.'(){ + when: 'lock anchor method is called with anchor entity details' + objectUnderTest.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L) + then: 'the session manager method to lock anchor is invoked with same parameters' + 1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L) + } + + def 'Replace data node and descendants: #scenario'(){ + given: 'the fragment repository returns fragment entities related to the xpath inputs' + mockFragmentRepository.findByAnchorAndXpathIn(_, [] as Set) >> [] + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath'] as Set) >> [ + new FragmentEntity(1, 
'/test/xpath', null, '{"id":"testId"}', anchorEntity, [] as Set) + ] + when: 'replace data node tree' + objectUnderTest.updateDataNodesAndDescendants('dataspaceName', 'anchorName', dataNodes) + then: 'call fragment repository save all method' + 1 * mockFragmentRepository.saveAll({fragmentEntities -> assert fragmentEntities as List == expectedFragmentEntities}) + where: 'the following Data Type is passed' + scenario | dataNodes || expectedFragmentEntities + 'empty data node list' | [] || [] + 'one data node in list' | [new DataNode(xpath: '/test/xpath', leaves: ['id': 'testId'], childDataNodes: [])] || [new FragmentEntity(xpath: '/test/xpath', attributes: '{"id":"testId"}', anchor: anchorEntity, childFragments: [])] + } + + def 'Replace data nodes and descendants'() { + given: 'the fragment repository returns fragment entities related to the xpath inputs' + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath1', '/test/xpath2'] as Set) >> [ + new FragmentEntity(1, '/test/xpath1', null, null, anchorEntity, [] as Set), + new FragmentEntity(2, '/test/xpath2', null, null, anchorEntity, [] as Set) + ] + and: 'some data nodes with descendants' + def dataNode1 = new DataNode(xpath: '/test/xpath1', leaves: ['id': 'testId1'], childDataNodes: [new DataNode(xpath: '/test/xpath1/child', leaves: ['id': 'childTestId1'])]) + def dataNode2 = new DataNode(xpath: '/test/xpath2', leaves: ['id': 'testId2'], childDataNodes: [new DataNode(xpath: '/test/xpath2/child', leaves: ['id': 'childTestId2'])]) + when: 'the fragment entities are updated by the data nodes' + objectUnderTest.updateDataNodesAndDescendants('dataspace', 'anchor', [dataNode1, dataNode2]) + then: 'the fragment repository save all method is called with the updated fragments' + 1 * mockFragmentRepository.saveAll({fragmentEntities -> { + assert fragmentEntities.size() == 2 + def fragmentEntityPerXpath = fragmentEntities.collectEntries { [it.xpath, it] } + assert fragmentEntityPerXpath.get('/test/xpath1').childFragments.first().attributes == '{"id":"childTestId1"}' + assert fragmentEntityPerXpath.get('/test/xpath2').childFragments.first().attributes == '{"id":"childTestId2"}' + }}) + } + + def createDataNodeAndMockRepositoryMethodSupportingIt(xpath, scenario) { + def dataNode = new DataNodeBuilder().withXpath(xpath).build() + def fragmentEntity = new FragmentEntity(xpath: xpath, childFragments: []) + mockFragmentRepository.getByAnchorAndXpath(_, xpath) >> fragmentEntity + if ('EXCEPTION' == scenario) { + mockFragmentRepository.save(fragmentEntity) >> { throw new StaleStateException("concurrent updates") } + } + return dataNode + } + + def createDataNodesAndMockRepositoryMethodSupportingThem(Map xpathToScenarioMap) { + def dataNodes = [] + def fragmentEntities = [] + def fragmentId = 1 + xpathToScenarioMap.each { + def xpath = it.key + def scenario = it.value + def dataNode = new DataNodeBuilder().withXpath(xpath).build() + dataNodes.add(dataNode) + def fragmentEntity = new FragmentEntity(id: fragmentId, anchor: anchorEntity, xpath: xpath, childFragments: []) + fragmentEntities.add(fragmentEntity) + if ('EXCEPTION' == scenario) { + mockFragmentRepository.save(fragmentEntity) >> { throw new StaleStateException("concurrent updates") } + } + fragmentId++ + } + mockFragmentRepository.findByAnchorAndXpathIn(_, xpathToScenarioMap.keySet()) >> fragmentEntities + return dataNodes + } + + def mockFragmentWithJson(json) { + def fragmentEntity = new FragmentEntity(456, '/parent-01', null, json, anchorEntity, [] as Set) +
mockFragmentRepository.findByAnchorAndXpathIn(_, ['/parent-01'] as Set) >> [fragmentEntity] + } + +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceConcurrencySpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceConcurrencySpec.groovy new file mode 100644 index 0000000000..b892fe4dae --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceConcurrencySpec.groovy @@ -0,0 +1,145 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Bell Canada. + * Modifications Copyright (C) 2021-2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ +package org.onap.cps.ri + +import org.hibernate.exception.ConstraintViolationException +import org.onap.cps.ri.models.DataspaceEntity +import org.onap.cps.ri.models.SchemaSetEntity +import org.onap.cps.ri.repository.DataspaceRepository +import org.onap.cps.ri.repository.ModuleReferenceRepository +import org.onap.cps.ri.repository.SchemaSetRepository +import org.onap.cps.ri.repository.YangResourceRepository +import org.onap.cps.spi.CpsAdminPersistenceService +import org.onap.cps.spi.CpsModulePersistenceService +import org.onap.cps.spi.exceptions.DuplicatedYangResourceException +import org.onap.cps.spi.model.ModuleReference +import org.spockframework.spring.SpringBean +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.dao.DataIntegrityViolationException +import org.springframework.retry.annotation.EnableRetry +import spock.lang.Specification + +import java.sql.SQLException + +@SpringBootTest(classes=[CpsModulePersistenceServiceImpl]) +@EnableRetry +class CpsModulePersistenceServiceConcurrencySpec extends Specification { + + @Autowired + CpsModulePersistenceService objectUnderTest + + @SpringBean + DataspaceRepository dataspaceRepository = Mock() + + @SpringBean + YangResourceRepository yangResourceRepository = Mock() + + @SpringBean + SchemaSetRepository schemaSetRepository = Mock() + + @SpringBean + CpsAdminPersistenceService cpsAdminPersistenceService = Mock() + + @SpringBean + ModuleReferenceRepository moduleReferenceRepository = Mock() + + def NEW_RESOURCE_NAME = 'some new resource' + def NEW_RESOURCE_CONTENT = 'module stores {\n' + + ' yang-version 1.1;\n' + + ' namespace "org:onap:ccsdk:sample";\n' + + '}' + + def newYangResourcesNameToContentMap = [(NEW_RESOURCE_NAME):NEW_RESOURCE_CONTENT] + + def yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539' + + def yangResourceChecksumDbConstraint = 'yang_resource_checksum_key' + + def sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum) + + def checksumIntegrityException 
= new DataIntegrityViolationException("checksum integrity exception", + new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint)) + + def 'Store new schema set, maximum retries.'() { + given: 'no pre-existing schemaset in database' + dataspaceRepository.getByName(_) >> new DataspaceEntity() + yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() + when: 'a new schemaset is stored' + objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap) + then: 'a duplicated yang resource exception is thrown ' + thrown(DuplicatedYangResourceException) + and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)' + 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } + } + + def 'Store new schema set, succeed on third attempt.'() { + given: 'no pre-existing schemaset in database' + dataspaceRepository.getByName(_) >> new DataspaceEntity() + yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() + when: 'a new schemaset is stored' + objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap) + then: 'no exception is thrown ' + noExceptionThrown() + and: 'the system will attempt to save the data 2 times with checksum integrity exception but then succeed' + 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } + 1 * yangResourceRepository.saveAll(_) >> [] + } + + def 'Store schema set using modules, maximum retries.'() { + given: 'map of new modules, a list of existing modules, module reference' + def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }'] + def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12") + def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule] + and: 'no pre-existing schemaset in database' + dataspaceRepository.getByName(_) >> new DataspaceEntity() + yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() + when: 'a new schemaset is stored from a module' + objectUnderTest.storeSchemaSetFromModules('some dataspace', 'some new schema set' , mapOfNewModules, listOfExistingModulesModuleReference) + then: 'a duplicated yang resource exception is thrown ' + thrown(DuplicatedYangResourceException) + and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)' + 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } + } + + def 'Store schema set using modules, succeed on third attempt.'() { + given: 'map of new modules, a list of existing modules, module reference' + def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }'] + def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12") + def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule] + and: 'no pre-existing schemaset in database' + def dataspaceEntity = new DataspaceEntity() + dataspaceRepository.getByName(_) >> new DataspaceEntity() + yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() + yangResourceRepository.getResourceIdsByModuleReferences(_) >> [] + and: 'can retrieve schemaset details after storing it' + def schemaSetEntity = new SchemaSetEntity() + schemaSetRepository.getByDataspaceAndName(dataspaceEntity, 'new schema set') >> schemaSetEntity + when: 'a 
new schemaset is stored from a module' + objectUnderTest.storeSchemaSetFromModules('some dataspace', 'new schema set' , mapOfNewModules, listOfExistingModulesModuleReference) + then: 'no exception is thrown ' + noExceptionThrown() + and: 'the system will attempt to save the data 2 times with checksum integrity exception but then succeed' + 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } + 1 * yangResourceRepository.saveAll(_) >> [] + } + +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceImplSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceImplSpec.groovy new file mode 100644 index 0000000000..1b61ff39c0 --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/ri/CpsModulePersistenceServiceImplSpec.groovy @@ -0,0 +1,104 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (c) 2021 Bell Canada. + * Modifications Copyright (C) 2022-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= +*/ + +package org.onap.cps.ri + +import org.hibernate.exception.ConstraintViolationException +import org.onap.cps.ri.models.SchemaSetEntity +import org.onap.cps.ri.repository.DataspaceRepository +import org.onap.cps.ri.repository.ModuleReferenceRepository +import org.onap.cps.ri.repository.SchemaSetRepository +import org.onap.cps.ri.repository.YangResourceRepository +import org.onap.cps.spi.CpsModulePersistenceService +import org.onap.cps.spi.exceptions.DuplicatedYangResourceException +import org.onap.cps.spi.model.ModuleReference +import org.springframework.dao.DataIntegrityViolationException +import spock.lang.Specification + +import java.sql.SQLException + +/** + * Specification unit test class for CPS module persistence service. 
+ */ +class CpsModulePersistenceServiceImplSpec extends Specification { + + CpsModulePersistenceService objectUnderTest + + def mockDataspaceRepository = Mock(DataspaceRepository) + def mockYangResourceRepository = Mock(YangResourceRepository) + def mockSchemaSetRepository = Mock(SchemaSetRepository) + def mockModuleReferenceRepository = Mock(ModuleReferenceRepository) + + def yangResourceName = 'my-yang-resource-name' + def yangResourceContent = 'module stores {\n' + + ' yang-version 1.1;\n' + + ' namespace "org:onap:ccsdk:sample";\n' + + '\n' + + ' prefix book-store;\n' + + '\n' + + ' revision "2020-09-15" {\n' + + ' description\n' + + ' "Sample Model";\n' + + ' }' + + '}' + + static yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539' + static yangResourceChecksumDbConstraint = 'yang_resource_checksum_key' + static sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum) + static checksumIntegrityException = new DataIntegrityViolationException('checksum integrity exception', + new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint)) + static checksumIntegrityExceptionWithoutChecksum = new DataIntegrityViolationException('checksum integrity exception', + new ConstraintViolationException('', new SQLException('no checksum'), yangResourceChecksumDbConstraint)) + static otherIntegrityException = new DataIntegrityViolationException('another integrity exception') + + def setup() { + objectUnderTest = new CpsModulePersistenceServiceImpl(mockYangResourceRepository, mockSchemaSetRepository, + mockDataspaceRepository, mockModuleReferenceRepository) + } + + def 'Store schema set error scenario: #scenario.'() { + given: 'no yang resources are currently saved' + mockYangResourceRepository.findAllByChecksumIn(_ as Collection) >> Collections.emptyList() + and: 'persisting a yang resource raises a db constraint exception (for example in case of concurrent requests)' + mockYangResourceRepository.saveAll(_) >> { throw dbException } + when: 'attempt to store schema set' + def newYangResourcesNameToContentMap = [(yangResourceName):yangResourceContent] + objectUnderTest.storeSchemaSet('my-dataspace', 'my-schema-set', newYangResourcesNameToContentMap) + then: 'an #expectedThrownException is thrown' + def e = thrown(expectedThrownException) + assert e.getMessage().contains(expectedThrownExceptionMessage) + where: 'the following data is used' + scenario | dbException || expectedThrownException | expectedThrownExceptionMessage + 'checksum data failure' | checksumIntegrityException || DuplicatedYangResourceException | yangResourceChecksum + 'checksum failure without checksum' | checksumIntegrityExceptionWithoutChecksum || DuplicatedYangResourceException | 'no checksum found' + 'other data failure' | otherIntegrityException || DataIntegrityViolationException | 'another integrity exception' + } + + def 'Upgrade existing schema set'() { + given: 'the old schema set has no yang resources' + mockYangResourceRepository.findAllByChecksumIn(_ as Collection) >> Collections.emptyList() + def schemaSetEntity = new SchemaSetEntity(id: 1) + mockSchemaSetRepository.getByDataspaceAndName(_, _) >> schemaSetEntity + when: 'schema set update is requested' + objectUnderTest.updateSchemaSetFromModules('my-dataspace', 'my-schemaset', [:], [new ModuleReference('some module name', 'some revision name')]) + then: 'no exception is thrown' + noExceptionThrown() + } + +}
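Both module-persistence specs above depend on Spring Retry: the concurrency spec enables it with @EnableRetry and asserts that yangResourceRepository.saveAll is attempted exactly five times before a DuplicatedYangResourceException reaches the caller, while the 'succeed on third attempt' cases show that two stubbed failures followed by one success stay within that budget. Below is a minimal, self-contained sketch of that attempt-counting behaviour; it uses a plain RetryTemplate and an IllegalStateException stand-in purely for illustration (both are assumptions, not taken from this patch), whereas the production service relies on the annotation-driven setup exercised by the spec.

```groovy
// Minimal sketch (not part of this patch) of the retry semantics the concurrency spec asserts:
// maxAttempts = 5 means one initial call plus four retries, after which the last failure
// propagates to the caller.
import org.springframework.retry.support.RetryTemplate

class RetryAttemptCountingSketch {
    static void main(String[] args) {
        def attempts = 0
        def retryTemplate = RetryTemplate.builder()
                .maxAttempts(5)
                .fixedBackoff(10)
                .retryOn(IllegalStateException)
                .build()
        try {
            retryTemplate.execute { retryContext ->
                attempts++
                // stands in for the 'yang_resource_checksum_key' constraint violation
                throw new IllegalStateException('simulated duplicate checksum')
            }
        } catch (IllegalStateException ignored) {
            // reached only after the fifth and final attempt has failed
        }
        assert attempts == 5
    }
}
```

diff --git 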
a/cps-ri/src/test/groovy/org/onap/cps/ri/utils/CpsValidatorImplSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/ri/utils/CpsValidatorImplSpec.groovy new file mode 100644 index 0000000000..d57bf25058 --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/ri/utils/CpsValidatorImplSpec.groovy @@ -0,0 +1,78 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.utils + + +import org.onap.cps.spi.PaginationOption +import org.onap.cps.spi.exceptions.DataValidationException +import spock.lang.Specification + +class CpsValidatorImplSpec extends Specification { + + def objectUnderTest = new CpsValidatorImpl() + + def 'Validating a valid string.'() { + when: 'the string is validated using a valid name' + objectUnderTest.validateNameCharacters('name-with-no-spaces') + then: 'no exception is thrown' + noExceptionThrown() + } + + def 'Validating an invalid string.'() { + when: 'the string is validated using an invalid name' + objectUnderTest.validateNameCharacters(name) + then: 'a data validation exception is thrown' + def exceptionThrown = thrown(DataValidationException) + and: 'the error was encountered at the following index in #scenario' + assert exceptionThrown.getDetails().contains(expectedErrorMessage) + where: 'the following names are used' + scenario | name || expectedErrorMessage + 'position 5' | 'name with spaces' || 'name with spaces invalid token encountered at position 5' + 'position 9' | 'nameWith Space' || 'nameWith Space invalid token encountered at position 9' + } + + def 'Validating a list of valid names.'() { + given: 'a list of valid names' + def names = ['valid-name', 'another-valid-name'] + when: 'a list of strings is validated' + objectUnderTest.validateNameCharacters(names) + then: 'no exception is thrown' + noExceptionThrown() + } + + def 'Validating a list of names with invalid names.'() { + given: 'a list of names with an invalid name' + def names = ['valid-name', 'name with spaces'] + when: 'a list of strings is validated' + objectUnderTest.validateNameCharacters(names) + then: 'a data validation exception is thrown' + thrown(DataValidationException) + } + + def 'Validate Pagination option with invalid page index and size.'() { + when: 'the pagination option is validated using invalid options' + objectUnderTest.validatePaginationOption(new PaginationOption(-5, -2)) + then: 'a data validation exception is thrown' + def exceptionThrown = thrown(DataValidationException) + and: 'the error was encountered at the following index in #scenario' + assert exceptionThrown.getDetails().contains("Invalid page index or size") + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/utils/EscapeUtilsSpec.groovy 
b/cps-ri/src/test/groovy/org/onap/cps/ri/utils/EscapeUtilsSpec.groovy new file mode 100644 index 0000000000..8afd9695a7 --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/ri/utils/EscapeUtilsSpec.groovy @@ -0,0 +1,41 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.utils + +import spock.lang.Specification + +class EscapeUtilsSpec extends Specification { + + def 'Escape text for use in SQL LIKE operation.'() { + expect: 'SQL LIKE special characters to be escaped with forward-slash' + assert EscapeUtils.escapeForSqlLike(unescapedText) == escapedText + where: + unescapedText || escapedText + 'Only %, _, and \\ are special' || 'Only \\%, \\_, and \\\\ are special' + 'Others (./?$) are not special' || 'Others (./?$) are not special' + } + + def 'Escape text for use in SQL string literal.'() { + expect: 'single quotes to be doubled' + assert EscapeUtils.escapeForSqlStringLiteral("I'm escaping!") == "I''m escaping!" + } + +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/ri/utils/SessionManagerSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/ri/utils/SessionManagerSpec.groovy new file mode 100644 index 0000000000..b50a20c124 --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/ri/utils/SessionManagerSpec.groovy @@ -0,0 +1,137 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ri.utils + +import com.google.common.util.concurrent.TimeLimiter +import com.google.common.util.concurrent.UncheckedExecutionException +import org.hibernate.HibernateException +import org.hibernate.Session +import org.hibernate.Transaction +import org.onap.cps.ri.models.AnchorEntity +import org.onap.cps.ri.repository.AnchorRepository +import org.onap.cps.ri.repository.DataspaceRepository +import org.onap.cps.spi.exceptions.SessionManagerException +import spock.lang.Specification + +class SessionManagerSpec extends Specification { + + def mockCpsSessionFactory = Mock(CpsSessionFactory) + def spiedTimeLimiterProvider = Spy(TimeLimiterProvider) + def mockDataspaceRepository = Mock(DataspaceRepository) + def mockAnchorRepository = Mock(AnchorRepository) + def mockSession1 = Mock(Session) + def mockSession2 = Mock(Session) + def mockTransaction1 = Mock(Transaction) + def mockTransaction2 = Mock(Transaction) + + def objectUnderTest = new SessionManager(mockCpsSessionFactory, spiedTimeLimiterProvider, mockDataspaceRepository, mockAnchorRepository) + + def setup(){ + mockSession1.getTransaction() >> mockTransaction1 + mockSession2.getTransaction() >> mockTransaction2 + } + + def 'Lock anchor entity with #exceptionDuringTest exception.'() { + given: 'a dummy session' + objectUnderTest.sessionMap.put('dummy-session', mockSession1) + and: 'the anchor name can be resolved' + def mockAnchorEntity = Mock(AnchorEntity) + mockAnchorEntity.getId() >> 456 + mockAnchorRepository.getByDataspaceAndName(_, _) >> mockAnchorEntity + and: 'timeLimiter throws an #exceptionDuringTest exception' + def mockTimeLimiter = Mock(TimeLimiter) + spiedTimeLimiterProvider.getTimeLimiter(_) >> mockTimeLimiter + mockTimeLimiter.callWithTimeout(*_) >> { throw exceptionDuringTest } + when: 'session tries to acquire anchor lock' + objectUnderTest.lockAnchor('dummy-session', 'some-dataspace', 'some-anchor', 123L) + then: 'a session manager exception is thrown with the expected detail' + def thrown = thrown(SessionManagerException) + thrown.details.contains(expectedExceptionDetail) + where: + exceptionDuringTest || expectedExceptionDetail + new InterruptedException() || 'interrupted' + new UncheckedExecutionException() || 'aborted' + } + + def 'Close a session' () { + given: 'a session in the session map' + objectUnderTest.sessionMap.putAll([testSessionId1:mockSession1]) + when: 'the session manager closes session' + objectUnderTest.closeSession('testSessionId1', commit) + then: 'commit or rollback is called on the transaction as appropriate' + if (commit) { + 1 * mockTransaction1.commit() + } else { + 1 * mockTransaction1.rollback() + } + and: 'the correct session is closed' + 1 * mockSession1.close() + where: + commit << [SessionManager.WITH_COMMIT, SessionManager.WITH_ROLLBACK] + } + + def 'Close session that does not exist.'() { + when: 'attempt to close session that does not exist' + objectUnderTest.closeSession('unknown session id', SessionManager.WITH_COMMIT) + then: 'a session manager exception is thrown with the unknown id in the details' + def thrown = thrown(SessionManagerException) + assert thrown.details.contains('unknown session id') + } + + def 'Hibernate exception while closing session.'() { + given: 'a test session in session map' + objectUnderTest.sessionMap.put('testSessionId', mockSession1) + and: 'a hibernate exception when closing that session' + def 
hibernateException = new HibernateException('test') + mockSession1.close() >> { throw hibernateException } + when: 'attempt to close session' + objectUnderTest.closeSession('testSessionId', SessionManager.WITH_COMMIT) + then: 'a session manager exception is thrown with the session id in the details' + def thrown = thrown(SessionManagerException) + assert thrown.details.contains('testSessionId') + and: 'the original exception is the cause' + assert thrown.cause == hibernateException + } + + def 'Attempt to lock anchor entity with session Id that does not exist'() { + when: 'attempt to acquire anchor lock with session that does not exist' + objectUnderTest.lockAnchor('unknown session id', '', '', 123L) + then: 'a session manager exception is thrown with the unknown id in the details' + def thrown = thrown(SessionManagerException) + thrown.details.contains('unknown session id') + } + + def 'Close all sessions in shutdown.'() { + given: 'sessions that hold transactions in the session map' + objectUnderTest.sessionMap.putAll([testSessionId1:mockSession1, otherSessionId:mockSession2]) + when: 'shutdown method to close all sessions is called' + objectUnderTest.closeAllSessionsInShutdown() + then: 'rollback is called on each transaction' + 1 * mockTransaction1.rollback() + 1 * mockTransaction2.rollback() + and: 'each session is closed' + 1 * mockSession1.close() + 1 * mockSession2.close() + then: 'session factory is closed' + 1 * mockCpsSessionFactory.closeSessionFactory() + } + +}
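Taken together, the SessionManager tests above describe the intended lifecycle of an explicit CPS session: start a session, optionally lock an anchor with a timeout, and always close the session with either commit or rollback (shutdown rolls back whatever is still open). The sketch below strings those calls together as a hypothetical caller might; it is not code from this patch, and it assumes the SessionManager instance is supplied by the application context and that startSession() returns the session id expected by the other calls.

```groovy
import org.onap.cps.ri.utils.SessionManager

// Hypothetical caller sketch (not taken from this patch): combines the SessionManager calls
// exercised by the specs above into the lifecycle they imply. The SessionManager instance is
// assumed to be injected, and startSession() is assumed to return the session id.
class SessionLifecycleExample {

    SessionManager sessionManager

    void updateUnderLock(String dataspaceName, String anchorName, Runnable changes) {
        def sessionId = sessionManager.startSession()
        try {
            // wait up to 500 ms for the anchor lock before a SessionManagerException is thrown
            sessionManager.lockAnchor(sessionId, dataspaceName, anchorName, 500L)
            changes.run()
            sessionManager.closeSession(sessionId, SessionManager.WITH_COMMIT)
        } catch (Exception e) {
            // roll back rather than persist partial changes if locking or the update fails
            sessionManager.closeSession(sessionId, SessionManager.WITH_ROLLBACK)
            throw e
        }
    }
}
```

diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy deleted file mode 100644 index c72c3046e8..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy +++ /dev/null @@ -1,281 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (c) 2021 Bell Canada. - * Modifications Copyright (C) 2021-2023 Nordix Foundation - * Modifications Copyright (C) 2022-2023 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 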
- * ============LICENSE_END========================================================= -*/ - -package org.onap.cps.spi.impl - -import com.fasterxml.jackson.databind.ObjectMapper -import org.hibernate.StaleStateException -import org.onap.cps.spi.FetchDescendantsOption -import org.onap.cps.spi.entities.AnchorEntity -import org.onap.cps.spi.entities.DataspaceEntity -import org.onap.cps.spi.entities.FragmentEntity - -import org.onap.cps.spi.exceptions.ConcurrencyException -import org.onap.cps.spi.exceptions.DataValidationException -import org.onap.cps.spi.model.DataNode -import org.onap.cps.spi.model.DataNodeBuilder -import org.onap.cps.spi.repository.AnchorRepository -import org.onap.cps.spi.repository.DataspaceRepository -import org.onap.cps.spi.repository.FragmentRepository -import org.onap.cps.spi.utils.SessionManager -import org.onap.cps.utils.JsonObjectMapper -import org.springframework.dao.DataIntegrityViolationException -import spock.lang.Specification -import java.util.stream.Collectors - -class CpsDataPersistenceServiceSpec extends Specification { - - def mockDataspaceRepository = Mock(DataspaceRepository) - def mockAnchorRepository = Mock(AnchorRepository) - def mockFragmentRepository = Mock(FragmentRepository) - def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) - def mockSessionManager = Mock(SessionManager) - - def objectUnderTest = Spy(new CpsDataPersistenceServiceImpl(mockDataspaceRepository, mockAnchorRepository, - mockFragmentRepository, jsonObjectMapper, mockSessionManager)) - - static def anchorEntity = new AnchorEntity(id: 123, dataspace: new DataspaceEntity(id: 1)) - - def setup() { - mockAnchorRepository.getByDataspaceAndName(_, _) >> anchorEntity - mockFragmentRepository.prefetchDescendantsOfFragmentEntities(_, _) >> { fetchDescendantsOption, fragmentEntities -> fragmentEntities } - mockFragmentRepository.findListByAnchorAndXpath(_, [] as Set) >> [] - } - - def 'Storing data nodes individually when batch operation fails'(){ - given: 'two data nodes and supporting repository mock behavior' - def dataNode1 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath1','OK') - def dataNode2 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath2','OK') - and: 'the batch store operation will fail' - mockFragmentRepository.saveAll(*_) >> { throw new DataIntegrityViolationException("Exception occurred") } - when: 'trying to store data nodes' - objectUnderTest.storeDataNodes('dataSpaceName', 'anchorName', [dataNode1, dataNode2]) - then: 'the two data nodes are saved individually' - 2 * mockFragmentRepository.save(_) - } - - def 'Handling of StaleStateException (caused by concurrent updates) during patch operation for data nodes.'() { - given: 'the system can update one datanode and has two more datanodes that throw an exception while updating' - def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([ - '/node1': 'OK', - '/node2': 'EXCEPTION', - '/node3': 'EXCEPTION']) - def updatedLeavesPerXPath = dataNodes.stream() - .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves)) - and: 'the batch update will therefore also fail' - mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") } - when: 'attempt batch update data nodes' - objectUnderTest.batchUpdateDataLeaves('some-dataspace', 'some-anchor', updatedLeavesPerXPath) - then: 'concurrency exception is thrown' - def thrown = thrown(ConcurrencyException) - assert thrown.message == 'Concurrent Transactions' - and: 'it does not contain the successful 
datanode' - assert !thrown.details.contains('/node1') - and: 'it contains the failed datanodes' - assert thrown.details.contains('/node2') - assert thrown.details.contains('/node3') - } - - def 'Batch update data node leaves and descendants: #scenario'(){ - given: 'the fragment repository returns fragment entities related to the xpath inputs' - mockFragmentRepository.findByAnchorAndXpathIn(_, [] as Set) >> [] - mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath'] as Set) >> [ - new FragmentEntity(1, '/test/xpath', null, "{\"id\":\"testId\"}", anchorEntity, [] as Set) - ] - mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath1', '/test/xpath2'] as Set) >> [ - new FragmentEntity(1, '/test/xpath1', null, "{\"id\":\"testId1\"}", anchorEntity, [] as Set), - new FragmentEntity(2, '/test/xpath2', null, "{\"id\":\"testId2\"}", anchorEntity, [] as Set) - ] - when: 'replace data node tree' - objectUnderTest.batchUpdateDataLeaves('dataspaceName', 'anchorName', - dataNodes.stream().collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves))) - then: 'call fragment repository save all method' - 1 * mockFragmentRepository.saveAll({fragmentEntities -> - assert fragmentEntities.sort() == expectedFragmentEntities.sort() - assert fragmentEntities.size() == expectedSize - }) - where: 'the following Data Type is passed' - scenario | dataNodes | expectedSize || expectedFragmentEntities - 'empty data node list' | [] | 0 || [] - 'one data node in list' | [new DataNode(xpath: '/test/xpath', leaves: ['id': 'testId'])] | 1 || [new FragmentEntity(xpath: '/test/xpath', attributes: '{"id":"testId"}', anchor: anchorEntity)] - 'multiple data nodes' | [new DataNode(xpath: '/test/xpath1', leaves: ['id': 'newTestId1']), new DataNode(xpath: '/test/xpath2', leaves: ['id': 'newTestId2'])] | 2 || [new FragmentEntity(xpath: '/test/xpath2', attributes: '{"id":"newTestId2"}', anchor: anchorEntity), new FragmentEntity(xpath: '/test/xpath1', attributes: '{"id":"newTestId1"}', anchor: anchorEntity)] - } - - def 'Handling of StaleStateException (caused by concurrent updates) during update data nodes and descendants.'() { - given: 'the system can update one datanode and has two more datanodes that throw an exception while updating' - def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([ - '/node1': 'OK', - '/node2': 'EXCEPTION', - '/node3': 'EXCEPTION']) - and: 'the batch update will therefore also fail' - mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") } - when: 'attempt batch update data nodes' - objectUnderTest.updateDataNodesAndDescendants('some-dataspace', 'some-anchor', dataNodes) - then: 'concurrency exception is thrown' - def thrown = thrown(ConcurrencyException) - assert thrown.message == 'Concurrent Transactions' - and: 'it does not contain the successful datanode' - assert !thrown.details.contains('/node1') - and: 'it contains the failed datanodes' - assert thrown.details.contains('/node2') - assert thrown.details.contains('/node3') - } - - def 'Retrieving a data node with a property JSON value of #scenario'() { - given: 'the db has a fragment with an attribute property JSON value of #scenario' - mockFragmentWithJson("{\"some attribute\": ${dataString}}") - when: 'getting the data node represented by this fragment' - def dataNode = objectUnderTest.getDataNodes('my-dataspace', 'my-anchor', - '/parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - then: 'the leaf is of the correct value and data type' - def attributeValue = 
dataNode[0].leaves.get('some attribute') - assert attributeValue == expectedValue - assert attributeValue.class == expectedDataClass - where: 'the following Data Type is passed' - scenario | dataString || expectedValue | expectedDataClass - 'just numbers' | '15174' || 15174 | Integer - 'number with dot' | '15174.32' || 15174.32 | Double - 'number with 0 value after dot' | '15174.0' || 15174.0 | Double - 'number with 0 value before dot' | '0.32' || 0.32 | Double - 'number higher than max int' | '2147483648' || 2147483648 | Long - 'just text' | '"Test"' || 'Test' | String - 'number with exponent' | '1.2345e5' || 1.2345e5 | Double - 'number higher than max int with dot' | '123456789101112.0' || 123456789101112.0 | Double - 'text and numbers' | '"String = \'1234\'"' || "String = '1234'" | String - 'number as String' | '"12345"' || '12345' | String - } - - def 'Retrieving a data node with invalid JSON'() { - given: 'a fragment with invalid JSON' - mockFragmentWithJson('{invalid json') - when: 'getting the data node represented by this fragment' - objectUnderTest.getDataNodes('my-dataspace', 'my-anchor', - '/parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - then: 'a data validation exception is thrown' - thrown(DataValidationException) - } - - def 'Retrieving multiple data nodes.'() { - given: 'fragment repository returns a collection of fragments' - mockFragmentRepository.findByAnchorAndXpathIn(anchorEntity, ['/xpath1', '/xpath2'] as Set) >> [ - new FragmentEntity(1, '/xpath1', null, null, anchorEntity, [] as Set), - new FragmentEntity(2, '/xpath2', null, null, anchorEntity, [] as Set) - ] - when: 'getting data nodes for 2 xpaths' - def result = objectUnderTest.getDataNodesForMultipleXpaths('some-dataspace', 'some-anchor', ['/xpath1', '/xpath2'], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - then: '2 data nodes are returned' - assert result.size() == 2 - } - - def 'start session'() { - when: 'start session' - objectUnderTest.startSession() - then: 'the session manager method to start session is invoked' - 1 * mockSessionManager.startSession() - } - - def 'close session'() { - given: 'session ID' - def someSessionId = 'someSessionId' - when: 'close session method is called with session ID as parameter' - objectUnderTest.closeSession(someSessionId) - then: 'the session manager method to close session is invoked with parameter' - 1 * mockSessionManager.closeSession(someSessionId, mockSessionManager.WITH_COMMIT) - } - - def 'Lock anchor.'(){ - when: 'lock anchor method is called with anchor entity details' - objectUnderTest.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L) - then: 'the session manager method to lock anchor is invoked with same parameters' - 1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L) - } - - def 'Replace data node and descendants: #scenario'(){ - given: 'the fragment repository returns fragment entities related to the xpath inputs' - mockFragmentRepository.findByAnchorAndXpathIn(_, [] as Set) >> [] - mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath'] as Set) >> [ - new FragmentEntity(1, '/test/xpath', null, '{"id":"testId"}', anchorEntity, [] as Set) - ] - when: 'replace data node tree' - objectUnderTest.updateDataNodesAndDescendants('dataspaceName', 'anchorName', dataNodes) - then: 'call fragment repository save all method' - 1 * mockFragmentRepository.saveAll({fragmentEntities -> assert fragmentEntities as List == expectedFragmentEntities}) - where: 'the following Data Type is passed' - 
scenario | dataNodes || expectedFragmentEntities - 'empty data node list' | [] || [] - 'one data node in list' | [new DataNode(xpath: '/test/xpath', leaves: ['id': 'testId'], childDataNodes: [])] || [new FragmentEntity(xpath: '/test/xpath', attributes: '{"id":"testId"}', anchor: anchorEntity, childFragments: [])] - } - - def 'Replace data nodes and descendants'() { - given: 'the fragment repository returns fragment entities related to the xpath inputs' - mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath1', '/test/xpath2'] as Set) >> [ - new FragmentEntity(1, '/test/xpath1', null, null, anchorEntity, [] as Set), - new FragmentEntity(2, '/test/xpath2', null, null, anchorEntity, [] as Set) - ] - and: 'some data nodes with descendants' - def dataNode1 = new DataNode(xpath: '/test/xpath1', leaves: ['id': 'testId1'], childDataNodes: [new DataNode(xpath: '/test/xpath1/child', leaves: ['id': 'childTestId1'])]) - def dataNode2 = new DataNode(xpath: '/test/xpath2', leaves: ['id': 'testId2'], childDataNodes: [new DataNode(xpath: '/test/xpath2/child', leaves: ['id': 'childTestId2'])]) - when: 'the fragment entities are update by the data nodes' - objectUnderTest.updateDataNodesAndDescendants('dataspace', 'anchor', [dataNode1, dataNode2]) - then: 'call fragment repository save all method is called with the updated fragments' - 1 * mockFragmentRepository.saveAll({fragmentEntities -> { - assert fragmentEntities.size() == 2 - def fragmentEntityPerXpath = fragmentEntities.collectEntries { [it.xpath, it] } - assert fragmentEntityPerXpath.get('/test/xpath1').childFragments.first().attributes == '{"id":"childTestId1"}' - assert fragmentEntityPerXpath.get('/test/xpath2').childFragments.first().attributes == '{"id":"childTestId2"}' - }}) - } - - def createDataNodeAndMockRepositoryMethodSupportingIt(xpath, scenario) { - def dataNode = new DataNodeBuilder().withXpath(xpath).build() - def fragmentEntity = new FragmentEntity(xpath: xpath, childFragments: []) - mockFragmentRepository.getByAnchorAndXpath(_, xpath) >> fragmentEntity - if ('EXCEPTION' == scenario) { - mockFragmentRepository.save(fragmentEntity) >> { throw new StaleStateException("concurrent updates") } - } - return dataNode - } - - def createDataNodesAndMockRepositoryMethodSupportingThem(Map xpathToScenarioMap) { - def dataNodes = [] - def fragmentEntities = [] - def fragmentId = 1 - xpathToScenarioMap.each { - def xpath = it.key - def scenario = it.value - def dataNode = new DataNodeBuilder().withXpath(xpath).build() - dataNodes.add(dataNode) - def fragmentEntity = new FragmentEntity(id: fragmentId, anchor: anchorEntity, xpath: xpath, childFragments: []) - fragmentEntities.add(fragmentEntity) - if ('EXCEPTION' == scenario) { - mockFragmentRepository.save(fragmentEntity) >> { throw new StaleStateException("concurrent updates") } - } - fragmentId++ - } - mockFragmentRepository.findByAnchorAndXpathIn(_, xpathToScenarioMap.keySet()) >> fragmentEntities - return dataNodes - } - - def mockFragmentWithJson(json) { - def fragmentEntity = new FragmentEntity(456, '/parent-01', null, json, anchorEntity, [] as Set) - mockFragmentRepository.findByAnchorAndXpathIn(_, ['/parent-01'] as Set) >> [fragmentEntity] - } - -} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy deleted file mode 100644 index 2e4dba2e9b..0000000000 --- 
a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy +++ /dev/null @@ -1,145 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022 Bell Canada. - * Modifications Copyright (C) 2021-2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ -package org.onap.cps.spi.impl - -import org.hibernate.exception.ConstraintViolationException -import org.onap.cps.spi.CpsAdminPersistenceService -import org.onap.cps.spi.CpsModulePersistenceService -import org.onap.cps.spi.entities.DataspaceEntity -import org.onap.cps.spi.entities.SchemaSetEntity -import org.onap.cps.spi.exceptions.DuplicatedYangResourceException -import org.onap.cps.spi.model.ModuleReference -import org.onap.cps.spi.repository.DataspaceRepository -import org.onap.cps.spi.repository.ModuleReferenceRepository -import org.onap.cps.spi.repository.SchemaSetRepository -import org.onap.cps.spi.repository.YangResourceRepository -import org.spockframework.spring.SpringBean -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.test.context.SpringBootTest -import org.springframework.dao.DataIntegrityViolationException -import org.springframework.retry.annotation.EnableRetry -import spock.lang.Specification - -import java.sql.SQLException - -@SpringBootTest(classes=[CpsModulePersistenceServiceImpl]) -@EnableRetry -class CpsModulePersistenceServiceConcurrencySpec extends Specification { - - @Autowired - CpsModulePersistenceService objectUnderTest - - @SpringBean - DataspaceRepository dataspaceRepository = Mock() - - @SpringBean - YangResourceRepository yangResourceRepository = Mock() - - @SpringBean - SchemaSetRepository schemaSetRepository = Mock() - - @SpringBean - CpsAdminPersistenceService cpsAdminPersistenceService = Mock() - - @SpringBean - ModuleReferenceRepository moduleReferenceRepository = Mock() - - def NEW_RESOURCE_NAME = 'some new resource' - def NEW_RESOURCE_CONTENT = 'module stores {\n' + - ' yang-version 1.1;\n' + - ' namespace "org:onap:ccsdk:sample";\n' + - '}' - - def newYangResourcesNameToContentMap = [(NEW_RESOURCE_NAME):NEW_RESOURCE_CONTENT] - - def yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539' - - def yangResourceChecksumDbConstraint = 'yang_resource_checksum_key' - - def sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum) - - def checksumIntegrityException = new DataIntegrityViolationException("checksum integrity exception", - new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint)) - - def 'Store new schema set, maximum retries.'() { - given: 'no pre-existing schemaset in database' - dataspaceRepository.getByName(_) >> new 
DataspaceEntity() - yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() - when: 'a new schemaset is stored' - objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap) - then: 'a duplicated yang resource exception is thrown ' - thrown(DuplicatedYangResourceException) - and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)' - 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } - } - - def 'Store new schema set, succeed on third attempt.'() { - given: 'no pre-existing schemaset in database' - dataspaceRepository.getByName(_) >> new DataspaceEntity() - yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() - when: 'a new schemaset is stored' - objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap) - then: 'no exception is thrown ' - noExceptionThrown() - and: 'the system will attempt to save the data 2 times with checksum integrity exception but then succeed' - 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } - 1 * yangResourceRepository.saveAll(_) >> [] - } - - def 'Store schema set using modules, maximum retries.'() { - given: 'map of new modules, a list of existing modules, module reference' - def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }'] - def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12") - def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule] - and: 'no pre-existing schemaset in database' - dataspaceRepository.getByName(_) >> new DataspaceEntity() - yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() - when: 'a new schemaset is stored from a module' - objectUnderTest.storeSchemaSetFromModules('some dataspace', 'some new schema set' , mapOfNewModules, listOfExistingModulesModuleReference) - then: 'a duplicated yang resource exception is thrown ' - thrown(DuplicatedYangResourceException) - and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)' - 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } - } - - def 'Store schema set using modules, succeed on third attempt.'() { - given: 'map of new modules, a list of existing modules, module reference' - def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }'] - def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12") - def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule] - and: 'no pre-existing schemaset in database' - def dataspaceEntity = new DataspaceEntity() - dataspaceRepository.getByName(_) >> new DataspaceEntity() - yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() - yangResourceRepository.getResourceIdsByModuleReferences(_) >> [] - and: 'can retrieve schemaset details after storing it' - def schemaSetEntity = new SchemaSetEntity() - schemaSetRepository.getByDataspaceAndName(dataspaceEntity, 'new schema set') >> schemaSetEntity - when: 'a new schemaset is stored from a module' - objectUnderTest.storeSchemaSetFromModules('some dataspace', 'new schema set' , mapOfNewModules, listOfExistingModulesModuleReference) - then: 'no exception is thrown ' - noExceptionThrown() - and: 'the system will attempt to save the data 2 times with checksum integrity exception but 
then succeed' - 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } - 1 * yangResourceRepository.saveAll(_) >> [] - } - -} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy deleted file mode 100644 index 3447a1c599..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy +++ /dev/null @@ -1,103 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (c) 2021 Bell Canada. - * Modifications Copyright (C) 2022-2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= -*/ - -package org.onap.cps.spi.impl - -import org.hibernate.exception.ConstraintViolationException -import org.onap.cps.spi.CpsModulePersistenceService -import org.onap.cps.spi.entities.SchemaSetEntity -import org.onap.cps.spi.exceptions.DuplicatedYangResourceException -import org.onap.cps.spi.model.ModuleReference -import org.onap.cps.spi.repository.DataspaceRepository -import org.onap.cps.spi.repository.ModuleReferenceRepository -import org.onap.cps.spi.repository.SchemaSetRepository -import org.onap.cps.spi.repository.YangResourceRepository -import org.springframework.dao.DataIntegrityViolationException -import spock.lang.Specification -import java.sql.SQLException - -/** - * Specification unit test class for CPS module persistence service. 
- */ -class CpsModulePersistenceServiceSpec extends Specification { - - CpsModulePersistenceService objectUnderTest - - def mockDataspaceRepository = Mock(DataspaceRepository) - def mockYangResourceRepository = Mock(YangResourceRepository) - def mockSchemaSetRepository = Mock(SchemaSetRepository) - def mockModuleReferenceRepository = Mock(ModuleReferenceRepository) - - def yangResourceName = 'my-yang-resource-name' - def yangResourceContent = 'module stores {\n' + - ' yang-version 1.1;\n' + - ' namespace "org:onap:ccsdk:sample";\n' + - '\n' + - ' prefix book-store;\n' + - '\n' + - ' revision "2020-09-15" {\n' + - ' description\n' + - ' "Sample Model";\n' + - ' }' + - '}' - - static yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539' - static yangResourceChecksumDbConstraint = 'yang_resource_checksum_key' - static sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum) - static checksumIntegrityException = new DataIntegrityViolationException('checksum integrity exception', - new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint)) - static checksumIntegrityExceptionWithoutChecksum = new DataIntegrityViolationException('checksum integrity exception', - new ConstraintViolationException('', new SQLException('no checksum'), yangResourceChecksumDbConstraint)) - static otherIntegrityException = new DataIntegrityViolationException('another integrity exception') - - def setup() { - objectUnderTest = new CpsModulePersistenceServiceImpl(mockYangResourceRepository, mockSchemaSetRepository, - mockDataspaceRepository, mockModuleReferenceRepository) - } - - def 'Store schema set error scenario: #scenario.'() { - given: 'no yang resource are currently saved' - mockYangResourceRepository.findAllByChecksumIn(_ as Collection) >> Collections.emptyList() - and: 'persisting yang resource raises db constraint exception (in case of concurrent requests for example)' - mockYangResourceRepository.saveAll(_) >> { throw dbException } - when: 'attempt to store schema set ' - def newYangResourcesNameToContentMap = [(yangResourceName):yangResourceContent] - objectUnderTest.storeSchemaSet('my-dataspace', 'my-schema-set', newYangResourcesNameToContentMap) - then: 'an #expectedThrownException is thrown' - def e = thrown(expectedThrownException) - assert e.getMessage().contains(expectedThrownExceptionMessage) - where: 'the following data is used' - scenario | dbException || expectedThrownException | expectedThrownExceptionMessage - 'checksum data failure' | checksumIntegrityException || DuplicatedYangResourceException | yangResourceChecksum - 'checksum failure without checksum' | checksumIntegrityExceptionWithoutChecksum || DuplicatedYangResourceException | 'no checksum found' - 'other data failure' | otherIntegrityException || DataIntegrityViolationException | 'another integrity exception' - } - - def 'Upgrade existing schema set'() { - given: 'old schema has empty yang resource' - mockYangResourceRepository.findAllByChecksumIn(_ as Collection) >> Collections.emptyList() - def schemaSetEntity = new SchemaSetEntity(id: 1) - mockSchemaSetRepository.getByDataspaceAndName(_, _) >> schemaSetEntity - when: 'schema set update is requested' - objectUnderTest.updateSchemaSetFromModules('my-dataspace', 'my-schemaset', [:], [new ModuleReference('some module name', 'some revision name')]) - then: 'no exception is thrown ' - noExceptionThrown() - } - -} diff --git 
a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/utils/CpsValidatorSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/utils/CpsValidatorSpec.groovy deleted file mode 100644 index 8d348443c7..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/utils/CpsValidatorSpec.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.impl.utils - -import org.onap.cps.spi.PaginationOption -import org.onap.cps.spi.exceptions.DataValidationException -import spock.lang.Specification - -class CpsValidatorSpec extends Specification { - - def objectUnderTest = new CpsValidatorImpl() - - def 'Validating a valid string.'() { - when: 'the string is validated using a valid name' - objectUnderTest.validateNameCharacters('name-with-no-spaces') - then: 'no exception is thrown' - noExceptionThrown() - } - - def 'Validating an invalid string.'() { - when: 'the string is validated using an invalid name' - objectUnderTest.validateNameCharacters(name) - then: 'a data validation exception is thrown' - def exceptionThrown = thrown(DataValidationException) - and: 'the error was encountered at the following index in #scenario' - assert exceptionThrown.getDetails().contains(expectedErrorMessage) - where: 'the following names are used' - scenario | name || expectedErrorMessage - 'position 5' | 'name with spaces' || 'name with spaces invalid token encountered at position 5' - 'position 9' | 'nameWith Space' || 'nameWith Space invalid token encountered at position 9' - } - - def 'Validating a list of valid names.'() { - given: 'a list of valid names' - def names = ['valid-name', 'another-valid-name'] - when: 'a list of strings is validated' - objectUnderTest.validateNameCharacters(names) - then: 'no exception is thrown' - noExceptionThrown() - } - - def 'Validating a list of names with invalid names.'() { - given: 'a list of names with an invalid name' - def names = ['valid-name', 'name with spaces'] - when: 'a list of strings is validated' - objectUnderTest.validateNameCharacters(names) - then: 'a data validation exception is thrown' - thrown(DataValidationException) - } - - def 'Validate Pagination option with invalid page index and size.'() { - when: 'the pagination option is validated using invalid options' - objectUnderTest.validatePaginationOption(new PaginationOption(-5, -2)) - then: 'a data validation exception is thrown' - def exceptionThrown = thrown(DataValidationException) - and: 'the error was encountered at the following index in #scenario' - assert exceptionThrown.getDetails().contains("Invalid page index or size") - } -} diff --git 
a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/EscapeUtilsSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/EscapeUtilsSpec.groovy deleted file mode 100644 index 52330e6251..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/EscapeUtilsSpec.groovy +++ /dev/null @@ -1,41 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.utils - -import spock.lang.Specification - -class EscapeUtilsSpec extends Specification { - - def 'Escape text for use in SQL LIKE operation.'() { - expect: 'SQL LIKE special characters to be escaped with forward-slash' - assert EscapeUtils.escapeForSqlLike(unescapedText) == escapedText - where: - unescapedText || escapedText - 'Only %, _, and \\ are special' || 'Only \\%, \\_, and \\\\ are special' - 'Others (./?$) are not special' || 'Others (./?$) are not special' - } - - def 'Escape text for use in SQL string literal.'() { - expect: 'single quotes to be doubled' - assert EscapeUtils.escapeForSqlStringLiteral("I'm escaping!") == "I''m escaping!" - } - -} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy deleted file mode 100644 index feda338b80..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy +++ /dev/null @@ -1,139 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.utils - -import com.google.common.util.concurrent.TimeLimiter -import com.google.common.util.concurrent.UncheckedExecutionException -import org.hibernate.HibernateException -import org.hibernate.Transaction -import org.onap.cps.spi.config.CpsSessionFactory -import org.onap.cps.spi.entities.AnchorEntity -import org.onap.cps.spi.exceptions.SessionManagerException -import org.onap.cps.spi.repository.AnchorRepository -import org.onap.cps.spi.repository.DataspaceRepository -import spock.lang.Specification -import org.hibernate.Session -import java.util.concurrent.ExecutionException - -class SessionManagerSpec extends Specification { - - def mockCpsSessionFactory = Mock(CpsSessionFactory) - def spiedTimeLimiterProvider = Spy(TimeLimiterProvider) - def mockDataspaceRepository = Mock(DataspaceRepository) - def mockAnchorRepository = Mock(AnchorRepository) - def mockSession1 = Mock(Session) - def mockSession2 = Mock(Session) - def mockTransaction1 = Mock(Transaction) - def mockTransaction2 = Mock(Transaction) - - def objectUnderTest = new SessionManager(mockCpsSessionFactory, spiedTimeLimiterProvider, mockDataspaceRepository, mockAnchorRepository) - - def setup(){ - mockSession1.getTransaction() >> mockTransaction1 - mockSession2.getTransaction() >> mockTransaction2 - } - - def 'Lock anchor entity with #exceptionDuringTest exception.'() { - given: 'a dummy session' - objectUnderTest.sessionMap.put('dummy-session', mockSession1) - and: 'the anchor name can be resolved' - def mockAnchorEntity = Mock(AnchorEntity) - mockAnchorEntity.getId() > 456 - mockAnchorRepository.getByDataspaceAndName(_, _) >> mockAnchorEntity - and: 'timeLimiter throws an #exceptionDuringTest exception' - def mockTimeLimiter = Mock(TimeLimiter) - spiedTimeLimiterProvider.getTimeLimiter(_) >> mockTimeLimiter - mockTimeLimiter.callWithTimeout(*_) >> { throw exceptionDuringTest } - when: 'session tries to acquire anchor lock' - objectUnderTest.lockAnchor('dummy-session', 'some-dataspace', 'some-anchor', 123L) - then: 'a session manager exception is thrown with the expected detail' - def thrown = thrown(SessionManagerException) - thrown.details.contains(expectedExceptionDetail) - where: - exceptionDuringTest || expectedExceptionDetail - new InterruptedException() || 'interrupted' - new UncheckedExecutionException() || 'aborted' - } - - def 'Close a session' () { - given: 'a session in the session map' - objectUnderTest.sessionMap.putAll([testSessionId1:mockSession1]) - when: 'the session manager closes session' - objectUnderTest.closeSession('testSessionId1', commit) - then: 'commit or rollback is called on the transaction as appropriate' - if (commit) { - 1 * mockTransaction1.commit() - } else { - 1 * mockTransaction1.rollback() - } - and: 'the correct session is closed' - 1 * mockSession1.close() - where: - commit << [SessionManager.WITH_COMMIT, SessionManager.WITH_ROLLBACK] - } - - def 'Close session that does not exist.'() { - when: 'attempt to close session that does not exist' - objectUnderTest.closeSession('unknown session id', SessionManager.WITH_COMMIT) - then: 'a session manager exception is thrown with the unknown id in the details' - def thrown = thrown(SessionManagerException) - assert thrown.details.contains('unknown session id') - } - - def 'Hibernate exception while closing session.'() { - given: 'a test session in session map' - 
objectUnderTest.sessionMap.put('testSessionId', mockSession1) - and: 'an hibernate exception when closing that session' - def hibernateException = new HibernateException('test') - mockSession1.close() >> { throw hibernateException } - when: 'attempt to close session' - objectUnderTest.closeSession('testSessionId', SessionManager.WITH_COMMIT) - then: 'a session manager exception is thrown with the session id in the details' - def thrown = thrown(SessionManagerException) - assert thrown.details.contains('testSessionId') - and: 'the original exception as cause' - assert thrown.cause == hibernateException - } - - def 'Attempt to lock anchor entity with session Id that does not exist'() { - when: 'attempt to acquire anchor lock with session that does not exist' - objectUnderTest.lockAnchor('unknown session id', '', '', 123L) - then: 'a session manager exception is thrown with the unknown id in the details' - def thrown = thrown(SessionManagerException) - thrown.details.contains('unknown session id') - } - - def 'Close all sessions in shutdown.'() { - given: 'sessions that holds transactions in the session map' - objectUnderTest.sessionMap.putAll([testSessionId1:mockSession1, otherSessionId:mockSession2]) - when: 'shutdown method to close all sessions is called' - objectUnderTest.closeAllSessionsInShutdown() - then: 'commit is called on each transaction' - 1 * mockTransaction1.rollback() - 1 * mockTransaction2.rollback() - and: 'each session is closed' - 1 * mockSession1.close() - 1 * mockSession2.close() - then: 'session factory is closed' - 1 * mockCpsSessionFactory.closeSessionFactory() - } - -} diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsAnchorServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsAnchorServiceImpl.java index c31e51b174..5ca0fe63d4 100644 --- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsAnchorServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsAnchorServiceImpl.java @@ -23,10 +23,10 @@ package org.onap.cps.api.impl; import java.util.Collection; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsAnchorService; +import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsAdminPersistenceService; import org.onap.cps.spi.CpsDataPersistenceService; import org.onap.cps.spi.model.Anchor; -import org.onap.cps.spi.utils.CpsValidator; import org.springframework.stereotype.Service; @Service diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java index 165d62cede..951770b053 100644 --- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java @@ -42,6 +42,7 @@ import org.onap.cps.api.CpsDeltaService; import org.onap.cps.cpspath.parser.CpsPathUtil; import org.onap.cps.events.CpsDataUpdateEventsService; import org.onap.cps.events.model.Data.Operation; +import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsDataPersistenceService; import org.onap.cps.spi.FetchDescendantsOption; import org.onap.cps.spi.exceptions.DataValidationException; @@ -49,7 +50,6 @@ import org.onap.cps.spi.model.Anchor; import org.onap.cps.spi.model.DataNode; import org.onap.cps.spi.model.DataNodeBuilder; import org.onap.cps.spi.model.DeltaReport; -import org.onap.cps.spi.utils.CpsValidator; import org.onap.cps.utils.ContentType; import org.onap.cps.utils.DataMapUtils; import org.onap.cps.utils.JsonObjectMapper; diff 
--git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataspaceServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataspaceServiceImpl.java index a7f5da4874..6bccf2a865 100644 --- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataspaceServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataspaceServiceImpl.java @@ -26,9 +26,9 @@ package org.onap.cps.api.impl; import java.util.Collection; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsDataspaceService; +import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsAdminPersistenceService; import org.onap.cps.spi.model.Dataspace; -import org.onap.cps.spi.utils.CpsValidator; import org.springframework.stereotype.Service; @Service diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDeltaServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDeltaServiceImpl.java index 4df3a28145..7819568ae1 100644 --- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDeltaServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDeltaServiceImpl.java @@ -20,7 +20,6 @@ package org.onap.cps.api.impl; - import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsModuleServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsModuleServiceImpl.java index 34610f3455..a600b22b61 100644 --- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsModuleServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsModuleServiceImpl.java @@ -30,6 +30,7 @@ import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsAnchorService; import org.onap.cps.api.CpsModuleService; +import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CascadeDeleteAllowed; import org.onap.cps.spi.CpsModulePersistenceService; import org.onap.cps.spi.exceptions.SchemaSetInUseException; @@ -37,7 +38,6 @@ import org.onap.cps.spi.model.Anchor; import org.onap.cps.spi.model.ModuleDefinition; import org.onap.cps.spi.model.ModuleReference; import org.onap.cps.spi.model.SchemaSet; -import org.onap.cps.spi.utils.CpsValidator; import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder; import org.onap.cps.yang.YangTextSchemaSourceSet; import org.springframework.stereotype.Service; diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsQueryServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsQueryServiceImpl.java index 1d7a7ceeb0..d1c98986e6 100644 --- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsQueryServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsQueryServiceImpl.java @@ -25,11 +25,11 @@ import io.micrometer.core.annotation.Timed; import java.util.Collection; import lombok.RequiredArgsConstructor; import org.onap.cps.api.CpsQueryService; +import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsDataPersistenceService; import org.onap.cps.spi.FetchDescendantsOption; import org.onap.cps.spi.PaginationOption; import org.onap.cps.spi.model.DataNode; -import org.onap.cps.spi.utils.CpsValidator; import org.springframework.stereotype.Service; @Service diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/YangTextSchemaSourceSetCache.java b/cps-service/src/main/java/org/onap/cps/api/impl/YangTextSchemaSourceSetCache.java index 4fdae5a307..8b85dfca32 100644 --- 
a/cps-service/src/main/java/org/onap/cps/api/impl/YangTextSchemaSourceSetCache.java +++ b/cps-service/src/main/java/org/onap/cps/api/impl/YangTextSchemaSourceSetCache.java @@ -27,8 +27,8 @@ import io.micrometer.core.instrument.Metrics; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import lombok.RequiredArgsConstructor; +import org.onap.cps.impl.utils.CpsValidator; import org.onap.cps.spi.CpsModulePersistenceService; -import org.onap.cps.spi.utils.CpsValidator; import org.onap.cps.yang.YangTextSchemaSourceSet; import org.onap.cps.yang.YangTextSchemaSourceSetBuilder; import org.springframework.cache.annotation.CacheConfig; diff --git a/cps-service/src/main/java/org/onap/cps/impl/utils/CpsValidator.java b/cps-service/src/main/java/org/onap/cps/impl/utils/CpsValidator.java new file mode 100644 index 0000000000..fd677eb2d2 --- /dev/null +++ b/cps-service/src/main/java/org/onap/cps/impl/utils/CpsValidator.java @@ -0,0 +1,47 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.impl.utils; + +import org.onap.cps.spi.PaginationOption; + +public interface CpsValidator { + + /** + * Validate characters in names within cps. + * + * @param names names of data to be validated + */ + void validateNameCharacters(final String... names); + + /** + * Validate characters in names within cps. + * + * @param names names of data to be validated + */ + void validateNameCharacters(final Iterable<String> names); + + /** + * Validate pagination option. + * + * @param paginationOption pagination option + */ + void validatePaginationOption(final PaginationOption paginationOption); +} diff --git a/cps-service/src/main/java/org/onap/cps/spi/utils/CpsValidator.java b/cps-service/src/main/java/org/onap/cps/spi/utils/CpsValidator.java deleted file mode 100644 index ceb75c09b2..0000000000 --- a/cps-service/src/main/java/org/onap/cps/spi/utils/CpsValidator.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.utils; - -import org.onap.cps.spi.PaginationOption; - -public interface CpsValidator { - - /** - * Validate characters in names within cps. - * - * @param names names of data to be validated - */ - void validateNameCharacters(final String... names); - - /** - * Validate characters in names within cps. - * - * @param names names of data to be validated - */ - void validateNameCharacters(final Iterable<String> names); - - /** - * Validate pagination option. - * - * @param paginationOption pagination option - */ - void validatePaginationOption(final PaginationOption paginationOption); -} diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsAnchorServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsAnchorServiceImplSpec.groovy index e58a5024b5..ccf943a470 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsAnchorServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsAnchorServiceImplSpec.groovy @@ -20,11 +20,11 @@ package org.onap.cps.api.impl +import org.onap.cps.impl.utils.CpsValidator import org.onap.cps.spi.CpsAdminPersistenceService import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.spi.exceptions.ModuleNamesNotFoundException import org.onap.cps.spi.model.Anchor -import org.onap.cps.spi.utils.CpsValidator import spock.lang.Specification class CpsAnchorServiceImplSpec extends Specification { diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy index a296716b59..9846b30158 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy @@ -23,14 +23,15 @@ package org.onap.cps.api.impl -import com.fasterxml.jackson.databind.ObjectMapper import ch.qos.logback.classic.Level import ch.qos.logback.classic.Logger import ch.qos.logback.core.read.ListAppender +import com.fasterxml.jackson.databind.ObjectMapper import org.onap.cps.TestUtils import org.onap.cps.api.CpsAnchorService import org.onap.cps.api.CpsDeltaService import org.onap.cps.events.CpsDataUpdateEventsService +import org.onap.cps.impl.utils.CpsValidator import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.spi.FetchDescendantsOption import org.onap.cps.spi.exceptions.ConcurrencyException @@ -40,12 +41,11 @@ import org.onap.cps.spi.exceptions.SessionManagerException import org.onap.cps.spi.exceptions.SessionTimeoutException import org.onap.cps.spi.model.Anchor import org.onap.cps.spi.model.DataNodeBuilder -import org.onap.cps.spi.utils.CpsValidator import org.onap.cps.utils.ContentType -import org.onap.cps.utils.YangParser -import org.onap.cps.utils.YangParserHelper import org.onap.cps.utils.JsonObjectMapper import org.onap.cps.utils.PrefixResolver +import org.onap.cps.utils.YangParser +import org.onap.cps.utils.YangParserHelper import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder import org.onap.cps.yang.YangTextSchemaSourceSet import org.onap.cps.yang.YangTextSchemaSourceSetBuilder @@ -53,6 +53,7 @@ import org.slf4j.LoggerFactory import org.springframework.context.annotation.AnnotationConfigApplicationContext import
spock.lang.Shared import spock.lang.Specification + import java.time.OffsetDateTime import static org.onap.cps.events.model.Data.Operation.DELETE diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataspaceServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataspaceServiceImplSpec.groovy index 8e17594bd1..ac7d4c0aa7 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataspaceServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataspaceServiceImplSpec.groovy @@ -20,9 +20,9 @@ package org.onap.cps.api.impl +import org.onap.cps.impl.utils.CpsValidator import org.onap.cps.spi.CpsAdminPersistenceService import org.onap.cps.spi.model.Dataspace -import org.onap.cps.spi.utils.CpsValidator import spock.lang.Specification class CpsDataspaceServiceImplSpec extends Specification { diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsModuleServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsModuleServiceImplSpec.groovy index 62eba0c397..1831506563 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsModuleServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsModuleServiceImplSpec.groovy @@ -23,19 +23,15 @@ package org.onap.cps.api.impl -import org.onap.cps.api.CpsAnchorService - -import static org.onap.cps.spi.CascadeDeleteAllowed.CASCADE_DELETE_ALLOWED -import static org.onap.cps.spi.CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED - import org.onap.cps.TestUtils +import org.onap.cps.api.CpsAnchorService +import org.onap.cps.impl.utils.CpsValidator import org.onap.cps.spi.CpsModulePersistenceService import org.onap.cps.spi.exceptions.DuplicatedYangResourceException import org.onap.cps.spi.exceptions.ModelValidationException import org.onap.cps.spi.exceptions.SchemaSetInUseException -import org.onap.cps.spi.model.ModuleDefinition -import org.onap.cps.spi.utils.CpsValidator import org.onap.cps.spi.model.Anchor +import org.onap.cps.spi.model.ModuleDefinition import org.onap.cps.spi.model.ModuleReference import org.onap.cps.spi.model.SchemaSet import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder @@ -43,6 +39,9 @@ import org.onap.cps.yang.YangTextSchemaSourceSet import org.onap.cps.yang.YangTextSchemaSourceSetBuilder import spock.lang.Specification +import static org.onap.cps.spi.CascadeDeleteAllowed.CASCADE_DELETE_ALLOWED +import static org.onap.cps.spi.CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED + class CpsModuleServiceImplSpec extends Specification { def mockCpsModulePersistenceService = Mock(CpsModulePersistenceService) diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsQueryServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsQueryServiceImplSpec.groovy index 1ad5017919..3b10669ddb 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsQueryServiceImplSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsQueryServiceImplSpec.groovy @@ -21,10 +21,10 @@ package org.onap.cps.api.impl +import org.onap.cps.impl.utils.CpsValidator import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.spi.FetchDescendantsOption import org.onap.cps.spi.PaginationOption -import org.onap.cps.spi.utils.CpsValidator import spock.lang.Specification class CpsQueryServiceImplSpec extends Specification { diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/E2ENetworkSliceSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/E2ENetworkSliceSpec.groovy index 
9e55e8f10a..05c8983fc2 100755 --- a/cps-service/src/test/groovy/org/onap/cps/api/impl/E2ENetworkSliceSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/E2ENetworkSliceSpec.groovy @@ -28,13 +28,13 @@ import org.onap.cps.TestUtils import org.onap.cps.api.CpsAnchorService import org.onap.cps.api.CpsDeltaService import org.onap.cps.events.CpsDataUpdateEventsService +import org.onap.cps.impl.utils.CpsValidator import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.spi.CpsModulePersistenceService import org.onap.cps.spi.model.Anchor -import org.onap.cps.spi.utils.CpsValidator +import org.onap.cps.utils.ContentType import org.onap.cps.utils.JsonObjectMapper import org.onap.cps.utils.PrefixResolver -import org.onap.cps.utils.ContentType import org.onap.cps.utils.YangParser import org.onap.cps.utils.YangParserHelper import org.onap.cps.yang.TimedYangTextSchemaSourceSetBuilder diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/YangTextSchemaSourceSetCacheSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/YangTextSchemaSourceSetCacheSpec.groovy index a9f50ee5b0..189e28521b 100644 --- a/cps-service/src/test/groovy/org/onap/cps/api/impl/YangTextSchemaSourceSetCacheSpec.groovy +++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/YangTextSchemaSourceSetCacheSpec.groovy @@ -22,6 +22,7 @@ package org.onap.cps.api.impl import org.onap.cps.TestUtils +import org.onap.cps.impl.utils.CpsValidator import org.onap.cps.spi.CpsModulePersistenceService import org.onap.cps.yang.YangTextSchemaSourceSet import org.onap.cps.yang.YangTextSchemaSourceSetBuilder @@ -34,8 +35,6 @@ import org.springframework.cache.annotation.EnableCaching import org.springframework.cache.caffeine.CaffeineCacheManager import org.springframework.test.context.ContextConfiguration import spock.lang.Specification -import org.onap.cps.spi.utils.CpsValidator - @SpringBootTest @EnableCaching diff --git a/docs/admin-guide.rst b/docs/admin-guide.rst index 1c4d7455f0..4a40f9b29c 100644 --- a/docs/admin-guide.rst +++ b/docs/admin-guide.rst @@ -111,9 +111,9 @@ Execute CPS service that you want to calculate total elapsed time and log as sho .. 
code-block:: - 2022-01-28 18:39:17.679 DEBUG [cps-application,e17da1571e518c59,e17da1571e518c59] 11128 --- [tp1901272535-29] o.onap.cps.aop.CpsLoggingAspectService : Execution time of : DataspaceRepository.getByName() with argument[s] = [test42] having result = org.onap.cps.spi.entities.DataspaceEntity@68ded236 :: 205 ms + 2022-01-28 18:39:17.679 DEBUG [cps-application,e17da1571e518c59,e17da1571e518c59] 11128 --- [tp1901272535-29] o.onap.cps.aop.CpsLoggingAspectService : Execution time of : DataspaceRepository.getByName() with argument[s] = [test42] having result = org.onap.cps.ri.models.DataspaceEntity@68ded236 :: 205 ms - 2022-01-28 18:39:17.726 DEBUG [cps-application,e17da1571e518c59,e17da1571e518c59] 11128 --- [tp1901272535-29] o.onap.cps.aop.CpsLoggingAspectService : Execution time of : AnchorRepository.getByDataspaceAndName() with argument[s] = [org.onap.cps.spi.entities.DataspaceEntity@68ded236, bookstore] having result = org.onap.cps.spi.entities.AnchorEntity@71c47fb1 :: 46 ms + 2022-01-28 18:39:17.726 DEBUG [cps-application,e17da1571e518c59,e17da1571e518c59] 11128 --- [tp1901272535-29] o.onap.cps.aop.CpsLoggingAspectService : Execution time of : AnchorRepository.getByDataspaceAndName() with argument[s] = [org.onap.cps.ri.models.DataspaceEntity@68ded236, bookstore] having result = org.onap.cps.ri.models.AnchorEntity@71c47fb1 :: 46 ms 2022-01-28 18:39:17.768 DEBUG [cps-application,e17da1571e518c59,e17da1571e518c59] 11128 --- [tp1901272535-29] o.onap.cps.aop.CpsLoggingAspectService : Execution time of : CpsAdminPersistenceServiceImpl.getAnchor() with argument[s] = [test42, bookstore] having result = Anchor(name=bookstore, dataspaceName=test42, schemaSetName=bookstore) :: 299 ms diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy index bd53c4ea13..587cbae619 100644 --- a/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/base/CpsIntegrationSpecBase.groovy @@ -39,10 +39,10 @@ import org.onap.cps.ncmp.impl.inventory.ParameterizedCmHandleQueryService import org.onap.cps.ncmp.impl.inventory.models.CmHandleState import org.onap.cps.ncmp.impl.inventory.sync.ModuleSyncWatchdog import org.onap.cps.ncmp.impl.utils.AlternateIdMatcher +import org.onap.cps.ri.repository.DataspaceRepository +import org.onap.cps.ri.utils.SessionManager import org.onap.cps.spi.exceptions.DataspaceNotFoundException import org.onap.cps.spi.model.DataNode -import org.onap.cps.spi.repository.DataspaceRepository -import org.onap.cps.spi.utils.SessionManager import org.onap.cps.utils.ContentType import org.onap.cps.utils.JsonObjectMapper import org.springframework.beans.factory.annotation.Autowired @@ -72,7 +72,7 @@ import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY @AutoConfigureMockMvc @EnableJpaRepositories(basePackageClasses = [DataspaceRepository]) @ComponentScan(basePackages = ['org.onap.cps']) -@EntityScan('org.onap.cps.spi.entities') +@EntityScan('org.onap.cps.ri.models') abstract class CpsIntegrationSpecBase extends Specification { @Shared diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/SessionManagerIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/SessionManagerIntegrationSpec.groovy index 428d5f9014..ad153d6a4a 100644 ---
a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/SessionManagerIntegrationSpec.groovy +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/SessionManagerIntegrationSpec.groovy @@ -21,8 +21,8 @@ package org.onap.cps.integration.functional.cps import org.onap.cps.integration.base.FunctionalSpecBase +import org.onap.cps.ri.utils.SessionManager import org.onap.cps.spi.exceptions.SessionManagerException -import org.onap.cps.spi.utils.SessionManager class SessionManagerIntegrationSpec extends FunctionalSpecBase { -- cgit 1.2.3-korg