-rw-r--r--  cps-application/src/main/resources/application.yml | 9
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java | 18
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy | 37
-rw-r--r--  cps-ncmp-service/src/test/resources/application.yml | 9
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java | 15
-rwxr-xr-x  cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java | 2
-rwxr-xr-x  cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java | 50
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentNativeRepository.java | 49
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentNativeRepositoryImpl.java | 72
-rwxr-xr-x  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java | 65
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java | 27
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java | 15
-rwxr-xr-x  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy | 53
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy | 18
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy | 19
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy | 9
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy | 2
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy | 6
-rw-r--r--  cps-service/src/main/java/org/onap/cps/cache/AnchorDataCacheConfig.java | 20
-rw-r--r--  cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java | 9
-rw-r--r--  cps-service/src/test/groovy/org/onap/cps/cache/AnchorDataCacheConfigSpec.groovy | 28
-rw-r--r--  cps-service/src/test/resources/application.yml | 7
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsAdminServiceLimits.groovy | 13
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimits.groovy | 21
24 files changed, 280 insertions, 293 deletions
diff --git a/cps-application/src/main/resources/application.yml b/cps-application/src/main/resources/application.yml
index 0007bc824..6d4583566 100644
--- a/cps-application/src/main/resources/application.yml
+++ b/cps-application/src/main/resources/application.yml
@@ -187,4 +187,11 @@ ncmp:
parallelism-level: 10
model-loader:
- subscription: false
\ No newline at end of file
+ subscription: false
+
+# Custom Hazelcast Config.
+hazelcast:
+ mode:
+ kubernetes:
+ enabled: ${HAZELCAST_MODE_KUBERNETES_ENABLED:false}
+ service-name: ${CPS_NCMP_SERVICE_NAME:"cps-and-ncmp-service"}
\ No newline at end of file
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java
index ac2bd4596..0b6726637 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java
@@ -28,19 +28,28 @@ import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
import java.util.concurrent.BlockingQueue;
+import lombok.extern.slf4j.Slf4j;
import org.onap.cps.spi.model.DataNode;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* Core infrastructure of the hazelcast distributed caches for Module Sync and Data Sync use cases.
*/
+@Slf4j
@Configuration
public class SynchronizationCacheConfig {
public static final int MODULE_SYNC_STARTED_TTL_SECS = 600;
public static final int DATA_SYNC_SEMAPHORE_TTL_SECS = 1800;
+ @Value("${hazelcast.mode.kubernetes.enabled}")
+ private boolean cacheKubernetesEnabled;
+
+ @Value("${hazelcast.mode.kubernetes.service-name}")
+ private String cacheKubernetesServiceName;
+
private static final QueueConfig commonQueueConfig = createQueueConfig();
private static final MapConfig moduleSyncStartedConfig = createMapConfig("moduleSyncStartedConfig");
private static final MapConfig dataSyncSemaphoresConfig = createMapConfig("dataSyncSemaphoresConfig");
@@ -92,6 +101,7 @@ public class SynchronizationCacheConfig {
config.addQueueConfig((QueueConfig) namedConfig);
}
config.setClusterName("synchronization-caches");
+ updateDiscoveryMode(config);
return config;
}
@@ -109,4 +119,12 @@ public class SynchronizationCacheConfig {
return mapConfig;
}
+ private void updateDiscoveryMode(final Config config) {
+ if (cacheKubernetesEnabled) {
+ log.info("Enabling kubernetes mode with service-name : {}", cacheKubernetesServiceName);
+ config.getNetworkConfig().getJoin().getKubernetesConfig().setEnabled(true)
+ .setProperty("service-name", cacheKubernetesServiceName);
+ }
+ }
+
}
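The SynchronizationCacheConfig change above switches Hazelcast member discovery from the default auto-detection to the Kubernetes discovery mechanism when hazelcast.mode.kubernetes.enabled is true, passing the Kubernetes service name through the "service-name" property. A minimal standalone sketch of the resulting join configuration, not part of this change set (the instance name and the printout are assumptions for illustration):

    import com.hazelcast.config.Config;
    import com.hazelcast.core.Hazelcast;
    import com.hazelcast.core.HazelcastInstance;

    public class KubernetesDiscoverySketch {
        public static void main(final String[] args) {
            final Config config = new Config("discovery-sketch");
            config.setClusterName("synchronization-caches");
            // Discover cluster members via the Kubernetes API (service endpoints) instead of auto-detection.
            config.getNetworkConfig().getJoin().getAutoDetectionConfig().setEnabled(false);
            config.getNetworkConfig().getJoin().getKubernetesConfig()
                    .setEnabled(true)
                    .setProperty("service-name", "cps-and-ncmp-service");
            final HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance(config);
            System.out.println(hazelcastInstance.getCluster().getMembers());
        }
    }

Outside a Kubernetes cluster this sketch will not find any peers, which is why the default configuration keeps kubernetes.enabled set to false.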
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
index 868ee7a70..6829d834d 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfigSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START========================================================
- * Copyright (C) 2022 Nordix Foundation
+ * Copyright (C) 2022-2023 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,6 +20,7 @@
package org.onap.cps.ncmp.api.impl.config.embeddedcache
+import com.hazelcast.config.Config
import com.hazelcast.core.Hazelcast
import com.hazelcast.map.IMap
import org.onap.cps.spi.model.DataNode
@@ -74,6 +75,40 @@ class SynchronizationCacheConfigSpec extends Specification {
assert dataSyncSemaphoresConfig.asyncBackupCount == 3
}
+ def 'Verify deployment network configs for Distributed objects'() {
+ given: 'the Module Sync Work Queue config'
+ def queueNetworkConfig = Hazelcast.getHazelcastInstanceByName('moduleSyncWorkQueue').config.networkConfig
+ and: 'the Module Sync Started Cm Handle Map config'
+ def moduleSyncStartedOnCmHandlesNetworkConfig = Hazelcast.getHazelcastInstanceByName('moduleSyncStartedOnCmHandles').config.networkConfig
+ and: 'the Data Sync Semaphores Map config'
+ def dataSyncSemaphoresNetworkConfig = Hazelcast.getHazelcastInstanceByName('dataSyncSemaphores').config.networkConfig
+ expect: 'system created instance with correct config of Module Sync Work Queue'
+ assert queueNetworkConfig.join.autoDetectionConfig.enabled
+ assert !queueNetworkConfig.join.kubernetesConfig.enabled
+ and: 'Module Sync Started Cm Handle Map has the correct settings'
+ assert moduleSyncStartedOnCmHandlesNetworkConfig.join.autoDetectionConfig.enabled
+ assert !moduleSyncStartedOnCmHandlesNetworkConfig.join.kubernetesConfig.enabled
+ and: 'Data Sync Semaphore Map has the correct settings'
+ assert dataSyncSemaphoresNetworkConfig.join.autoDetectionConfig.enabled
+ assert !dataSyncSemaphoresNetworkConfig.join.kubernetesConfig.enabled
+
+ }
+
+ def 'Verify network config'() {
+ given: 'Synchronization config object and test configuration'
+ def objectUnderTest = new SynchronizationCacheConfig()
+ def testConfig = new Config()
+ when: 'kubernetes properties are enabled'
+ objectUnderTest.cacheKubernetesEnabled = true
+ objectUnderTest.cacheKubernetesServiceName = 'test-service-name'
+ and: 'method called to update the discovery mode'
+ objectUnderTest.updateDiscoveryMode(testConfig)
+ then: 'applied properties are reflected'
+ assert testConfig.networkConfig.join.kubernetesConfig.enabled
+ assert testConfig.networkConfig.join.kubernetesConfig.properties.get('service-name') == 'test-service-name'
+
+ }
+
def 'Time to Live Verify for Module Sync Semaphore'() {
when: 'the key is inserted with a TTL of 1 second (Hazelcast TTL resolution is seconds!)'
moduleSyncStartedOnCmHandles.put('testKeyModuleSync', 'toBeExpired' as Object, 1, TimeUnit.SECONDS)
diff --git a/cps-ncmp-service/src/test/resources/application.yml b/cps-ncmp-service/src/test/resources/application.yml
index e66f23d23..679248ba8 100644
--- a/cps-ncmp-service/src/test/resources/application.yml
+++ b/cps-ncmp-service/src/test/resources/application.yml
@@ -35,4 +35,11 @@ ncmp:
parallelism-level: 3
model-loader:
- subscription: true
\ No newline at end of file
+ subscription: true
+
+# Custom Hazelcast Config.
+hazelcast:
+ mode:
+ kubernetes:
+ enabled: false
+ service-name: "cps-and-ncmp-service"
\ No newline at end of file
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
index 369e5289b..3d9105f68 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
@@ -494,21 +494,6 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
}
@Override
- public void updateDataNodeAndDescendants(final String dataspaceName, final String anchorName,
- final DataNode dataNode) {
- final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName);
- final FragmentEntity fragmentEntity = getFragmentEntity(anchorEntity, dataNode.getXpath());
- updateFragmentEntityAndDescendantsWithDataNode(fragmentEntity, dataNode);
- try {
- fragmentRepository.save(fragmentEntity);
- } catch (final StaleStateException staleStateException) {
- throw new ConcurrencyException("Concurrent Transactions",
- String.format("dataspace :'%s', Anchor : '%s' and xpath: '%s' is updated by another transaction.",
- dataspaceName, anchorName, dataNode.getXpath()));
- }
- }
-
- @Override
public void updateDataNodesAndDescendants(final String dataspaceName, final String anchorName,
final Collection<DataNode> updatedDataNodes) {
final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName);
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
index b4366de75..cd1457e35 100755
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
@@ -159,7 +159,7 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ
@Override
public Collection<SchemaSet> getSchemaSetsByDataspaceName(final String dataspaceName) {
final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName);
- final List<SchemaSetEntity> schemaSetEntities = schemaSetRepository.getByDataspace(dataspaceEntity);
+ final List<SchemaSetEntity> schemaSetEntities = schemaSetRepository.findByDataspace(dataspaceEntity);
return schemaSetEntities.stream()
.map(CpsModulePersistenceServiceImpl::toSchemaSet).collect(Collectors.toList());
}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java
index f7b586d7b..fe9ff9e2f 100755
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/AnchorRepository.java
@@ -27,6 +27,7 @@ import org.onap.cps.spi.entities.DataspaceEntity;
import org.onap.cps.spi.entities.SchemaSetEntity;
import org.onap.cps.spi.exceptions.AnchorNotFoundException;
import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
@@ -45,11 +46,27 @@ public interface AnchorRepository extends JpaRepository<AnchorEntity, Integer> {
Collection<AnchorEntity> findAllBySchemaSet(SchemaSetEntity schemaSetEntity);
- Collection<AnchorEntity> findAllByDataspaceAndNameIn(DataspaceEntity dataspaceEntity,
- Collection<String> anchorNames);
+ @Query(value = "SELECT * FROM anchor WHERE dataspace_id = :dataspaceId AND name = ANY (:anchorNames)",
+ nativeQuery = true)
+ Collection<AnchorEntity> findAllByDataspaceIdAndNameIn(@Param("dataspaceId") int dataspaceId,
+ @Param("anchorNames") String[] anchorNames);
- Collection<AnchorEntity> findAllByDataspaceAndSchemaSetNameIn(DataspaceEntity dataspaceEntity,
- Collection<String> schemaSetNames);
+ default Collection<AnchorEntity> findAllByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity,
+ final Collection<String> anchorNames) {
+ return findAllByDataspaceIdAndNameIn(dataspaceEntity.getId(), anchorNames.toArray(new String[0]));
+ }
+
+ @Query(value = "SELECT a.* FROM anchor a"
+ + " LEFT OUTER JOIN schema_set s ON a.schema_set_id = s.id"
+ + " WHERE a.dataspace_id = :dataspaceId AND s.name = ANY (:schemaSetNames)",
+ nativeQuery = true)
+ Collection<AnchorEntity> findAllByDataspaceIdAndSchemaSetNameIn(@Param("dataspaceId") int dataspaceId,
+ @Param("schemaSetNames") String[] schemaSetNames);
+
+ default Collection<AnchorEntity> findAllByDataspaceAndSchemaSetNameIn(final DataspaceEntity dataspaceEntity,
+ final Collection<String> schemaSetNames) {
+ return findAllByDataspaceIdAndSchemaSetNameIn(dataspaceEntity.getId(), schemaSetNames.toArray(new String[0]));
+ }
Integer countByDataspace(DataspaceEntity dataspaceEntity);
@@ -57,12 +74,29 @@ public interface AnchorRepository extends JpaRepository<AnchorEntity, Integer> {
+ "JOIN schema_set_yang_resources ON schema_set_yang_resources.yang_resource_id = yang_resource.id\n"
+ "JOIN schema_set ON schema_set.id = schema_set_yang_resources.schema_set_id\n"
+ "JOIN anchor ON anchor.schema_set_id = schema_set.id\n"
- + "WHERE schema_set.dataspace_id = :dataspaceId AND module_name IN (:moduleNames)\n"
+ + "WHERE schema_set.dataspace_id = :dataspaceId AND module_name = ANY (:moduleNames)\n"
+ "GROUP BY anchor.id, anchor.name, anchor.dataspace_id, anchor.schema_set_id\n"
+ "HAVING COUNT(DISTINCT module_name) = :sizeOfModuleNames", nativeQuery = true)
Collection<AnchorEntity> getAnchorsByDataspaceIdAndModuleNames(@Param("dataspaceId") int dataspaceId,
- @Param("moduleNames") Collection<String> moduleNames, @Param("sizeOfModuleNames") int sizeOfModuleNames);
+ @Param("moduleNames") String[] moduleNames,
+ @Param("sizeOfModuleNames") int sizeOfModuleNames);
+
+ default Collection<AnchorEntity> getAnchorsByDataspaceIdAndModuleNames(final int dataspaceId,
+ final Collection<String> moduleNames,
+ final int sizeOfModuleNames) {
+ final String[] moduleNamesArray = moduleNames.toArray(new String[0]);
+ return getAnchorsByDataspaceIdAndModuleNames(dataspaceId, moduleNamesArray, sizeOfModuleNames);
+ }
+
+ @Modifying
+ @Query(value = "DELETE FROM anchor WHERE dataspace_id = :dataspaceId AND name = ANY (:anchorNames)",
+ nativeQuery = true)
+ void deleteAllByDataspaceIdAndNameIn(@Param("dataspaceId") int dataspaceId,
+ @Param("anchorNames") String[] anchorNames);
+
+ default void deleteAllByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity,
+ final Collection<String> anchorNames) {
+ deleteAllByDataspaceIdAndNameIn(dataspaceEntity.getId(), anchorNames.toArray(new String[0]));
+ }
- void deleteAllByDataspaceAndNameIn(DataspaceEntity dataspaceEntity,
- Collection<String> anchorNames);
}
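The AnchorRepository changes above all follow one pattern: queries that expanded a Collection into an IN (...) clause (one bind parameter per element) are replaced by native queries that bind the whole collection as a single Postgres array via = ANY (:param), with default methods keeping the Collection-based signatures for existing callers. Binding one array instead of one placeholder per element is what removes the bind-parameter ceiling. A minimal plain-JDBC illustration of the same idea, reusing the anchor table from the diff (an assumption for illustration only, not the project's Spring Data setup):

    import java.sql.Array;
    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.Collection;

    final class AnyArrayQuerySketch {

        static int countAnchorsByName(final Connection connection, final Collection<String> anchorNames)
                throws SQLException {
            // A single bind parameter, no matter how many names are supplied.
            final Array namesArray = connection.createArrayOf("text", anchorNames.toArray(new String[0]));
            try (PreparedStatement statement =
                         connection.prepareStatement("SELECT COUNT(*) FROM anchor WHERE name = ANY (?)")) {
                statement.setArray(1, namesArray);
                try (ResultSet resultSet = statement.executeQuery()) {
                    resultSet.next();
                    return resultSet.getInt(1);
                }
            }
        }
    }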
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentNativeRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentNativeRepository.java
deleted file mode 100644
index bad68f7e5..000000000
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentNativeRepository.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2023 Nordix Foundation
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.spi.repository;
-
-import java.util.Collection;
-
-/**
- * This interface is used in delete fragment entity by id with child using native sql queries.
- */
-public interface FragmentNativeRepository {
-
- /**
- * Delete fragment entities for each supplied xpath.
- * This method will delete list elements or other data nodes, but not whole lists.
- * Non-existing xpaths will not result in an exception.
- * @param anchorId the id of the anchor
- * @param xpaths xpaths of data nodes to remove
- */
- void deleteByAnchorIdAndXpaths(int anchorId, Collection<String> xpaths);
-
- /**
- * Delete fragment entities that are list elements of each supplied list xpath.
- * For example, if xpath '/parent/list' is provided, then list all elements in '/parent/list' will be deleted,
- * e.g. /parent/list[@key='A'], /parent/list[@key='B'].
- * This method will only delete whole lists by xpath; xpaths to list elements or other data nodes will be ignored.
- * Non-existing xpaths will not result in an exception.
- * @param anchorId the id of the anchor
- * @param listXpaths xpaths of whole lists to remove
- */
- void deleteListsByAnchorIdAndXpaths(int anchorId, Collection<String> listXpaths);
-}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentNativeRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentNativeRepositoryImpl.java
deleted file mode 100644
index 04b7080de..000000000
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentNativeRepositoryImpl.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2023 Nordix Foundation
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.spi.repository;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.stream.Collectors;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import lombok.RequiredArgsConstructor;
-
-@RequiredArgsConstructor
-public class FragmentNativeRepositoryImpl implements FragmentNativeRepository {
-
- @PersistenceContext
- private final EntityManager entityManager;
-
- @Override
- public void deleteByAnchorIdAndXpaths(final int anchorId, final Collection<String> xpaths) {
- final String queryString =
- "DELETE FROM fragment f WHERE f.anchor_id = ? AND (f.xpath IN (:parameterPlaceholders))";
- executeUpdateWithAnchorIdAndCollection(queryString, anchorId, xpaths);
- }
-
- @Override
- public void deleteListsByAnchorIdAndXpaths(final int anchorId, final Collection<String> listXpaths) {
- final Collection<String> listXpathPatterns =
- listXpaths.stream().map(listXpath -> listXpath + "[%").collect(Collectors.toSet());
- final String queryString =
- "DELETE FROM fragment f WHERE f.anchor_id = ? AND (f.xpath LIKE ANY (array[:parameterPlaceholders]))";
- executeUpdateWithAnchorIdAndCollection(queryString, anchorId, listXpathPatterns);
- }
-
- // Accept security hotspot as placeholders in SQL query are created internally, not from user input.
- @SuppressWarnings("squid:S2077")
- private void executeUpdateWithAnchorIdAndCollection(final String sqlTemplate, final int anchorId,
- final Collection<String> collection) {
- if (!collection.isEmpty()) {
- final String parameterPlaceholders = String.join(",", Collections.nCopies(collection.size(), "?"));
- final String queryStringWithParameterPlaceholders =
- sqlTemplate.replaceFirst(":parameterPlaceholders\\b", parameterPlaceholders);
-
- final Query query = entityManager.createNativeQuery(queryStringWithParameterPlaceholders);
- query.setParameter(1, anchorId);
- int parameterIndex = 2;
- for (final String parameterValue : collection) {
- query.setParameter(parameterIndex++, parameterValue);
- }
- query.executeUpdate();
- }
- }
-
-}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
index d486a39c7..8114f1055 100755
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
@@ -37,8 +37,7 @@ import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
@Repository
-public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery,
- FragmentNativeRepository {
+public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery {
Optional<FragmentEntity> findByAnchorAndXpath(AnchorEntity anchorEntity, String xpath);
@@ -52,13 +51,39 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
@Query("SELECT f FROM FragmentEntity f WHERE anchor = :anchor")
List<FragmentExtract> findAllExtractsByAnchor(@Param("anchor") AnchorEntity anchorEntity);
- List<FragmentEntity> findAllByAnchorAndXpathIn(AnchorEntity anchorEntity, Collection<String> xpath);
+ @Query(value = "SELECT * FROM fragment WHERE xpath = ANY (:xpaths)", nativeQuery = true)
+ List<FragmentEntity> findAllByXpathIn(@Param("xpaths") String[] xpath);
- List<FragmentEntity> findAllByXpathIn(Collection<String> xpath);
+ default List<FragmentEntity> findAllByXpathIn(final Collection<String> xpaths) {
+ return findAllByXpathIn(xpaths.toArray(new String[0]));
+ }
+
+ @Modifying
+ @Query(value = "DELETE FROM fragment WHERE anchor_id = ANY (:anchorIds)", nativeQuery = true)
+ void deleteByAnchorIdIn(@Param("anchorIds") int[] anchorIds);
+
+ default void deleteByAnchorIn(final Collection<AnchorEntity> anchorEntities) {
+ deleteByAnchorIdIn(anchorEntities.stream().map(AnchorEntity::getId).mapToInt(id -> id).toArray());
+ }
@Modifying
- @Query("DELETE FROM FragmentEntity WHERE anchor IN (:anchors)")
- void deleteByAnchorIn(@Param("anchors") Collection<AnchorEntity> anchorEntities);
+ @Query(value = "DELETE FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)", nativeQuery = true)
+ void deleteByAnchorIdAndXpaths(@Param("anchorId") int anchorId, @Param("xpaths") String[] xpaths);
+
+ default void deleteByAnchorIdAndXpaths(final int anchorId, final Collection<String> xpaths) {
+ deleteByAnchorIdAndXpaths(anchorId, xpaths.toArray(new String[0]));
+ }
+
+ @Modifying
+ @Query(value = "DELETE FROM fragment f WHERE anchor_id = :anchorId AND xpath LIKE ANY (:xpathPatterns)",
+ nativeQuery = true)
+ void deleteByAnchorIdAndXpathLikeAny(@Param("anchorId") int anchorId,
+ @Param("xpathPatterns") String[] xpathPatterns);
+
+ default void deleteListsByAnchorIdAndXpaths(int anchorId, Collection<String> xpaths) {
+ final String[] listXpathPatterns = xpaths.stream().map(xpath -> xpath + "[%").toArray(String[]::new);
+ deleteByAnchorIdAndXpathLikeAny(anchorId, listXpathPatterns);
+ }
@Query("SELECT f FROM FragmentEntity f WHERE anchor = :anchor"
+ " AND (xpath = :parentXpath OR xpath LIKE CONCAT(:parentXpath,'/%'))")
@@ -66,13 +91,6 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
@Param("parentXpath") String parentXpath);
@Query(value = "SELECT id, anchor_id AS anchorId, xpath, parent_id AS parentId,"
- + " CAST(attributes AS TEXT) AS attributes"
- + " FROM FRAGMENT WHERE "
- + "( xpath = :parentXpath OR xpath LIKE CONCAT(:parentXpath,'/%') )",
- nativeQuery = true)
- List<FragmentExtract> findByParentXpath(@Param("parentXpath") String parentXpath);
-
- @Query(value = "SELECT id, anchor_id AS anchorId, xpath, parent_id AS parentId,"
+ " CAST(attributes AS TEXT) AS attributes"
+ " FROM FRAGMENT WHERE anchor_id = :anchorId"
+ " AND xpath ~ :xpathRegex",
@@ -80,9 +98,15 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
List<FragmentExtract> quickFindWithDescendants(@Param("anchorId") int anchorId,
@Param("xpathRegex") String xpathRegex);
- @Query("SELECT xpath FROM FragmentEntity WHERE anchor = :anchor AND xpath IN :xpaths")
- List<String> findAllXpathByAnchorAndXpathIn(@Param("anchor") AnchorEntity anchorEntity,
- @Param("xpaths") Collection<String> xpaths);
+ @Query(value = "SELECT xpath FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)",
+ nativeQuery = true)
+ List<String> findAllXpathByAnchorIdAndXpathIn(@Param("anchorId") int anchorId,
+ @Param("xpaths") String[] xpaths);
+
+ default List<String> findAllXpathByAnchorAndXpathIn(final AnchorEntity anchorEntity,
+ final Collection<String> xpaths) {
+ return findAllXpathByAnchorIdAndXpathIn(anchorEntity.getId(), xpaths.toArray(new String[0]));
+ }
boolean existsByAnchorAndXpathStartsWith(AnchorEntity anchorEntity, String xpath);
@@ -93,7 +117,7 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
= "WITH RECURSIVE parent_search AS ("
+ " SELECT id, 0 AS depth "
+ " FROM fragment "
- + " WHERE anchor_id = :anchorId AND xpath IN :xpaths "
+ + " WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths) "
+ " UNION "
+ " SELECT c.id, depth + 1 "
+ " FROM fragment c INNER JOIN parent_search p ON c.parent_id = p.id"
@@ -104,9 +128,14 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
nativeQuery = true
)
List<FragmentExtract> findExtractsWithDescendants(@Param("anchorId") int anchorId,
- @Param("xpaths") Collection<String> xpaths,
+ @Param("xpaths") String[] xpaths,
@Param("maxDepth") int maxDepth);
+ default List<FragmentExtract> findExtractsWithDescendants(final int anchorId, final Collection<String> xpaths,
+ final int maxDepth) {
+ return findExtractsWithDescendants(anchorId, xpaths.toArray(new String[0]), maxDepth);
+ }
+
@Query(value = "SELECT id, anchor_id AS anchorId, xpath, parent_id AS parentId,"
+ " CAST(attributes AS TEXT) AS attributes"
+ " FROM FRAGMENT WHERE xpath ~ :xpathRegex",
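The default method deleteListsByAnchorIdAndXpaths above takes over the logic of the removed FragmentNativeRepositoryImpl: each whole-list xpath is turned into a LIKE pattern by appending "[%", so '/parent/list' matches list elements such as /parent/list[@key='A'] through xpath LIKE ANY (:xpathPatterns). A tiny standalone sketch of that pattern conversion (illustrative only, not project code):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.List;

    final class ListXpathPatternSketch {

        // Mirrors the conversion used by deleteListsByAnchorIdAndXpaths above.
        static String[] toListElementPatterns(final Collection<String> listXpaths) {
            return listXpaths.stream().map(listXpath -> listXpath + "[%").toArray(String[]::new);
        }

        public static void main(final String[] args) {
            System.out.println(Arrays.toString(toListElementPatterns(List.of("/parent/list"))));
            // prints: [/parent/list[%]  -> matches /parent/list[@key='A'], /parent/list[@key='B'], ...
        }
    }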
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java
index 3263f3447..3c5f973cb 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/SchemaSetRepository.java
@@ -24,7 +24,6 @@ package org.onap.cps.spi.repository;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
-import java.util.stream.Collectors;
import org.onap.cps.spi.entities.DataspaceEntity;
import org.onap.cps.spi.entities.SchemaSetEntity;
import org.onap.cps.spi.exceptions.SchemaSetNotFoundException;
@@ -44,7 +43,7 @@ public interface SchemaSetRepository extends JpaRepository<SchemaSetEntity, Inte
* @param dataspaceEntity dataspace entity
* @return list of schema set entity
*/
- Collection<SchemaSetEntity> findByDataspace(DataspaceEntity dataspaceEntity);
+ List<SchemaSetEntity> findByDataspace(DataspaceEntity dataspaceEntity);
Integer countByDataspace(DataspaceEntity dataspaceEntity);
@@ -61,24 +60,20 @@ public interface SchemaSetRepository extends JpaRepository<SchemaSetEntity, Inte
.orElseThrow(() -> new SchemaSetNotFoundException(dataspaceEntity.getName(), schemaSetName));
}
- /**
- * Gets all schema sets for a given dataspace.
- *
- * @param dataspaceEntity dataspace entity
- * @return list of schema set entity
- * @throws SchemaSetNotFoundException if SchemaSet not found
- */
- default List<SchemaSetEntity> getByDataspace(final DataspaceEntity dataspaceEntity) {
- return findByDataspace(dataspaceEntity).stream().collect(Collectors.toList());
- }
+ @Modifying
+ @Query(value = "DELETE FROM schema_set WHERE dataspace_id = :dataspaceId AND name = ANY (:schemaSetNames)",
+ nativeQuery = true)
+ void deleteByDataspaceIdAndNameIn(@Param("dataspaceId") final int dataspaceId,
+ @Param("schemaSetNames") final String[] schemaSetNames);
/**
* Delete multiple schema sets in a given dataspace.
* @param dataspaceEntity dataspace entity
* @param schemaSetNames schema set names
*/
- @Modifying
- @Query("DELETE FROM SchemaSetEntity s WHERE s.dataspace = :dataspaceEntity AND s.name IN (:schemaSetNames)")
- void deleteByDataspaceAndNameIn(@Param("dataspaceEntity") DataspaceEntity dataspaceEntity,
- @Param("schemaSetNames") Collection<String> schemaSetNames);
+ default void deleteByDataspaceAndNameIn(final DataspaceEntity dataspaceEntity,
+ final Collection<String> schemaSetNames) {
+ deleteByDataspaceIdAndNameIn(dataspaceEntity.getId(), schemaSetNames.toArray(new String[0]));
+ }
+
}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java
index fff0a6a03..7584ff65c 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/YangResourceRepository.java
@@ -35,7 +35,11 @@ import org.springframework.stereotype.Repository;
public interface YangResourceRepository extends JpaRepository<YangResourceEntity, Long>,
YangResourceNativeRepository, SchemaSetYangResourceRepository {
- List<YangResourceEntity> findAllByChecksumIn(Set<String> checksum);
+ List<YangResourceEntity> findAllByChecksumIn(String[] checksums);
+
+ default List<YangResourceEntity> findAllByChecksumIn(final Collection<String> checksums) {
+ return findAllByChecksumIn(checksums.toArray(new String[0]));
+ }
@Query(value = "SELECT DISTINCT\n"
+ "yang_resource.module_name AS module_name,\n"
@@ -86,9 +90,14 @@ public interface YangResourceRepository extends JpaRepository<YangResourceEntity
+ "schema_set.id\n"
+ "JOIN yang_resource ON yang_resource.id = schema_set_yang_resources.yang_resource_id\n"
+ "WHERE\n"
- + "dataspace.name = :dataspaceName and yang_resource.module_Name IN (:moduleNames)", nativeQuery = true)
+ + "dataspace.name = :dataspaceName and yang_resource.module_Name = ANY (:moduleNames)", nativeQuery = true)
Set<YangResourceModuleReference> findAllModuleReferencesByDataspaceAndModuleNames(
- @Param("dataspaceName") String dataspaceName, @Param("moduleNames") Collection<String> moduleNames);
+ @Param("dataspaceName") String dataspaceName, @Param("moduleNames") String[] moduleNames);
+
+ default Set<YangResourceModuleReference> findAllModuleReferencesByDataspaceAndModuleNames(
+ final String dataspaceName, final Collection<String> moduleNames) {
+ return findAllModuleReferencesByDataspaceAndModuleNames(dataspaceName, moduleNames.toArray(new String[0]));
+ }
@Modifying
@Query(value = "DELETE FROM yang_resource yr WHERE NOT EXISTS "
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
index e60afa78d..93d766201 100755
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
@@ -23,7 +23,6 @@
package org.onap.cps.spi.impl
-import com.fasterxml.jackson.databind.ObjectMapper
import com.google.common.collect.ImmutableSet
import org.onap.cps.spi.CpsDataPersistenceService
import org.onap.cps.spi.entities.FragmentEntity
@@ -35,7 +34,6 @@ import org.onap.cps.spi.exceptions.DataNodeNotFoundException
import org.onap.cps.spi.exceptions.DataspaceNotFoundException
import org.onap.cps.spi.model.DataNode
import org.onap.cps.spi.model.DataNodeBuilder
-import org.onap.cps.utils.JsonObjectMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.jdbc.Sql
@@ -49,7 +47,6 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
@Autowired
CpsDataPersistenceService objectUnderTest
- static JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())
static DataNodeBuilder dataNodeBuilder = new DataNodeBuilder()
static final String SET_DATA = '/data/fragment.sql'
@@ -353,11 +350,11 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Update data node and descendants by removing descendants.'() {
- given: 'data node object with leaves updated, no children'
- def submittedDataNode = buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [])
+ def 'Update data nodes and descendants by removing descendants.'() {
+ given: 'data nodes with leaves updated, no children'
+ def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [])]
when: 'update data nodes and descendants is performed'
- objectUnderTest.updateDataNodeAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
+ objectUnderTest.updateDataNodesAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNodes)
then: 'leaves have been updated for selected data node'
def updatedFragment = fragmentRepository.getById(DATA_NODE_202_FRAGMENT_ID)
def updatedLeaves = getLeavesMap(updatedFragment)
@@ -370,13 +367,13 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Update data node and descendants with new descendants'() {
- given: 'data node object with leaves updated, having child with old content'
- def submittedDataNode = buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
+ def 'Update data nodes and descendants with new descendants'() {
+ given: 'data nodes with leaves updated, having child with old content'
+ def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
buildDataNode('/parent-200/child-201/grand-child', ['leaf-value': 'original'], [])
- ])
+ ])]
when: 'update is performed including descendants'
- objectUnderTest.updateDataNodeAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
+ objectUnderTest.updateDataNodesAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNodes)
then: 'leaves have been updated for selected data node'
def updatedFragment = fragmentRepository.getById(DATA_NODE_202_FRAGMENT_ID)
def updatedLeaves = getLeavesMap(updatedFragment)
@@ -390,13 +387,13 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Update data node and descendants with same descendants but changed leaf value.'() {
- given: 'data node object with leaves updated, having child with old content'
- def submittedDataNode = buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
+ def 'Update data nodes and descendants with same descendants but changed leaf value.'() {
+ given: 'data nodes with leaves updated, having child with old content'
+ def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
buildDataNode('/parent-200/child-201/grand-child', ['leaf-value': 'new'], [])
- ])
+ ])]
when: 'update is performed including descendants'
- objectUnderTest.updateDataNodeAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
+ objectUnderTest.updateDataNodesAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNodes)
then: 'leaves have been updated for selected data node'
def updatedFragment = fragmentRepository.getById(DATA_NODE_202_FRAGMENT_ID)
def updatedLeaves = getLeavesMap(updatedFragment)
@@ -410,13 +407,13 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Update data node and descendants with different descendants xpath'() {
- given: 'data node object with leaves updated, having child with old content'
- def submittedDataNode = buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
+ def 'Update data nodes and descendants with different descendants xpath'() {
+ given: 'data nodes with leaves updated, having child with old content'
+ def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
buildDataNode('/parent-200/child-201/grand-child-new', ['leaf-value': 'new'], [])
- ])
+ ])]
when: 'update is performed including descendants'
- objectUnderTest.updateDataNodeAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
+ objectUnderTest.updateDataNodesAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNodes)
then: 'leaves have been updated for selected data node'
def updatedFragment = fragmentRepository.getById(DATA_NODE_202_FRAGMENT_ID)
def updatedLeaves = getLeavesMap(updatedFragment)
@@ -432,19 +429,17 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Update data node and descendants error scenario: #scenario.'() {
- given: 'data node object'
- def submittedDataNode = buildDataNode(xpath, ['leaf-name': 'leaf-value'], [])
+ def 'Update data nodes and descendants error scenario: #scenario.'() {
+ given: 'data nodes collection'
+ def submittedDataNodes = [buildDataNode(xpath, ['leaf-name': 'leaf-value'], [])]
when: 'attempt to update data node for #scenario'
- objectUnderTest.updateDataNodeAndDescendants(dataspaceName, anchorName, submittedDataNode)
+ objectUnderTest.updateDataNodesAndDescendants(dataspaceName, anchorName, submittedDataNodes)
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
scenario | dataspaceName | anchorName | xpath || expectedException
'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException
- 'invalid xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -699,7 +694,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
return dataNodeBuilder.withXpath(xpath).withLeaves(leaves).withChildDataNodes(childDataNodes).build()
}
- static Map<String, Object> getLeavesMap(FragmentEntity fragmentEntity) {
+ Map<String, Object> getLeavesMap(FragmentEntity fragmentEntity) {
return jsonObjectMapper.convertJsonString(fragmentEntity.attributes, Map<String, Object>.class)
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
index f02aa754f..204b93442 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
@@ -77,24 +77,6 @@ class CpsDataPersistenceServiceSpec extends Specification {
1 * objectUnderTest.storeDataNodes('dataspace1', 'anchor1', [dataNode])
}
- def 'Handling of StaleStateException (caused by concurrent updates) during update data node and descendants.'() {
- given: 'the fragment repository returns a fragment entity'
- mockFragmentRepository.getByAnchorAndXpath(*_) >> {
- def fragmentEntity = new FragmentEntity()
- fragmentEntity.setChildFragments([new FragmentEntity()] as Set<FragmentEntity>)
- return fragmentEntity
- }
- and: 'a data node is concurrently updated by another transaction'
- mockFragmentRepository.save(_) >> { throw new StaleStateException("concurrent updates") }
- when: 'attempt to update data node with submitted data nodes'
- objectUnderTest.updateDataNodeAndDescendants('some-dataspace', 'some-anchor', new DataNodeBuilder().withXpath('/some/xpath').build())
- then: 'concurrency exception is thrown'
- def concurrencyException = thrown(ConcurrencyException)
- assert concurrencyException.getDetails().contains('some-dataspace')
- assert concurrencyException.getDetails().contains('some-anchor')
- assert concurrencyException.getDetails().contains('/some/xpath')
- }
-
def 'Handling of StaleStateException (caused by concurrent updates) during update data nodes and descendants.'() {
given: 'the system can update one datanode and has two more datanodes that throw an exception while updating'
def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy
index 214fd69ff..65d63dfe3 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy
@@ -1,6 +1,7 @@
/*
* ============LICENSE_START=======================================================
* Copyright (C) 2022 Bell Canada.
+ * Modifications Copyright (C) 2021-2023 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
@@ -19,18 +20,14 @@
*/
package org.onap.cps.spi.impl
-import com.fasterxml.jackson.databind.ObjectMapper
+
import org.hibernate.exception.ConstraintViolationException
-import org.onap.cps.spi.CpsAdminPersistenceService
import org.onap.cps.spi.CpsModulePersistenceService
import org.onap.cps.spi.entities.DataspaceEntity
import org.onap.cps.spi.exceptions.DuplicatedYangResourceException
import org.onap.cps.spi.model.ModuleReference
-import org.onap.cps.spi.repository.AnchorRepository
import org.onap.cps.spi.repository.DataspaceRepository
-import org.onap.cps.spi.repository.SchemaSetRepository
import org.onap.cps.spi.repository.YangResourceRepository
-import org.onap.cps.utils.JsonObjectMapper
import org.spockframework.spring.SpringBean
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.dao.DataIntegrityViolationException
@@ -43,24 +40,12 @@ class CpsModulePersistenceServiceConcurrencySpec extends CpsPersistenceSpecBase
@Autowired
CpsModulePersistenceService objectUnderTest
- @Autowired
- AnchorRepository anchorRepository
-
- @Autowired
- SchemaSetRepository schemaSetRepository
-
- @Autowired
- CpsAdminPersistenceService cpsAdminPersistenceService
-
@SpringBean
YangResourceRepository yangResourceRepositoryMock = Mock()
@SpringBean
DataspaceRepository dataspaceRepositoryMock = Mock()
- @SpringBean
- JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())
-
static final String DATASPACE_NAME = 'DATASPACE-001'
static final String SCHEMA_SET_NAME_NEW = 'SCHEMA-SET-NEW'
static final String NEW_RESOURCE_NAME = 'some new resource'
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy
index 864b3e3b6..53f42f5a9 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy
@@ -30,13 +30,10 @@ import org.onap.cps.spi.exceptions.SchemaSetNotFoundException
import org.onap.cps.spi.model.ModuleDefinition
import org.onap.cps.spi.model.ModuleReference
import org.onap.cps.spi.model.SchemaSet
-import org.onap.cps.spi.repository.AnchorRepository
import org.onap.cps.spi.repository.SchemaSetRepository
import org.onap.cps.spi.repository.SchemaSetYangResourceRepositoryImpl
-import org.onap.cps.spi.repository.YangResourceRepository
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.jdbc.Sql
-import spock.lang.Ignore
class CpsModulePersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
@@ -44,17 +41,11 @@ class CpsModulePersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase
CpsModulePersistenceService objectUnderTest
@Autowired
- AnchorRepository anchorRepository
-
- @Autowired
SchemaSetRepository schemaSetRepository
@Autowired
CpsAdminPersistenceService cpsAdminPersistenceService
- @Autowired
- YangResourceRepository yangResourceRepository
-
final static String SET_DATA = '/data/schemaset.sql'
def static EXISTING_SCHEMA_SET_NAME = SCHEMA_SET_NAME1
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
index 9ef973268..811c3290b 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
@@ -84,7 +84,7 @@ class CpsModulePersistenceServiceSpec extends Specification {
def 'Store schema set error scenario: #scenario.'() {
given: 'no yang resource are currently saved'
- yangResourceRepositoryMock.findAllByChecksumIn(_) >> Collections.emptyList()
+ yangResourceRepositoryMock.findAllByChecksumIn(_ as Collection<String>) >> Collections.emptyList()
and: 'persisting yang resource raises db constraint exception (in case of concurrent requests for example)'
yangResourceRepositoryMock.saveAll(_) >> { throw dbException }
when: 'attempt to store schema set '
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy
index 9b722cdda..222a828b9 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022 Nordix Foundation
+ * Copyright (C) 2022-2023 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,7 +24,6 @@ import org.onap.cps.spi.CpsModulePersistenceService
import org.onap.cps.spi.entities.SchemaSetEntity
import org.onap.cps.spi.impl.CpsPersistenceSpecBase
import org.onap.cps.spi.model.ModuleReference
-import org.onap.cps.spi.repository.DataspaceRepository
import org.onap.cps.spi.repository.ModuleReferenceRepository
import org.onap.cps.spi.repository.SchemaSetRepository
import org.springframework.beans.factory.annotation.Autowired
@@ -53,9 +52,6 @@ class CpsModuleReferenceRepositoryPerfTest extends CpsPersistenceSpecBase {
CpsModulePersistenceService objectUnderTest
@Autowired
- DataspaceRepository dataspaceRepository
-
- @Autowired
SchemaSetRepository schemaSetRepository
@Autowired
diff --git a/cps-service/src/main/java/org/onap/cps/cache/AnchorDataCacheConfig.java b/cps-service/src/main/java/org/onap/cps/cache/AnchorDataCacheConfig.java
index 54d6ff395..3acba0bb3 100644
--- a/cps-service/src/main/java/org/onap/cps/cache/AnchorDataCacheConfig.java
+++ b/cps-service/src/main/java/org/onap/cps/cache/AnchorDataCacheConfig.java
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START========================================================
- * Copyright (C) 2022 Nordix Foundation
+ * Copyright (C) 2022-2023 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -26,15 +26,24 @@ import com.hazelcast.config.NamedConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* Core infrastructure of the hazelcast distributed cache for anchor data config use cases.
*/
+@Slf4j
@Configuration
public class AnchorDataCacheConfig {
+ @Value("${hazelcast.mode.kubernetes.enabled}")
+ private boolean cacheKubernetesEnabled;
+
+ @Value("${hazelcast.mode.kubernetes.service-name}")
+ private String cacheKubernetesServiceName;
+
private static final MapConfig anchorDataCacheMapConfig = createMapConfig("anchorDataCacheMapConfig");
/**
@@ -57,6 +66,7 @@ public class AnchorDataCacheConfig {
final Config config = new Config(instanceName);
config.addMapConfig((MapConfig) namedConfig);
config.setClusterName("cps-service-caches");
+ updateDiscoveryMode(config);
return config;
}
@@ -67,4 +77,12 @@ public class AnchorDataCacheConfig {
return mapConfig;
}
+ private void updateDiscoveryMode(final Config config) {
+ if (cacheKubernetesEnabled) {
+ log.info("Enabling kubernetes mode with service-name : {}", cacheKubernetesServiceName);
+ config.getNetworkConfig().getJoin().getKubernetesConfig().setEnabled(true)
+ .setProperty("service-name", cacheKubernetesServiceName);
+ }
+ }
+
}
diff --git a/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java b/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java
index 540401913..949fbc2c2 100644
--- a/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java
+++ b/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java
@@ -138,15 +138,6 @@ public interface CpsDataPersistenceService {
void updateDataLeaves(String dataspaceName, String anchorName, String xpath, Map<String, Serializable> leaves);
/**
- * Replaces an existing data node's content including descendants.
- *
- * @param dataspaceName dataspace name
- * @param anchorName anchor name
- * @param dataNode data node
- */
- void updateDataNodeAndDescendants(String dataspaceName, String anchorName, DataNode dataNode);
-
- /**
* Replaces multiple existing data nodes' content including descendants in a batch operation.
*
* @param dataspaceName dataspace name
diff --git a/cps-service/src/test/groovy/org/onap/cps/cache/AnchorDataCacheConfigSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/cache/AnchorDataCacheConfigSpec.groovy
index 839444b68..76b534553 100644
--- a/cps-service/src/test/groovy/org/onap/cps/cache/AnchorDataCacheConfigSpec.groovy
+++ b/cps-service/src/test/groovy/org/onap/cps/cache/AnchorDataCacheConfigSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022 Nordix Foundation
+ * Copyright (C) 2022-2023 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,6 +19,8 @@
*/
package org.onap.cps.cache
+
+import com.hazelcast.config.Config
import com.hazelcast.core.Hazelcast
import com.hazelcast.map.IMap
import org.springframework.beans.factory.annotation.Autowired
@@ -49,4 +51,28 @@ class AnchorDataCacheConfigSpec extends Specification {
assert anchorDataCacheConfig.backupCount == 3
assert anchorDataCacheConfig.asyncBackupCount == 3
}
+
+ def 'Verify deployment network configs for Distributed Caches'() {
+ given: 'the Anchor Data Cache config'
+ def anchorDataCacheNetworkConfig = Hazelcast.getHazelcastInstanceByName('hazelCastInstanceCpsCore').config.networkConfig
+ expect: 'system created instance with correct config'
+ assert anchorDataCacheNetworkConfig.join.autoDetectionConfig.enabled
+ assert !anchorDataCacheNetworkConfig.join.kubernetesConfig.enabled
+ }
+
+ def 'Verify network config'() {
+ given: 'Synchronization config object and test configuration'
+ def objectUnderTest = new AnchorDataCacheConfig()
+ def testConfig = new Config()
+ when: 'kubernetes properties are enabled'
+ objectUnderTest.cacheKubernetesEnabled = true
+ objectUnderTest.cacheKubernetesServiceName = 'test-service-name'
+ and: 'method called to update the discovery mode'
+ objectUnderTest.updateDiscoveryMode(testConfig)
+ then: 'applied properties are reflected'
+ assert testConfig.networkConfig.join.kubernetesConfig.enabled
+ assert testConfig.networkConfig.join.kubernetesConfig.properties.get('service-name') == 'test-service-name'
+
+ }
+
}
diff --git a/cps-service/src/test/resources/application.yml b/cps-service/src/test/resources/application.yml
index 04295eb74..21f374533 100644
--- a/cps-service/src/test/resources/application.yml
+++ b/cps-service/src/test/resources/application.yml
@@ -48,3 +48,10 @@ spring:
logging:
level:
org.apache.kafka: ERROR
+
+# Custom Hazelcast Config.
+hazelcast:
+ mode:
+ kubernetes:
+ enabled: false
+ service-name: "cps-and-ncmp-service"
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsAdminServiceLimits.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsAdminServiceLimits.groovy
index 7875caec3..0034af453 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsAdminServiceLimits.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsAdminServiceLimits.groovy
@@ -22,7 +22,6 @@ package org.onap.cps.integration.performance.cps
import org.onap.cps.api.CpsAdminService
import org.onap.cps.integration.performance.base.CpsPerfTestBase
-import org.springframework.dao.DataAccessResourceFailureException
class CpsAdminServiceLimits extends CpsPerfTestBase {
@@ -32,20 +31,20 @@ class CpsAdminServiceLimits extends CpsPerfTestBase {
def 'Get anchors from multiple schema set names limit exceeded: 32,766 (~ 2^15) schema set names.'() {
given: 'more than 32,766 schema set names'
- def schemaSetNames = (0..32_766).collect { "size-of-this-name-does-not-matter-for-limit-" + it }
+ def schemaSetNames = (0..40_000).collect { "size-of-this-name-does-not-matter-for-limit-" + it }
when: 'single get is executed to get all the anchors'
objectUnderTest.getAnchors(CPS_PERFORMANCE_TEST_DATASPACE, schemaSetNames)
- then: 'a database exception is thrown'
- thrown(DataAccessResourceFailureException.class)
+ then: 'a database exception is not thrown'
+ noExceptionThrown()
}
def 'Querying anchor names limit exceeded: 32,766 (~ 2^15) modules.'() {
given: 'more than 32,766 module names'
- def moduleNames = (0..32_766).collect { "size-of-this-name-does-not-matter-for-limit-" + it }
+ def moduleNames = (0..40_000).collect { "size-of-this-name-does-not-matter-for-limit-" + it }
when: 'single query is executed to get all the anchors'
objectUnderTest.queryAnchorNames(CPS_PERFORMANCE_TEST_DATASPACE, moduleNames)
- then: 'a database exception is thrown'
- thrown(DataAccessResourceFailureException.class)
+ then: 'a database exception is not thrown'
+ noExceptionThrown()
}
}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimits.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimits.groovy
index 2df910194..1579470ea 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimits.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimits.groovy
@@ -23,8 +23,7 @@ package org.onap.cps.integration.performance.cps
import java.time.OffsetDateTime
import org.onap.cps.api.CpsDataService
import org.onap.cps.integration.performance.base.CpsPerfTestBase
-import org.springframework.dao.DataAccessResourceFailureException
-import org.springframework.transaction.TransactionSystemException
+import org.onap.cps.spi.exceptions.DataNodeNotFoundException
import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS
@@ -36,29 +35,29 @@ class CpsDataServiceLimits extends CpsPerfTestBase {
def 'Multiple get limit exceeded: 32,764 (~ 2^15) xpaths.'() {
given: 'more than 32,764 xpaths'
- def xpaths = (0..32_764).collect { "/size/of/this/path/does/not/matter/for/limit[@id='" + it + "']" }
+ def xpaths = (0..40_000).collect { "/size/of/this/path/does/not/matter/for/limit[@id='" + it + "']" }
when: 'single operation is executed to get all datanodes with given xpaths'
objectUnderTest.getDataNodesForMultipleXpaths(CPS_PERFORMANCE_TEST_DATASPACE, 'bookstore1', xpaths, INCLUDE_ALL_DESCENDANTS)
- then: 'a database exception is thrown'
- thrown(DataAccessResourceFailureException.class)
+ then: 'a database exception is not thrown'
+ noExceptionThrown()
}
def 'Delete multiple datanodes limit exceeded: 32,767 (~ 2^15) xpaths.'() {
given: 'more than 32,767 xpaths'
- def xpaths = (0..32_767).collect { "/size/of/this/path/does/not/matter/for/limit[@id='" + it + "']" }
+ def xpaths = (0..40_000).collect { "/size/of/this/path/does/not/matter/for/limit[@id='" + it + "']" }
when: 'single operation is executed to delete all datanodes with given xpaths'
objectUnderTest.deleteDataNodes(CPS_PERFORMANCE_TEST_DATASPACE, 'bookstore1', xpaths, OffsetDateTime.now())
- then: 'a database exception is thrown'
- thrown(TransactionSystemException.class)
+ then: 'a database exception is not thrown (but a CPS DataNodeNotFoundException is thrown)'
+ thrown(DataNodeNotFoundException.class)
}
def 'Delete datanodes from multiple anchors limit exceeded: 32,766 (~ 2^15) anchors.'() {
given: 'more than 32,766 anchor names'
- def anchorNames = (0..32_766).collect { "size-of-this-name-does-not-matter-for-limit-" + it }
+ def anchorNames = (0..40_000).collect { "size-of-this-name-does-not-matter-for-limit-" + it }
when: 'single operation is executed to delete all datanodes in given anchors'
objectUnderTest.deleteDataNodes(CPS_PERFORMANCE_TEST_DATASPACE, anchorNames, OffsetDateTime.now())
- then: 'a database exception is thrown'
- thrown(DataAccessResourceFailureException.class)
+ then: 'a database exception is not thrown'
+ noExceptionThrown()
}
}
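The limits tests above now push 40,000 items through each operation. The PostgreSQL JDBC driver caps bind parameters per statement at 32,767 (Short.MAX_VALUE, roughly 2^15), which is why the old IN-clause queries failed around that size; the array-based queries bind a single parameter, so no database exception is expected. A trivial standalone check of the numbers involved (not project code):

    public final class ParameterLimitSketch {
        public static void main(final String[] args) {
            final int jdbcBindParameterLimit = Short.MAX_VALUE; // 32,767 (~ 2^15)
            final int testCollectionSize = 40_000;
            // The updated tests deliberately exceed the old ceiling and expect no database exception.
            System.out.println(testCollectionSize > jdbcBindParameterLimit); // true
        }
    }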