Diffstat (limited to 'cps-ri')
21 files changed, 259 insertions, 928 deletions
diff --git a/cps-ri/pom.xml b/cps-ri/pom.xml
index 66b89de926..89e60dbfbe 100644
--- a/cps-ri/pom.xml
+++ b/cps-ri/pom.xml
@@ -26,15 +26,15 @@
<parent>
<groupId>org.onap.cps</groupId>
<artifactId>cps-parent</artifactId>
- <version>3.3.2-SNAPSHOT</version>
+ <version>3.3.3-SNAPSHOT</version>
<relativePath>../cps-parent/pom.xml</relativePath>
</parent>
<artifactId>cps-ri</artifactId>
<properties>
- <minimum-coverage>0.34</minimum-coverage>
- <!-- Additional coverage is provided by the integration-test module -->
+ <minimum-coverage>0.29</minimum-coverage>
+ <!-- Additional coverage is provided by integration-test module -->
</properties>
<dependencies>
@@ -125,16 +125,6 @@
</exclusion>
</exclusions>
</dependency>
- <dependency>
- <groupId>org.testcontainers</groupId>
- <artifactId>postgresql</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.testcontainers</groupId>
- <artifactId>spock</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<profiles>
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java deleted file mode 100644 index 697eb8de00..0000000000 --- a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation - * Modifications Copyright (C) 2023 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.entities; - -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import lombok.AccessLevel; -import lombok.NoArgsConstructor; - -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public class FragmentEntityArranger { - - /** - * Convert a collection of (related) FragmentExtracts into FragmentEntities (trees) with descendants. - * - * @param anchorEntity the anchor(entity) all the fragments belong to - * @param fragmentExtracts FragmentExtracts to convert - * @return a collection of FragmentEntities (trees) with descendants. - */ - public static Collection<FragmentEntity> toFragmentEntityTrees(final AnchorEntity anchorEntity, - final Collection<FragmentExtract> fragmentExtracts) { - final Map<Long, FragmentEntity> fragmentEntityPerId = new HashMap<>(); - if (fragmentExtracts != null) { - for (final FragmentExtract fragmentExtract : fragmentExtracts) { - final FragmentEntity fragmentEntity = toFragmentEntity(anchorEntity, fragmentExtract); - fragmentEntityPerId.put(fragmentEntity.getId(), fragmentEntity); - } - } - return reuniteChildrenWithTheirParents(fragmentEntityPerId); - } - - /** - * Convert a collection of (related) FragmentExtracts into FragmentEntities (trees) with descendants. - * - * @param anchorEntityPerId the anchor(entities) the fragments belong to - * @param fragmentExtracts FragmentExtracts to convert - * @return a collection of FragmentEntities (trees) with descendants. 
- */ - public static Collection<FragmentEntity> toFragmentEntityTreesAcrossAnchors( - final Map<Long, AnchorEntity> anchorEntityPerId, final Collection<FragmentExtract> fragmentExtracts) { - final Map<Long, FragmentEntity> fragmentEntityPerId = new HashMap<>(); - for (final FragmentExtract fragmentExtract : fragmentExtracts) { - final AnchorEntity anchorEntity = anchorEntityPerId.get(fragmentExtract.getAnchorId()); - final FragmentEntity fragmentEntity = toFragmentEntity(anchorEntity, fragmentExtract); - fragmentEntityPerId.put(fragmentEntity.getId(), fragmentEntity); - } - return reuniteChildrenWithTheirParents(fragmentEntityPerId); - } - - private static FragmentEntity toFragmentEntity(final AnchorEntity anchorEntity, - final FragmentExtract fragmentExtract) { - final FragmentEntity fragmentEntity = new FragmentEntity(); - fragmentEntity.setAnchor(anchorEntity); - fragmentEntity.setId(fragmentExtract.getId()); - fragmentEntity.setXpath(fragmentExtract.getXpath()); - fragmentEntity.setAttributes(fragmentExtract.getAttributes()); - fragmentEntity.setParentId(fragmentExtract.getParentId()); - fragmentEntity.setChildFragments(new HashSet<>()); - return fragmentEntity; - } - - private static Collection<FragmentEntity> reuniteChildrenWithTheirParents( - final Map<Long, FragmentEntity> fragmentEntityPerId) { - final Collection<FragmentEntity> fragmentEntitiesWithoutParentInResultSet = new HashSet<>(); - for (final FragmentEntity fragmentEntity : fragmentEntityPerId.values()) { - final FragmentEntity parentFragmentEntity = fragmentEntityPerId.get(fragmentEntity.getParentId()); - if (parentFragmentEntity == null) { - fragmentEntitiesWithoutParentInResultSet.add(fragmentEntity); - } else { - parentFragmentEntity.getChildFragments().add(fragmentEntity); - } - } - return fragmentEntitiesWithoutParentInResultSet; - } - -} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java index 02f723029d..e6e250f082 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java @@ -36,7 +36,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -52,8 +51,6 @@ import org.onap.cps.spi.FetchDescendantsOption; import org.onap.cps.spi.entities.AnchorEntity; import org.onap.cps.spi.entities.DataspaceEntity; import org.onap.cps.spi.entities.FragmentEntity; -import org.onap.cps.spi.entities.FragmentEntityArranger; -import org.onap.cps.spi.entities.FragmentExtract; import org.onap.cps.spi.exceptions.AlreadyDefinedException; import org.onap.cps.spi.exceptions.AlreadyDefinedExceptionBatch; import org.onap.cps.spi.exceptions.ConcurrencyException; @@ -248,7 +245,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName); final Collection<FragmentEntity> fragmentEntities = getFragmentEntities(anchorEntity, xpaths, fetchDescendantsOption); - return toDataNodes(fragmentEntities, fetchDescendantsOption); + return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); } private Collection<FragmentEntity> getFragmentEntities(final AnchorEntity anchorEntity, @@ -269,19 +266,16 @@ public class 
CpsDataPersistenceServiceImpl implements CpsDataPersistenceService normalizedXpaths.addAll(fragmentRepository.findAllXpathByAnchorAndParentIdIsNull(anchorEntity)); } - final List<FragmentExtract> fragmentExtracts = - fragmentRepository.findExtractsWithDescendants(anchorEntity.getId(), normalizedXpaths, - fetchDescendantsOption.getDepth()); + final List<FragmentEntity> fragmentEntities = fragmentRepository.findByAnchorAndXpathIn(anchorEntity, + normalizedXpaths); - return FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts); + return fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, fragmentEntities); } private FragmentEntity getFragmentEntity(final AnchorEntity anchorEntity, final String xpath) { final FragmentEntity fragmentEntity; if (isRootXpath(xpath)) { - final List<FragmentExtract> fragmentExtracts = fragmentRepository.findAllExtractsByAnchor(anchorEntity); - fragmentEntity = FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts) - .stream().findFirst().orElse(null); + fragmentEntity = fragmentRepository.findOneByAnchorId(anchorEntity.getId()).orElse(null); } else { fragmentEntity = fragmentRepository.getByAnchorAndXpath(anchorEntity, getNormalizedXpath(xpath)); } @@ -320,8 +314,8 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService fragmentEntities = fragmentRepository.findByAnchorAndXpathIn(anchorEntity, ancestorXpaths); } } - fragmentEntities = prefetchDescendantsForFragmentEntities(fetchDescendantsOption, anchorEntity, - fragmentEntities); + fragmentEntities = fragmentRepository.prefetchDescendantsOfFragmentEntities(fetchDescendantsOption, + fragmentEntities); return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); } @@ -331,31 +325,6 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService return queryDataNodes(dataspaceName, QUERY_ACROSS_ANCHORS, cpsPath, fetchDescendantsOption); } - private Collection<FragmentEntity> prefetchDescendantsForFragmentEntities( - final FetchDescendantsOption fetchDescendantsOption, - final AnchorEntity anchorEntity, - final Collection<FragmentEntity> proxiedFragmentEntities) { - if (FetchDescendantsOption.OMIT_DESCENDANTS.equals(fetchDescendantsOption)) { - return proxiedFragmentEntities; - } - - final List<Long> fragmentEntityIds = proxiedFragmentEntities.stream() - .map(FragmentEntity::getId).collect(Collectors.toList()); - - final List<FragmentExtract> fragmentExtracts = - fragmentRepository.findExtractsWithDescendantsByIds(fragmentEntityIds, fetchDescendantsOption.getDepth()); - - if (anchorEntity == ALL_ANCHORS) { - final Collection<Long> anchorIds = fragmentExtracts.stream() - .map(FragmentExtract::getAnchorId).collect(Collectors.toSet()); - final List<AnchorEntity> anchorEntities = anchorRepository.findAllById(anchorIds); - final Map<Long, AnchorEntity> anchorEntityPerId = anchorEntities.stream() - .collect(Collectors.toMap(AnchorEntity::getId, Function.identity())); - return FragmentEntityArranger.toFragmentEntityTreesAcrossAnchors(anchorEntityPerId, fragmentExtracts); - } - return FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts); - } - private List<DataNode> createDataNodesFromFragmentEntities(final FetchDescendantsOption fetchDescendantsOption, final Collection<FragmentEntity> fragmentEntities) { final List<DataNode> dataNodes = new ArrayList<>(fragmentEntities.size()); @@ -422,15 +391,6 @@ public class CpsDataPersistenceServiceImpl implements 
CpsDataPersistenceService .withChildDataNodes(childDataNodes).build(); } - private Collection<DataNode> toDataNodes(final Collection<FragmentEntity> fragmentEntities, - final FetchDescendantsOption fetchDescendantsOption) { - final Collection<DataNode> dataNodes = new ArrayList<>(fragmentEntities.size()); - for (final FragmentEntity fragmentEntity : fragmentEntities) { - dataNodes.add(toDataNode(fragmentEntity, fetchDescendantsOption)); - } - return dataNodes; - } - private List<DataNode> getChildDataNodes(final FragmentEntity fragmentEntity, final FetchDescendantsOption fetchDescendantsOption) { if (fetchDescendantsOption.hasNext()) { diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentExtract.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java index 50be3c7b7a..2460db869a 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentExtract.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepository.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 Nordix Foundation. + * Copyright (C) 2023 Nordix Foundation. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,17 +18,14 @@ * ============LICENSE_END========================================================= */ -package org.onap.cps.spi.entities; +package org.onap.cps.spi.repository; -public interface FragmentExtract { +import java.util.Collection; +import org.onap.cps.spi.FetchDescendantsOption; +import org.onap.cps.spi.entities.FragmentEntity; - Long getId(); - - Long getAnchorId(); - - String getXpath(); - - Long getParentId(); - - String getAttributes(); +public interface FragmentPrefetchRepository { + Collection<FragmentEntity> prefetchDescendantsOfFragmentEntities( + final FetchDescendantsOption fetchDescendantsOption, + final Collection<FragmentEntity> proxiedFragmentEntities); } diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java new file mode 100644 index 0000000000..4f056c8f6e --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentPrefetchRepositoryImpl.java @@ -0,0 +1,127 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.repository; + +import java.sql.Connection; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import lombok.RequiredArgsConstructor; +import org.onap.cps.spi.FetchDescendantsOption; +import org.onap.cps.spi.entities.AnchorEntity; +import org.onap.cps.spi.entities.FragmentEntity; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.PreparedStatementSetter; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Repository; + +@Repository +@RequiredArgsConstructor +public class FragmentPrefetchRepositoryImpl implements FragmentPrefetchRepository { + + private final JdbcTemplate jdbcTemplate; + + @Override + public Collection<FragmentEntity> prefetchDescendantsOfFragmentEntities( + final FetchDescendantsOption fetchDescendantsOption, + final Collection<FragmentEntity> proxiedFragmentEntities) { + + if (FetchDescendantsOption.OMIT_DESCENDANTS.equals(fetchDescendantsOption)) { + return proxiedFragmentEntities; + } + + final List<Long> fragmentEntityIds = proxiedFragmentEntities.stream() + .map(FragmentEntity::getId).collect(Collectors.toList()); + + final Map<Long, AnchorEntity> anchorEntityPerId = proxiedFragmentEntities.stream() + .map(FragmentEntity::getAnchor) + .collect(Collectors.toMap(AnchorEntity::getId, anchor -> anchor, (anchor1, anchor2) -> anchor1)); + + final int maxDepth = fetchDescendantsOption.equals(FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + ? Integer.MAX_VALUE + : fetchDescendantsOption.getDepth(); + return findFragmentEntitiesWithDescendantsByIds(fragmentEntityIds, anchorEntityPerId, maxDepth); + } + + private Collection<FragmentEntity> findFragmentEntitiesWithDescendantsByIds( + final Collection<Long> fragmentEntityIds, + final Map<Long, AnchorEntity> anchorEntityPerId, + final int maxDepth) { + final String sql + = "WITH RECURSIVE parent_search AS (" + + " SELECT id, 0 AS depth " + + " FROM fragment " + + " WHERE id = ANY (?) " + + " UNION " + + " SELECT child.id, depth + 1 " + + " FROM fragment child INNER JOIN parent_search parent ON child.parent_id = parent.id" + + " WHERE depth < ?" 
+ + ") " + + "SELECT fragment.id, anchor_id AS anchorId, xpath, parent_id AS parentId, " + + " CAST(attributes AS TEXT) AS attributes " + + "FROM fragment INNER JOIN parent_search ON fragment.id = parent_search.id"; + + final PreparedStatementSetter preparedStatementSetter = preparedStatement -> { + final Connection connection = preparedStatement.getConnection(); + final java.sql.Array idArray = connection.createArrayOf("bigint", fragmentEntityIds.toArray()); + preparedStatement.setArray(1, idArray); + preparedStatement.setInt(2, maxDepth); + }; + + final RowMapper<FragmentEntity> fragmentEntityRowMapper = (resultSet, rowNum) -> { + final FragmentEntity fragmentEntity = new FragmentEntity(); + fragmentEntity.setId(resultSet.getLong("id")); + fragmentEntity.setXpath(resultSet.getString("xpath")); + fragmentEntity.setParentId(resultSet.getLong("parentId")); + fragmentEntity.setAttributes(resultSet.getString("attributes")); + fragmentEntity.setAnchor(anchorEntityPerId.get(resultSet.getLong("anchorId"))); + fragmentEntity.setChildFragments(new HashSet<>()); + return fragmentEntity; + }; + + final Map<Long, FragmentEntity> fragmentEntityPerId; + try (final Stream<FragmentEntity> fragmentEntityStream = jdbcTemplate.queryForStream(sql, + preparedStatementSetter, fragmentEntityRowMapper)) { + fragmentEntityPerId = fragmentEntityStream.collect( + Collectors.toMap(FragmentEntity::getId, Function.identity())); + } + return reuniteChildrenWithTheirParents(fragmentEntityPerId); + } + + private static Collection<FragmentEntity> reuniteChildrenWithTheirParents( + final Map<Long, FragmentEntity> fragmentEntityPerId) { + final Collection<FragmentEntity> fragmentEntitiesWithoutParent = new HashSet<>(); + for (final FragmentEntity fragmentEntity : fragmentEntityPerId.values()) { + final FragmentEntity parentFragmentEntity = fragmentEntityPerId.get(fragmentEntity.getParentId()); + if (parentFragmentEntity == null) { + fragmentEntitiesWithoutParent.add(fragmentEntity); + } else { + parentFragmentEntity.getChildFragments().add(fragmentEntity); + } + } + return fragmentEntitiesWithoutParent; + } + +} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java index 82c422f6fd..03de95eb8d 100755 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java @@ -29,7 +29,6 @@ import java.util.Optional; import org.onap.cps.spi.entities.AnchorEntity;
import org.onap.cps.spi.entities.DataspaceEntity;
import org.onap.cps.spi.entities.FragmentEntity;
-import org.onap.cps.spi.entities.FragmentExtract;
import org.onap.cps.spi.exceptions.DataNodeNotFoundException;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
@@ -38,7 +37,8 @@ import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
@Repository
-public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery {
+public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery,
+ FragmentPrefetchRepository {
Optional<FragmentEntity> findByAnchorAndXpath(AnchorEntity anchorEntity, String xpath);
@@ -47,7 +47,10 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
new DataNodeNotFoundException(anchorEntity.getDataspace().getName(), anchorEntity.getName(), xpath));
}
- List<FragmentEntity> findByAnchorIdAndXpathIn(long anchorId, String[] xpaths);
+ @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths)",
+ nativeQuery = true)
+ List<FragmentEntity> findByAnchorIdAndXpathIn(@Param("anchorId") long anchorId,
+ @Param("xpaths") String[] xpaths);
default List<FragmentEntity> findByAnchorAndXpathIn(final AnchorEntity anchorEntity,
final Collection<String> xpaths) {
@@ -66,8 +69,8 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
boolean existsByAnchorId(long anchorId);
- @Query("SELECT f FROM FragmentEntity f WHERE anchor = :anchor")
- List<FragmentExtract> findAllExtractsByAnchor(@Param("anchor") AnchorEntity anchorEntity);
+ @Query(value = "SELECT * FROM fragment WHERE anchor_id = :anchorId LIMIT 1", nativeQuery = true)
+ Optional<FragmentEntity> findOneByAnchorId(@Param("anchorId") long anchorId);
@Modifying
@Query(value = "DELETE FROM fragment WHERE anchor_id = ANY (:anchorIds)", nativeQuery = true)
@@ -111,48 +114,4 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
@Query("SELECT xpath FROM FragmentEntity WHERE anchor = :anchor AND parentId IS NULL")
List<String> findAllXpathByAnchorAndParentIdIsNull(@Param("anchor") AnchorEntity anchorEntity);
- @Query(value
- = "WITH RECURSIVE parent_search AS ("
- + " SELECT id, 0 AS depth "
- + " FROM fragment "
- + " WHERE anchor_id = :anchorId AND xpath = ANY (:xpaths) "
- + " UNION "
- + " SELECT c.id, depth + 1 "
- + " FROM fragment c INNER JOIN parent_search p ON c.parent_id = p.id"
- + " WHERE depth < (SELECT CASE WHEN :maxDepth = -1 THEN " + Integer.MAX_VALUE + " ELSE :maxDepth END) "
- + ") "
- + "SELECT f.id, anchor_id AS anchorId, xpath, f.parent_id AS parentId, CAST(attributes AS TEXT) AS attributes "
- + "FROM fragment f INNER JOIN parent_search p ON f.id = p.id",
- nativeQuery = true
- )
- List<FragmentExtract> findExtractsWithDescendants(@Param("anchorId") long anchorId,
- @Param("xpaths") String[] xpaths,
- @Param("maxDepth") int maxDepth);
-
- default List<FragmentExtract> findExtractsWithDescendants(final long anchorId, final Collection<String> xpaths,
- final int maxDepth) {
- return findExtractsWithDescendants(anchorId, xpaths.toArray(new String[0]), maxDepth);
- }
-
- @Query(value
- = "WITH RECURSIVE parent_search AS ("
- + " SELECT id, 0 AS depth "
- + " FROM fragment "
- + " WHERE id = ANY (:ids) "
- + " UNION "
- + " SELECT c.id, depth + 1 "
- + " FROM fragment c INNER JOIN parent_search p ON c.parent_id = p.id"
- + " WHERE depth < (SELECT CASE WHEN :maxDepth = -1 THEN " + Integer.MAX_VALUE + " ELSE :maxDepth END) "
- + ") "
- + "SELECT f.id, anchor_id AS anchorId, xpath, f.parent_id AS parentId, CAST(attributes AS TEXT) AS attributes "
- + "FROM fragment f INNER JOIN parent_search p ON f.id = p.id",
- nativeQuery = true
- )
- List<FragmentExtract> findExtractsWithDescendantsByIds(@Param("ids") long[] ids,
- @Param("maxDepth") int maxDepth);
-
- default List<FragmentExtract> findExtractsWithDescendantsByIds(final Collection<Long> ids, final int maxDepth) {
- return findExtractsWithDescendantsByIds(ids.stream().mapToLong(id -> id).toArray(), maxDepth);
- }
-
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy index e8921b3ed0..cb554faee8 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy @@ -26,7 +26,7 @@ import org.onap.cps.spi.FetchDescendantsOption import org.onap.cps.spi.entities.AnchorEntity import org.onap.cps.spi.entities.DataspaceEntity import org.onap.cps.spi.entities.FragmentEntity -import org.onap.cps.spi.entities.FragmentExtract + import org.onap.cps.spi.exceptions.ConcurrencyException import org.onap.cps.spi.exceptions.DataValidationException import org.onap.cps.spi.model.DataNode @@ -55,6 +55,7 @@ class CpsDataPersistenceServiceSpec extends Specification { def setup() { mockAnchorRepository.getByDataspaceAndName(_, _) >> anchorEntity + mockFragmentRepository.prefetchDescendantsOfFragmentEntities(_, _) >> { fetchDescendantsOption, fragmentEntities -> fragmentEntities } } def 'Storing data nodes individually when batch operation fails'(){ @@ -93,20 +94,20 @@ class CpsDataPersistenceServiceSpec extends Specification { def 'Batch update data node leaves and descendants: #scenario'(){ given: 'the fragment repository returns fragment entities related to the xpath inputs' - mockFragmentRepository.findExtractsWithDescendants(_, [] as Set, _) >> [] - mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath'] as Set, _) >> [ - mockFragmentExtract(1, null, 123, '/test/xpath', "{\"id\":\"testId1\"}") + mockFragmentRepository.findByAnchorAndXpathIn(_, [] as Set) >> [] + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath'] as Set) >> [ + new FragmentEntity(1, '/test/xpath', null, "{\"id\":\"testId\"}", anchorEntity, [] as Set) ] - mockFragmentRepository.findExtractsWithDescendants(123, ['/test/xpath1', '/test/xpath2'] as Set, _) >> [ - mockFragmentExtract(1, null, 123, '/test/xpath1', "{\"id\":\"testId1\"}"), - mockFragmentExtract(2, null, 123, '/test/xpath2', "{\"id\":\"testId1\"}") + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath1', '/test/xpath2'] as Set) >> [ + new FragmentEntity(1, '/test/xpath1', null, "{\"id\":\"testId1\"}", anchorEntity, [] as Set), + new FragmentEntity(2, '/test/xpath2', null, "{\"id\":\"testId2\"}", anchorEntity, [] as Set) ] when: 'replace data node tree' objectUnderTest.batchUpdateDataLeaves('dataspaceName', 'anchorName', dataNodes.stream().collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves))) then: 'call fragment repository save all method' 1 * mockFragmentRepository.saveAll({fragmentEntities -> - assert fragmentEntities as List == expectedFragmentEntities + assert fragmentEntities.sort() == expectedFragmentEntities.sort() assert fragmentEntities.size() == expectedSize }) where: 'the following Data Type is passed' @@ -172,9 +173,9 @@ class CpsDataPersistenceServiceSpec extends Specification { def 'Retrieving multiple data nodes.'() { given: 'fragment repository returns a collection of fragments' - mockFragmentRepository.findExtractsWithDescendants(123, ['/xpath1', '/xpath2'] as Set, _) >> [ - mockFragmentExtract(1, null, 123, '/xpath1', null), - mockFragmentExtract(2, null, 123, '/xpath2', null) + mockFragmentRepository.findByAnchorAndXpathIn(anchorEntity, ['/xpath1', '/xpath2'] as Set) >> [ + new FragmentEntity(1, '/xpath1', null, null, anchorEntity, [] as Set), + new FragmentEntity(2, '/xpath2', null, 
null, anchorEntity, [] as Set) ] when: 'getting data nodes for 2 xpaths' def result = objectUnderTest.getDataNodesForMultipleXpaths('some-dataspace', 'some-anchor', ['/xpath1', '/xpath2'], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) @@ -207,9 +208,9 @@ class CpsDataPersistenceServiceSpec extends Specification { def 'Replace data node and descendants: #scenario'(){ given: 'the fragment repository returns fragment entities related to the xpath inputs' - mockFragmentRepository.findExtractsWithDescendants(_, [] as Set, _) >> [] - mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath'] as Set, _) >> [ - mockFragmentExtract(1, null, 123, '/test/xpath', null) + mockFragmentRepository.findByAnchorAndXpathIn(_, [] as Set) >> [] + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath'] as Set) >> [ + new FragmentEntity(1, '/test/xpath', null, '{"id":"testId"}', anchorEntity, [] as Set) ] when: 'replace data node tree' objectUnderTest.updateDataNodesAndDescendants('dataspaceName', 'anchorName', dataNodes) @@ -223,9 +224,9 @@ class CpsDataPersistenceServiceSpec extends Specification { def 'Replace data nodes and descendants'() { given: 'the fragment repository returns fragment entities related to the xpath inputs' - mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath1', '/test/xpath2'] as Set, _) >> [ - mockFragmentExtract(1, null, 123, '/test/xpath1', null), - mockFragmentExtract(2, null, 123, '/test/xpath2', null) + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/test/xpath1', '/test/xpath2'] as Set) >> [ + new FragmentEntity(1, '/test/xpath1', null, null, anchorEntity, [] as Set), + new FragmentEntity(2, '/test/xpath2', null, null, anchorEntity, [] as Set) ] and: 'some data nodes with descendants' def dataNode1 = new DataNode(xpath: '/test/xpath1', leaves: ['id': 'testId1'], childDataNodes: [new DataNode(xpath: '/test/xpath1/child', leaves: ['id': 'childTestId1'])]) @@ -253,38 +254,27 @@ class CpsDataPersistenceServiceSpec extends Specification { def createDataNodesAndMockRepositoryMethodSupportingThem(Map<String, String> xpathToScenarioMap) { def dataNodes = [] - def fragmentExtracts = [] + def fragmentEntities = [] def fragmentId = 1 xpathToScenarioMap.each { def xpath = it.key def scenario = it.value def dataNode = new DataNodeBuilder().withXpath(xpath).build() dataNodes.add(dataNode) - def fragmentExtract = mockFragmentExtract(fragmentId, null, 123, xpath, null) - fragmentExtracts.add(fragmentExtract) def fragmentEntity = new FragmentEntity(id: fragmentId, anchor: anchorEntity, xpath: xpath, childFragments: []) + fragmentEntities.add(fragmentEntity) if ('EXCEPTION' == scenario) { mockFragmentRepository.save(fragmentEntity) >> { throw new StaleStateException("concurrent updates") } } fragmentId++ } - mockFragmentRepository.findExtractsWithDescendants(_, xpathToScenarioMap.keySet(), _) >> fragmentExtracts + mockFragmentRepository.findByAnchorAndXpathIn(_, xpathToScenarioMap.keySet()) >> fragmentEntities return dataNodes } def mockFragmentWithJson(json) { - def fragmentExtract = mockFragmentExtract(456, null, 123, '/parent-01', json) - mockFragmentRepository.findExtractsWithDescendants(123, ['/parent-01'] as Set, _) >> [fragmentExtract] - } - - def mockFragmentExtract(id, parentId, anchorId, xpath, attributes) { - def fragmentExtract = Mock(FragmentExtract) - fragmentExtract.getId() >> id - fragmentExtract.getParentId() >> parentId - fragmentExtract.getAnchorId() >> anchorId - fragmentExtract.getXpath() >> xpath - fragmentExtract.getAttributes() >> 
attributes - return fragmentExtract + def fragmentEntity = new FragmentEntity(456, '/parent-01', null, json, anchorEntity, [] as Set) + mockFragmentRepository.findByAnchorAndXpathIn(_, ['/parent-01'] as Set) >> [fragmentEntity] } } diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy index 65d63dfe3b..2e4dba2e9b 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceConcurrencySpec.groovy @@ -20,81 +20,126 @@ */ package org.onap.cps.spi.impl - import org.hibernate.exception.ConstraintViolationException +import org.onap.cps.spi.CpsAdminPersistenceService import org.onap.cps.spi.CpsModulePersistenceService import org.onap.cps.spi.entities.DataspaceEntity +import org.onap.cps.spi.entities.SchemaSetEntity import org.onap.cps.spi.exceptions.DuplicatedYangResourceException import org.onap.cps.spi.model.ModuleReference import org.onap.cps.spi.repository.DataspaceRepository +import org.onap.cps.spi.repository.ModuleReferenceRepository +import org.onap.cps.spi.repository.SchemaSetRepository import org.onap.cps.spi.repository.YangResourceRepository import org.spockframework.spring.SpringBean import org.springframework.beans.factory.annotation.Autowired +import org.springframework.boot.test.context.SpringBootTest import org.springframework.dao.DataIntegrityViolationException -import spock.lang.Shared +import org.springframework.retry.annotation.EnableRetry +import spock.lang.Specification import java.sql.SQLException -class CpsModulePersistenceServiceConcurrencySpec extends CpsPersistenceSpecBase { +@SpringBootTest(classes=[CpsModulePersistenceServiceImpl]) +@EnableRetry +class CpsModulePersistenceServiceConcurrencySpec extends Specification { @Autowired CpsModulePersistenceService objectUnderTest @SpringBean - YangResourceRepository yangResourceRepositoryMock = Mock() + DataspaceRepository dataspaceRepository = Mock() + + @SpringBean + YangResourceRepository yangResourceRepository = Mock() + + @SpringBean + SchemaSetRepository schemaSetRepository = Mock() @SpringBean - DataspaceRepository dataspaceRepositoryMock = Mock() + CpsAdminPersistenceService cpsAdminPersistenceService = Mock() - static final String DATASPACE_NAME = 'DATASPACE-001' - static final String SCHEMA_SET_NAME_NEW = 'SCHEMA-SET-NEW' - static final String NEW_RESOURCE_NAME = 'some new resource' - static final String NEW_RESOURCE_CONTENT = 'module stores {\n' + + @SpringBean + ModuleReferenceRepository moduleReferenceRepository = Mock() + + def NEW_RESOURCE_NAME = 'some new resource' + def NEW_RESOURCE_CONTENT = 'module stores {\n' + ' yang-version 1.1;\n' + ' namespace "org:onap:ccsdk:sample";\n' + '}' def newYangResourcesNameToContentMap = [(NEW_RESOURCE_NAME):NEW_RESOURCE_CONTENT] - @Shared - yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539' + def yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539' - @Shared - yangResourceChecksumDbConstraint = 'yang_resource_checksum_key' + def yangResourceChecksumDbConstraint = 'yang_resource_checksum_key' - @Shared - sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum) + def sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum) - @Shared - checksumIntegrityException = - new 
DataIntegrityViolationException("checksum integrity exception", + def checksumIntegrityException = new DataIntegrityViolationException("checksum integrity exception", new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint)) - def 'Store new schema set, retry mechanism'() { + def 'Store new schema set, maximum retries.'() { given: 'no pre-existing schemaset in database' - dataspaceRepositoryMock.getByName(_) >> new DataspaceEntity() - yangResourceRepositoryMock.findAllByChecksumIn(_) >> Collections.emptyList() + dataspaceRepository.getByName(_) >> new DataspaceEntity() + yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() when: 'a new schemaset is stored' - objectUnderTest.storeSchemaSet(DATASPACE_NAME, SCHEMA_SET_NAME_NEW, newYangResourcesNameToContentMap) - then: ' duplicated yang resource exception is thrown ' - def e = thrown(DuplicatedYangResourceException) + objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap) + then: 'a duplicated yang resource exception is thrown ' + thrown(DuplicatedYangResourceException) and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)' - 5 * yangResourceRepositoryMock.saveAll(_) >> { throw checksumIntegrityException } + 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } + } + + def 'Store new schema set, succeed on third attempt.'() { + given: 'no pre-existing schemaset in database' + dataspaceRepository.getByName(_) >> new DataspaceEntity() + yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() + when: 'a new schemaset is stored' + objectUnderTest.storeSchemaSet('some dataspace', 'some new schema set', newYangResourcesNameToContentMap) + then: 'no exception is thrown ' + noExceptionThrown() + and: 'the system will attempt to save the data 2 times with checksum integrity exception but then succeed' + 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } + 1 * yangResourceRepository.saveAll(_) >> [] } - def 'Store schema set using modules, retry mechanism'() { + def 'Store schema set using modules, maximum retries.'() { given: 'map of new modules, a list of existing modules, module reference' def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }'] def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12") def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule] and: 'no pre-existing schemaset in database' - dataspaceRepositoryMock.getByName(_) >> new DataspaceEntity() - yangResourceRepositoryMock.findAllByChecksumIn(_) >> Collections.emptyList() + dataspaceRepository.getByName(_) >> new DataspaceEntity() + yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() when: 'a new schemaset is stored from a module' - objectUnderTest.storeSchemaSetFromModules(DATASPACE_NAME, "newSchemaSetName" , mapOfNewModules, listOfExistingModulesModuleReference) - then: ' duplicated yang resource exception is thrown ' - def e = thrown(DuplicatedYangResourceException) + objectUnderTest.storeSchemaSetFromModules('some dataspace', 'some new schema set' , mapOfNewModules, listOfExistingModulesModuleReference) + then: 'a duplicated yang resource exception is thrown ' + thrown(DuplicatedYangResourceException) and: 'the system will attempt to save the data 5 times (because checksum integrity exception is thrown each time)' - 5 * 
yangResourceRepositoryMock.saveAll(_) >> { throw checksumIntegrityException } + 5 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } + } + + def 'Store schema set using modules, succeed on third attempt.'() { + given: 'map of new modules, a list of existing modules, module reference' + def mapOfNewModules = [newModule1: 'module newmodule { yang-version 1.1; revision "2021-10-12" { } }'] + def moduleReferenceForExistingModule = new ModuleReference("test","2021-10-12") + def listOfExistingModulesModuleReference = [moduleReferenceForExistingModule] + and: 'no pre-existing schemaset in database' + def dataspaceEntity = new DataspaceEntity() + dataspaceRepository.getByName(_) >> new DataspaceEntity() + yangResourceRepository.findAllByChecksumIn(_) >> Collections.emptyList() + yangResourceRepository.getResourceIdsByModuleReferences(_) >> [] + and: 'can retrieve schemaset details after storing it' + def schemaSetEntity = new SchemaSetEntity() + schemaSetRepository.getByDataspaceAndName(dataspaceEntity, 'new schema set') >> schemaSetEntity + when: 'a new schemaset is stored from a module' + objectUnderTest.storeSchemaSetFromModules('some dataspace', 'new schema set' , mapOfNewModules, listOfExistingModulesModuleReference) + then: 'no exception is thrown ' + noExceptionThrown() + and: 'the system will attempt to save the data 2 times with checksum integrity exception but then succeed' + 2 * yangResourceRepository.saveAll(_) >> { throw checksumIntegrityException } + 1 * yangResourceRepository.saveAll(_) >> [] } + } diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy index 5e42ce04e7..52651c6b18 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy @@ -1,7 +1,7 @@ /* * ============LICENSE_START======================================================= * Copyright (c) 2021 Bell Canada. - * Modifications Copyright (C) 2022 Nordix Foundation + * Modifications Copyright (C) 2022-2023 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -28,7 +28,6 @@ import org.onap.cps.spi.repository.ModuleReferenceRepository import org.onap.cps.spi.repository.SchemaSetRepository import org.onap.cps.spi.repository.YangResourceRepository import org.springframework.dao.DataIntegrityViolationException -import spock.lang.Shared import spock.lang.Specification import java.sql.SQLException @@ -38,17 +37,14 @@ import java.sql.SQLException */ class CpsModulePersistenceServiceSpec extends Specification { - // Instance to test CpsModulePersistenceService objectUnderTest - // Mocks def dataspaceRepositoryMock = Mock(DataspaceRepository) def yangResourceRepositoryMock = Mock(YangResourceRepository) def schemaSetRepositoryMock = Mock(SchemaSetRepository) def cpsAdminPersistenceServiceMock = Mock(CpsAdminPersistenceService) def moduleReferenceRepositoryMock = Mock(ModuleReferenceRepository) - // Constants def yangResourceName = 'my-yang-resource-name' def yangResourceContent = 'module stores {\n' + ' yang-version 1.1;\n' + @@ -62,17 +58,14 @@ class CpsModulePersistenceServiceSpec extends Specification { ' }' + '}' - // Scenario data static yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539' static yangResourceChecksumDbConstraint = 'yang_resource_checksum_key' static sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum) - static checksumIntegrityException = new DataIntegrityViolationException( - "checksum integrity exception", + static checksumIntegrityException = new DataIntegrityViolationException('checksum integrity exception', new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint)) - static checksumIntegrityExceptionWithoutChecksum = new DataIntegrityViolationException( - "checksum integrity exception", + static checksumIntegrityExceptionWithoutChecksum = new DataIntegrityViolationException('checksum integrity exception', new ConstraintViolationException('', new SQLException('no checksum'), yangResourceChecksumDbConstraint)) - static anotherIntegrityException = new DataIntegrityViolationException("another integrity exception") + static otherIntegrityException = new DataIntegrityViolationException('another integrity exception') def setup() { objectUnderTest = new CpsModulePersistenceServiceImpl(yangResourceRepositoryMock, schemaSetRepositoryMock, @@ -94,7 +87,7 @@ class CpsModulePersistenceServiceSpec extends Specification { scenario | dbException || expectedThrownException | expectedThrownExceptionMessage 'checksum data failure' | checksumIntegrityException || DuplicatedYangResourceException | yangResourceChecksum 'checksum failure without checksum' | checksumIntegrityExceptionWithoutChecksum || DuplicatedYangResourceException | 'no checksum found' - 'other data failure' | anotherIntegrityException || DataIntegrityViolationException | 'another integrity exception' + 'other data failure' | otherIntegrityException || DataIntegrityViolationException | 'another integrity exception' } } diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistenceSpecBase.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistenceSpecBase.groovy deleted file mode 100644 index 34a040e604..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistenceSpecBase.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2021-2022 Nordix Foundation - * Modifications Copyright (C) 2021 Pantheon.tech - * Modifications Copyright (C) 2021 Bell 
Canada. - * Modifications Copyright (C) 2023 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.impl - -import com.fasterxml.jackson.databind.ObjectMapper -import org.onap.cps.DatabaseTestContainer -import org.onap.cps.spi.repository.AnchorRepository -import org.onap.cps.spi.repository.DataspaceRepository -import org.onap.cps.spi.repository.FragmentRepository -import org.onap.cps.spi.repository.YangResourceRepository -import org.onap.cps.utils.JsonObjectMapper -import org.spockframework.spring.SpringBean -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.test.context.SpringBootTest -import org.testcontainers.spock.Testcontainers -import spock.lang.Shared -import spock.lang.Specification - -@SpringBootTest -@Testcontainers -class CpsPersistenceSpecBase extends Specification { - - @Shared - DatabaseTestContainer databaseTestContainer = DatabaseTestContainer.getInstance() - - @Autowired - DataspaceRepository dataspaceRepository - - @Autowired - YangResourceRepository yangResourceRepository - - @Autowired - AnchorRepository anchorRepository - - @Autowired - FragmentRepository fragmentRepository - - @SpringBean - JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) - - protected static final String CLEAR_DATA = '/data/clear-all.sql' - - static def DATASPACE_NAME = 'DATASPACE-001' - static def SCHEMA_SET_NAME1 = 'SCHEMA-SET-001' - static def SCHEMA_SET_NAME2 = 'SCHEMA-SET-002' - static def ANCHOR_NAME1 = 'ANCHOR-001' - static def ANCHOR_NAME2 = 'ANCHOR-002' - static def ANCHOR_NAME3 = 'ANCHOR-003' - static def ANCHOR_FOR_DATA_NODES_WITH_LEAVES = 'ANCHOR-003' - static def ANCHOR_FOR_SHOP_EXAMPLE = 'ANCHOR-004' - static def ANCHOR_HAVING_SINGLE_TOP_LEVEL_FRAGMENT = 'ANCHOR-005' - static def ANCHOR_WITH_MULTIPLE_TOP_LEVEL_FRAGMENTS = 'ANCHOR-006' -} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy deleted file mode 100644 index ceb9dd4cf3..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.utils - -import org.onap.cps.spi.config.CpsSessionFactory -import org.onap.cps.spi.exceptions.SessionManagerException -import org.onap.cps.spi.impl.CpsPersistenceSpecBase -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.test.context.jdbc.Sql - -class SessionManagerIntegrationSpec extends CpsPersistenceSpecBase{ - - final static String SET_DATA = '/data/anchor.sql' - - @Autowired - SessionManager objectUnderTest - - @Autowired - CpsSessionFactory cpsSessionFactory - - def sessionId - def shortTimeoutForTesting = 300L - - def setup(){ - sessionId = objectUnderTest.startSession() - } - - def cleanup(){ - objectUnderTest.closeSession(sessionId, objectUnderTest.WITH_COMMIT) - } - - @Sql([CLEAR_DATA, SET_DATA]) - def 'Lock anchor.'(){ - when: 'session tries to acquire anchor lock by passing anchor entity details' - objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting) - then: 'no exception is thrown' - noExceptionThrown() - } - - @Sql([CLEAR_DATA, SET_DATA]) - def 'Attempt to lock anchor when another session is holding the lock.'(){ - given: 'another session that holds an anchor lock' - def otherSessionId = objectUnderTest.startSession() - objectUnderTest.lockAnchor(otherSessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting) - when: 'a session tries to acquire the same anchor lock' - objectUnderTest.lockAnchor(sessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting) - then: 'a session manager exception is thrown specifying operation reached timeout' - def thrown = thrown(SessionManagerException) - thrown.message.contains('Timeout') - then: 'when the other session holding the lock is closed, lock can finally be acquired' - objectUnderTest.closeSession(otherSessionId, objectUnderTest.WITH_COMMIT) - objectUnderTest.lockAnchor(sessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting) - } - - @Sql([CLEAR_DATA, SET_DATA]) - def 'Lock anchor twice using the same session.'(){ - given: 'session that already holds an anchor lock' - objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting) - when: 'same session tries to acquire same anchor lock' - objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting) - then: 'no exception is thrown' - noExceptionThrown() - } - -} diff --git a/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java b/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java deleted file mode 100755 index 61a5c042a6..0000000000 --- a/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020 Pantheon.tech - * Modifications Copyright (C) 2022 Nordix Foundation. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps; - -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; - -/** - * The Postgresql database test container wrapper. - * Singleton implementation allows saving time on database initialization which otherwise would occur on each test. - * for debugging/developing purposes you can suspend any test and connect to this database: - * docker exec -it {container-id} sh - * psql -d test -U test - */ -public class DatabaseTestContainer extends PostgreSQLContainer<DatabaseTestContainer> { - private static final String IMAGE_VERSION = "registry.nordix.org/onaptest/postgres:14.1"; - private static DatabaseTestContainer databaseTestContainer; - - private DatabaseTestContainer() { - super(DockerImageName.parse(IMAGE_VERSION).asCompatibleSubstituteFor("postgres")); - } - - /** - * Provides an instance of test container wrapper. - * The returned value expected to be assigned to static variable annotated with @ClassRule. - * This will allow to initialize DB connection env variables before DataSource object - * is initialized by Spring framework. - * - */ - public static DatabaseTestContainer getInstance() { - if (databaseTestContainer == null) { - databaseTestContainer = new DatabaseTestContainer(); - Runtime.getRuntime().addShutdownHook(new Thread(databaseTestContainer::terminate)); - } - return databaseTestContainer; - } - - @Override - public void start() { - super.start(); - System.setProperty("DB_URL", databaseTestContainer.getJdbcUrl()); - System.setProperty("DB_USERNAME", databaseTestContainer.getUsername()); - System.setProperty("DB_PASSWORD", databaseTestContainer.getPassword()); - } - - @Override - public void stop() { - // do nothing on test completion, image removal will be performed via terminate() on JVM shutdown - } - - private void terminate() { - super.stop(); - } -} diff --git a/cps-ri/src/test/java/org/onap/cps/TestApplication.java b/cps-ri/src/test/java/org/onap/cps/TestApplication.java deleted file mode 100644 index 075a241fc7..0000000000 --- a/cps-ri/src/test/java/org/onap/cps/TestApplication.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2020 Pantheon.tech - * Modifications Copyright (C) 2021 Bell Canada. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps; - -import org.springframework.boot.autoconfigure.SpringBootApplication; -import org.springframework.retry.annotation.EnableRetry; - -/** - * The @SpringBootApplication annotated class is required in order to run tests - * marked with @SpringBootTest annotation. - */ -@SpringBootApplication(scanBasePackages = "org.onap.cps.spi") -@EnableRetry -public class TestApplication { -} diff --git a/cps-ri/src/test/resources/application.yml b/cps-ri/src/test/resources/application.yml deleted file mode 100644 index 4f40aeaa06..0000000000 --- a/cps-ri/src/test/resources/application.yml +++ /dev/null @@ -1,38 +0,0 @@ -# ============LICENSE_START======================================================= -# Copyright (C) 2021 Pantheon.tech -# Modifications Copyright (C) 2022 Nordix Foundation. -# ================================================================================ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# SPDX-License-Identifier: Apache-2.0 -# ============LICENSE_END========================================================= - -spring: - jpa: - ddl-auto: create - show-sql: false - properties: - hibernate: - enable_lazy_load_no_trans: true - dialect: org.hibernate.dialect.PostgreSQLDialect - format_sql: true - show_sql: false - - datasource: - url: ${DB_URL} - username: ${DB_USERNAME} - password: ${DB_PASSWORD} - driverClassName: org.postgresql.Driver - initialization-mode: always - - liquibase: - change-log: classpath:changelog/changelog-master.yaml diff --git a/cps-ri/src/test/resources/data/anchor.sql b/cps-ri/src/test/resources/data/anchor.sql deleted file mode 100644 index a15d5aed21..0000000000 --- a/cps-ri/src/test/resources/data/anchor.sql +++ /dev/null @@ -1,39 +0,0 @@ -/* - ============LICENSE_START======================================================= - Copyright (C) 2020 Pantheon.tech - Modifications Copyright (C) 2020-2023 Nordix Foundation. - Modifications Copyright (C) 2021-2022 Bell Canada. - ================================================================================ - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. - - SPDX-License-Identifier: Apache-2.0 - ============LICENSE_END========================================================= -*/ - -INSERT INTO DATASPACE (ID, NAME) VALUES - (1001, 'DATASPACE-001'), - (1002, 'DATASPACE-002-NO-DATA'), - (1003, 'DATASPACE-003'); - -INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES - (2001, 'SCHEMA-SET-001', 1001), - (2002, 'SCHEMA-SET-002', 1001), - (2003, 'SCHEMA-SET-002-NO-ANCHORS', 1003); - -INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES - (3001, 'ANCHOR-001', 1001, 2001), - (3002, 'ANCHOR-002', 1001, 2002), - (3003, 'ANCHOR-003', 1001, 2002); - -INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES - (4001, 3001, null, '/xpath', '{}'); diff --git a/cps-ri/src/test/resources/data/anchors-schemaset-modules.sql b/cps-ri/src/test/resources/data/anchors-schemaset-modules.sql deleted file mode 100644 index 65b3a48ca5..0000000000 --- a/cps-ri/src/test/resources/data/anchors-schemaset-modules.sql +++ /dev/null @@ -1,49 +0,0 @@ -/* - ============LICENSE_START======================================================= - Copyright (C) 2021-2022 Nordix Foundation. - ================================================================================ - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- - SPDX-License-Identifier: Apache-2.0 - ============LICENSE_END========================================================= -*/ - -INSERT INTO DATASPACE (ID, NAME) VALUES - (1001, 'dataspace-1'), (1002, 'dataspace-2'); - -INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES - (2001, 'schema-set-1', 1001), - (2002, 'schema-set-2', 1001), - (2003, 'schema-set-3', 1001), - (2004, 'schema-set-4', 1002); - -INSERT INTO YANG_RESOURCE (ID, FILE_NAME, CONTENT, CHECKSUM, MODULE_NAME, REVISION) VALUES - (3001, 'module1@revA.yang', 'some-content', 'checksum1','module-name-1','revA'), - (3002, 'module2@revA.yang', 'some-content', 'checksum2','module-name-2','revA'), - (3003, 'module2@revB.yang', 'some-content', 'checksum3','module-name-2','revB'), - (3004, 'module3@revA.yang', 'some-content', 'checksum4','module-name-3','revA'); - -INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES - (2001, 3001), --schema-set-1(anchor-1) has modules module1@revA, module2@revA - (2001, 3002), - (2002, 3001), --schema-set-2(anchor-2) has modules module1@revA, module2@revB - (2002, 3003), - (2003, 3002), --schema-set-3(anchor-3) has modules module2@revA, module2@revB - (2003, 3003), - (2004, 3001); --schema-set-4(anchor-4) has module module1@revA but in other dataspace - -INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES - (6001, 'anchor-1', 1001, 2001), - (6002, 'anchor-2', 1001, 2002), - (6003, 'anchor-3', 1001, 2003), - (6005, 'anchor-4', 1002, 2004); diff --git a/cps-ri/src/test/resources/data/clear-all.sql b/cps-ri/src/test/resources/data/clear-all.sql deleted file mode 100644 index 07c8a7aab5..0000000000 --- a/cps-ri/src/test/resources/data/clear-all.sql +++ /dev/null @@ -1,28 +0,0 @@ -/* - ============LICENSE_START======================================================= - Copyright (C) 2020-2021 Pantheon.tech - Modifications Copyright (C) 2020,2022 Nordix Foundation. - Modifications Copyright (C) 2020 Bell Canada. - ================================================================================ - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - SPDX-License-Identifier: Apache-2.0 - ============LICENSE_END========================================================= -*/ - -DELETE FROM FRAGMENT; -DELETE FROM ANCHOR; -DELETE FROM DATASPACE; -DELETE FROM YANG_RESOURCE --- following tables are cleared by CASCADE constraint: SCHEMA_SET, SCHEMA_SET_YANG_RESOURCES - diff --git a/cps-ri/src/test/resources/data/fragment.sql b/cps-ri/src/test/resources/data/fragment.sql deleted file mode 100755 index 4980073433..0000000000 --- a/cps-ri/src/test/resources/data/fragment.sql +++ /dev/null @@ -1,116 +0,0 @@ -/* - ============LICENSE_START======================================================= - Copyright (C) 2021-2023 Nordix Foundation. - Modifications Copyright (C) 2021 Pantheon.tech - Modifications Copyright (C) 2021-2022 Bell Canada. 
- ================================================================================ - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - SPDX-License-Identifier: Apache-2.0 - ============LICENSE_END========================================================= -*/ - -INSERT INTO DATASPACE (ID, NAME) VALUES - (1001, 'DATASPACE-001'), - (1002, 'NCMP-Admin'); - -INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES - (2001, 'SCHEMA-SET-001', 1001); - -INSERT INTO YANG_RESOURCE (ID, FILE_NAME, CONTENT, CHECKSUM, MODULE_NAME, REVISION) VALUES - (4001, 'TEST','', 'SAMPLECHECKSUM','TESTMODULENAME', 'SAMPLEREVISION'); - -UPDATE YANG_RESOURCE SET -content = 'module stores { - yang-version 1.1; - namespace "org:onap:ccsdk:sample"; - - prefix book-store; - - revision "2020-09-15" { - description - "Sample Model"; - } - } -' -where ID = 4001; - -INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES - (2001, 4001); - -INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES - (3001, 'ANCHOR-001', 1001, 2001), - (3003, 'ANCHOR-003', 1001, 2001), - (3004, 'ncmp-dmi-registry', 1002, 2001), - (3005, 'ANCHOR-005', 1001, 2001), - (3006, 'ANCHOR-006', 1001, 2001); - -INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH) VALUES - (4001, 3001, null, '/parent-1'), - (4002, 3001, null, '/parent-2'), - (4003, 3001, null, '/parent-3'), - (4004, 3001, 4001, '/parent-1/child-1'), - (4005, 3001, 4002, '/parent-2/child-2'), - (4006, 3001, 4004, '/parent-1/child-1/grandchild-1'); - -INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES - (5009, 3005, null, '/parent-207', '{"parent-leaf": "parent-leaf value"}'), - (5010, 3005, 5009, '/parent-207/child-001', '{"first-child-leaf": "first-child-leaf value"}'), - (5011, 3005, 5009, '/parent-207/child-002', '{"second-child-leaf": "second-child-leaf value"}'), - (5012, 3005, 5011, '/parent-207/child-002/grand-child', '{"grand-child-leaf": "grand-child-leaf value"}'); - -INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES - (5013, 3006, null, '/parent-208', '{"parent-leaf-1": "parent-leaf value-1"}'), - (5014, 3006, 5013, '/parent-208/child-001', '{"first-child-leaf": "first-child-leaf value"}'), - (5015, 3006, 5013, '/parent-208/child-002', '{"second-child-leaf": "second-child-leaf value"}'), - (5016, 3006, 5015, '/parent-208/child-002/grand-child', '{"grand-child-leaf": "grand-child-leaf value"}'), - (5017, 3006, null, '/parent-209', '{"parent-leaf-2": "parent-leaf value-2"}'), - (5018, 3006, 5017, '/parent-209/child-001', '{"first-child-leaf": "first-child-leaf value"}'), - (5019, 3006, 5017, '/parent-209/child-002', '{"second-child-leaf": "second-child-leaf value"}'), - (5020, 3006, 5019, '/parent-209/child-002/grand-child', '{"grand-child-leaf": "grand-child-leaf value"}'); - -INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES - (4201, 3003, null, '/parent-200', '{"leaf-value": "original"}'), - (4202, 3003, 4201, '/parent-200/child-201', '{"leaf-value": "original"}'), - (4203, 3003, 4202, 
'/parent-200/child-201/grand-child', '{"leaf-value": "original"}'), - (4206, 3003, null, '/parent-201', '{"leaf-value": "original"}'), - (4207, 3003, 4206, '/parent-201/child-203', '{}'), - (4208, 3003, 4206, '/parent-201/child-204[@key=''A'']', '{"key": "A"}'), - (4209, 3003, 4206, '/parent-201/child-204[@key=''B'']', '{"key": "B"}'), - (4211, 3003, null, '/parent-202', '{"leaf-value": "original"}'), - (4212, 3003, 4211, '/parent-202/child-205[@key=''A'' and @key2=''B'']', '{"key": "A", "key2": "B"}'), - (4213, 3003, 4211, '/parent-202/child-206[@key=''A'']', '{"key": "A"}'), - (4214, 3003, null, '/parent-203', '{"leaf-value": "original"}'), - (4215, 3003, 4214, '/parent-203/child-203', '{}'), - (4216, 3003, 4214, '/parent-203/child-204[@key=''A'']', '{"key": "A"}'), - (4217, 3003, 4214, '/parent-203/child-204[@key=''B'']', '{"key": "B"}'), - (4218, 3003, 4217, '/parent-203/child-204[@key=''B'']/grand-child-204[@key2=''Y'']', '{"key": "B", "key2": "Y"}'), - (4226, 3003, null, '/parent-206', '{"leaf-value": "original"}'), - (4227, 3003, 4226, '/parent-206/child-206', '{}'), - (4228, 3003, 4227, '/parent-206/child-206/grand-child-206', '{}'), - (4229, 3003, 4227, '/parent-206/child-206/grand-child-206[@key=''A'']', '{"key": "A"}'), - (4230, 3003, 4227, '/parent-206/child-206/grand-child-206[@key=''X'']', '{"key": "X"}'), - (4231, 3003, null, '/parent-206[@key=''A'']', '{"key": "A"}'), - (4232, 3003, 4231, '/parent-206[@key=''A'']/child-206', '{}'), - (4233, 3003, null, '/parent-206[@key=''B'']', '{"key": "B"}'); - -INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES - (5000, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo'']', '{"id": "PNFDemo", "dmi-service-name": "http://172.21.235.14:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'), - (5001, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo2'']', '{"id": "PNFDemo2", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'), - (5002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo3'']', '{"id": "PNFDemo3", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'), - (5003, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo4'']', '{"id": "PNFDemo4", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'), - (5004, 3004, 5000, '/dmi-registry/cm-handles[@id=''PNFDemo'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'), - (5005, 3004, 5001, '/dmi-registry/cm-handles[@id=''PNFDemo2'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'), - (5006, 3004, 5002, '/dmi-registry/cm-handles[@id=''PNFDemo3'']/public-properties[@name=''Contact'']', '{"name": "Contact3", "value": "PNF3@bookstore.com"}'), - (5007, 3004, 5003, '/dmi-registry/cm-handles[@id=''PNFDemo4'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'), - (5008, 3004, 5004, '/dmi-registry/cm-handles[@id=''PNFDemo4'']/public-properties[@name=''Contact2'']', '{"name": "Contact2", "value": "newemailforstore2@bookstore.com"}'); diff --git a/cps-ri/src/test/resources/data/perf-test.sql b/cps-ri/src/test/resources/data/perf-test.sql deleted file mode 100644 index 48e8b1fbb5..0000000000 --- a/cps-ri/src/test/resources/data/perf-test.sql +++ /dev/null @@ -1,28 +0,0 @@ -/* - 
============LICENSE_START======================================================= - Copyright (C) 2022-2023 Nordix Foundation. - ================================================================================ - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - SPDX-License-Identifier: Apache-2.0 - ============LICENSE_END========================================================= -*/ - -INSERT INTO DATASPACE (ID, NAME) VALUES (9001, 'PERF-DATASPACE'); - -INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES (9002, 'PERF-SCHEMA-SET', 9001); - -INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES (9003, 'PERF-ANCHOR', 9001, 9002); - -INSERT INTO FRAGMENT (ID, ANCHOR_ID, PARENT_ID, XPATH) VALUES (0, 9003, null, '/perf-parent-1'); - diff --git a/cps-ri/src/test/resources/data/schemaset.sql b/cps-ri/src/test/resources/data/schemaset.sql deleted file mode 100644 index e5bf63b701..0000000000 --- a/cps-ri/src/test/resources/data/schemaset.sql +++ /dev/null @@ -1,57 +0,0 @@ -/* - ============LICENSE_START======================================================= - Copyright (C) 2020-2021 Pantheon.tech - Modifications Copyright (C) 2020-2023 Nordix Foundation. - Modifications Copyright (C) 2020-2021 Bell Canada. - ================================================================================ - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- - SPDX-License-Identifier: Apache-2.0 - ============LICENSE_END========================================================= -*/ - -INSERT INTO DATASPACE (ID, NAME) VALUES - (1001, 'DATASPACE-001'), (1002, 'DATASPACE-002'); - -INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES - (2001, 'SCHEMA-SET-001', 1001), - (2002, 'SCHEMA-SET-002', 1001), - (2100, 'SCHEMA-SET-100', 1001), -- for removal, not referenced by anchors - (2101, 'SCHEMA-SET-101', 1001), -- for removal, having anchor and data associated - (2003, 'SCHEMA-SET-003', 1002), - (2004, 'SCHEMA-SET-004', 1002), - (2005, 'SCHEMA-SET-005', 1001); - -INSERT INTO YANG_RESOURCE (ID, FILE_NAME, CONTENT, CHECKSUM, MODULE_NAME, REVISION) VALUES - (3001, 'module1@2020-02-02.yang', 'CONTENT-001', 'e8bdda931099310de66532e08c3fafec391db29f55c81927b168f6aa8f81b73b',null,null), - (3002, 'module2@2020-02-02.yang', 'CONTENT-002', '7e7d48afbe066ed0a890a09081859046d3dde52300dfcdb13be5b20780353a11','MODULE-NAME-002','REVISION-002'), - (3003, 'module3@2020-02-02.yang', 'CONTENT-003', 'ca20c45fec8547633f05ff8905c48ffa7b02b94ec3ad4ed79922e6ba40779df3','MODULE-NAME-003','REVISION-002'), - (3004, 'module4@2020-02-02.yang', 'CONTENT-004', 'f6ed09d343562e4d4ae5140f3c6a55df9c53f6da8e30dda8cbd9eaf9cd449be0','MODULE-NAME-004','REVISION-004'), - (3100, 'orphan@2020-02-02.yang', 'ORPHAN', 'checksum',null,null), -- for auto-removal as orphan - (3005, 'module5@2020-02-02.yang', 'CONTENT-005', 'checksum-005','MODULE-NAME-005','REVISION-002'), - (3006, 'module6@2020-02-02.yang', 'CONTENT-006', 'checksum-006','MODULE-NAME-006','REVISION-006'); - -INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES - (2001, 3001), (2001, 3002), - (2002, 3003), (2005, 3004), - (2100, 3003), (2100, 3100), -- orphan removal case - (2101, 3003), (2101, 3004), - (2003, 3005), (2004, 3006); - -INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES -- anchors for removal - (6001, 'ANCHOR1', 1001, 2101), - (6002, 'ANCHOR2', 1001, 2101), - (6003, 'ANCHOR3', 1001, 2005); - -INSERT INTO FRAGMENT (ID, XPATH, ANCHOR_ID) VALUES - (7001, '/XPATH', 6001); diff --git a/cps-ri/src/test/resources/hibernate.cfg.xml b/cps-ri/src/test/resources/hibernate.cfg.xml deleted file mode 100644 index fae9275ddc..0000000000 --- a/cps-ri/src/test/resources/hibernate.cfg.xml +++ /dev/null @@ -1,16 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE hibernate-configuration PUBLIC
- "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
- "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
-
-<hibernate-configuration>
- <session-factory>
- <property name="hibernate.connection.driver_class">org.postgresql.Driver</property>
- <property name="hibernate.connection.url">${DB_URL}</property>
- <property name="hibernate.connection.username">${DB_USERNAME}</property>
- <property name="hibernate.connection.password">${DB_PASSWORD}</property>
- <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQL82Dialect</property>
- <property name="show_sql">true</property>
- <property name="hibernate.hbm2ddl.auto">none</property>
- </session-factory>
-</hibernate-configuration>
\ No newline at end of file