Diffstat (limited to 'cps-ri')
-rw-r--r--  cps-ri/pom.xml                                                                              |   9
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java               |  85
-rwxr-xr-x  cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java             |   3
-rwxr-xr-x  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java                    |  41
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java        |  29
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java    | 120
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy      |  46
-rwxr-xr-x  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy | 436
-rw-r--r-- [-rwxr-xr-x]  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy | 422
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy (renamed from cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceUnitSpec.groovy) |   2
-rwxr-xr-x  cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java                                 |   6
-rw-r--r--  cps-ri/src/test/resources/data/anchor.sql                                                    |  24
-rw-r--r--  cps-ri/src/test/resources/data/clear-all.sql                                                 |  22
-rw-r--r--  cps-ri/src/test/resources/data/cps-path-query.sql                                            |  27
-rwxr-xr-x  cps-ri/src/test/resources/data/fragment.sql                                                  |  24
-rw-r--r--  cps-ri/src/test/resources/data/schemaset.sql                                                 |  22
16 files changed, 854 insertions(+), 464 deletions(-)
diff --git a/cps-ri/pom.xml b/cps-ri/pom.xml
index 033b2e211..273cb2e72 100644
--- a/cps-ri/pom.xml
+++ b/cps-ri/pom.xml
@@ -32,6 +32,14 @@
<artifactId>cps-ri</artifactId>
+ <repositories>
+ <repository>
+ <id>nordix-liquibase-repo</id>
+ <name>liquibase</name>
+ <url>https://artifactory.nordix.org/artifactory/liquibase</url>
+ </repository>
+ </repositories>
+
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
@@ -78,6 +86,7 @@
<dependency>
<groupId>org.liquibase</groupId>
<artifactId>liquibase-core</artifactId>
+ <version>4.4.2-nordix</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
index 844ad8474..fdbafd4be 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
@@ -37,36 +37,49 @@ import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.transaction.Transactional;
+import lombok.extern.slf4j.Slf4j;
+import org.hibernate.StaleStateException;
import org.onap.cps.cpspath.parser.CpsPathQuery;
-import org.onap.cps.cpspath.parser.CpsPathQueryType;
import org.onap.cps.spi.CpsDataPersistenceService;
import org.onap.cps.spi.FetchDescendantsOption;
import org.onap.cps.spi.entities.AnchorEntity;
import org.onap.cps.spi.entities.DataspaceEntity;
import org.onap.cps.spi.entities.FragmentEntity;
import org.onap.cps.spi.exceptions.AlreadyDefinedException;
+import org.onap.cps.spi.exceptions.ConcurrencyException;
import org.onap.cps.spi.exceptions.CpsPathException;
import org.onap.cps.spi.model.DataNode;
import org.onap.cps.spi.model.DataNodeBuilder;
import org.onap.cps.spi.repository.AnchorRepository;
import org.onap.cps.spi.repository.DataspaceRepository;
import org.onap.cps.spi.repository.FragmentRepository;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.stereotype.Service;
@Service
+@Slf4j
public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService {
- @Autowired
private DataspaceRepository dataspaceRepository;
- @Autowired
private AnchorRepository anchorRepository;
- @Autowired
private FragmentRepository fragmentRepository;
+ /**
+ * Constructor.
+ *
+ * @param dataspaceRepository dataspaceRepository
+ * @param anchorRepository anchorRepository
+ * @param fragmentRepository fragmentRepository
+ */
+ public CpsDataPersistenceServiceImpl(final DataspaceRepository dataspaceRepository,
+ final AnchorRepository anchorRepository, final FragmentRepository fragmentRepository) {
+ this.dataspaceRepository = dataspaceRepository;
+ this.anchorRepository = anchorRepository;
+ this.fragmentRepository = fragmentRepository;
+ }
+
private static final Gson GSON = new GsonBuilder().create();
private static final String REG_EX_FOR_OPTIONAL_LIST_INDEX = "(\\[@\\S+?]){0,1})";
@@ -179,20 +192,8 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
} catch (final IllegalStateException e) {
throw new CpsPathException(e.getMessage());
}
- List<FragmentEntity> fragmentEntities;
- if (CpsPathQueryType.XPATH_LEAF_VALUE.equals(cpsPathQuery.getCpsPathQueryType())) {
- fragmentEntities = fragmentRepository
- .getByAnchorAndXpathAndLeafAttributes(anchorEntity.getId(), cpsPathQuery.getXpathPrefix(),
- cpsPathQuery.getLeafName(), cpsPathQuery.getLeafValue());
- } else if (CpsPathQueryType.XPATH_HAS_DESCENDANT_WITH_LEAF_VALUES.equals(cpsPathQuery.getCpsPathQueryType())) {
- final String leafDataAsJson = GSON.toJson(cpsPathQuery.getLeavesData());
- fragmentEntities = fragmentRepository
- .getByAnchorAndDescendentNameAndLeafValues(anchorEntity.getId(), cpsPathQuery.getDescendantName(),
- leafDataAsJson);
- } else {
- fragmentEntities = fragmentRepository
- .getByAnchorAndXpathEndsInDescendantName(anchorEntity.getId(), cpsPathQuery.getDescendantName());
- }
+ List<FragmentEntity> fragmentEntities =
+ fragmentRepository.findByAnchorAndCpsPath(anchorEntity.getId(), cpsPathQuery);
if (cpsPathQuery.hasAncestorAxis()) {
final Set<String> ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery);
fragmentEntities = ancestorXpaths.isEmpty()
@@ -247,18 +248,41 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
}
@Override
- public void replaceDataNodeTree(final String dataspaceName, final String anchorName, final DataNode dataNode) {
+ public void replaceDataNodeTree(final String dataspaceName, final String anchorName,
+ final DataNode dataNode) {
final var fragmentEntity = getFragmentByXpath(dataspaceName, anchorName, dataNode.getXpath());
- removeExistingDescendants(fragmentEntity);
+ replaceDataNodeTree(fragmentEntity, dataNode);
+ try {
+ fragmentRepository.save(fragmentEntity);
+ } catch (final StaleStateException staleStateException) {
+ throw new ConcurrencyException("Concurrent Transactions",
+ String.format("dataspace :'%s', Anchor : '%s' and xpath: '%s' is updated by another transaction.",
+ dataspaceName, anchorName, dataNode.getXpath()),
+ staleStateException);
+ }
+ }
- fragmentEntity.setAttributes(GSON.toJson(dataNode.getLeaves()));
- final Set<FragmentEntity> childFragmentEntities = dataNode.getChildDataNodes().stream().map(
- childDataNode -> convertToFragmentWithAllDescendants(
- fragmentEntity.getDataspace(), fragmentEntity.getAnchor(), childDataNode)
- ).collect(Collectors.toUnmodifiableSet());
- fragmentEntity.setChildFragments(childFragmentEntities);
+ private void replaceDataNodeTree(final FragmentEntity existingFragmentEntity, final DataNode submittedDataNode) {
- fragmentRepository.save(fragmentEntity);
+ existingFragmentEntity.setAttributes(GSON.toJson(submittedDataNode.getLeaves()));
+
+ final Map<String, FragmentEntity> existingChildrenByXpath = existingFragmentEntity.getChildFragments()
+ .stream().collect(Collectors.toMap(FragmentEntity::getXpath, childFragmentEntity -> childFragmentEntity));
+
+ final var updatedChildFragments = new HashSet<FragmentEntity>();
+
+ for (final DataNode submittedChildDataNode : submittedDataNode.getChildDataNodes()) {
+ final FragmentEntity childFragment;
+ if (existingChildrenByXpath.containsKey(submittedChildDataNode.getXpath())) {
+ childFragment = existingChildrenByXpath.get(submittedChildDataNode.getXpath());
+ replaceDataNodeTree(childFragment, submittedChildDataNode);
+ } else {
+ childFragment = convertToFragmentWithAllDescendants(
+ existingFragmentEntity.getDataspace(), existingFragmentEntity.getAnchor(), submittedChildDataNode);
+ }
+ updatedChildFragments.add(childFragment);
+ }
+ existingFragmentEntity.setChildFragments(updatedChildFragments);
}
@Override
@@ -285,11 +309,6 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
}
}
- private void removeExistingDescendants(final FragmentEntity fragmentEntity) {
- fragmentEntity.setChildFragments(Collections.emptySet());
- fragmentRepository.save(fragmentEntity);
- }
-
private static boolean isRootXpath(final String xpath) {
return "/".equals(xpath) || "".equals(xpath);
}
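The replaceDataNodeTree changes above swap the old delete-and-recreate approach for an in-place merge of child fragments by xpath, and translate Hibernate's StaleStateException into a CPS ConcurrencyException when another transaction has already updated the fragment. A minimal, hypothetical caller-side sketch (not part of this patch; the single-retry policy and class name are assumptions, only the service method and exception come from the change) of how that exception might be handled:

import org.onap.cps.spi.CpsDataPersistenceService;
import org.onap.cps.spi.exceptions.ConcurrencyException;
import org.onap.cps.spi.model.DataNode;

class ReplaceWithRetrySketch {
    // Retry the tree replacement once when a concurrent transaction won the optimistic lock.
    static void replaceWithSingleRetry(final CpsDataPersistenceService service, final String dataspaceName,
            final String anchorName, final DataNode dataNode) {
        try {
            service.replaceDataNodeTree(dataspaceName, anchorName, dataNode);
        } catch (final ConcurrencyException e) {
            // The fragment was updated concurrently; a real caller would re-read and re-apply its changes first.
            service.replaceDataNodeTree(dataspaceName, anchorName, dataNode);
        }
    }
}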
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
index 972807b72..1b3dc2486 100755
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
@@ -29,7 +29,6 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
-import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.transaction.Transactional;
@@ -186,7 +185,7 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ
*/
private String getDuplicatedChecksumFromException(final ConstraintViolationException exception) {
String checksum = null;
- final Matcher matcher = CHECKSUM_EXCEPTION_PATTERN.matcher(exception.getSQLException().getMessage());
+ final var matcher = CHECKSUM_EXCEPTION_PATTERN.matcher(exception.getSQLException().getMessage());
if (matcher.find() && matcher.groupCount() == 1) {
checksum = matcher.group(1);
}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
index 4d7e7ff54..c48c79ef6 100755
--- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2020-201 Nordix Foundation.
+ * Copyright (C) 2020-2021 Nordix Foundation.
* Modifications Copyright (C) 2020-2021 Bell Canada.
* Modifications Copyright (C) 2020-2021 Pantheon.tech.
* ================================================================================
@@ -38,13 +38,15 @@ import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
@Repository
-public interface FragmentRepository extends JpaRepository<FragmentEntity, Long> {
+public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery {
Optional<FragmentEntity> findByDataspaceAndAnchorAndXpath(@NonNull DataspaceEntity dataspaceEntity,
- @NonNull AnchorEntity anchorEntity, @NonNull String xpath);
+ @NonNull AnchorEntity anchorEntity,
+ @NonNull String xpath);
default FragmentEntity getByDataspaceAndAnchorAndXpath(@NonNull DataspaceEntity dataspaceEntity,
- @NonNull AnchorEntity anchorEntity, @NonNull String xpath) {
+ @NonNull AnchorEntity anchorEntity,
+ @NonNull String xpath) {
return findByDataspaceAndAnchorAndXpath(dataspaceEntity, anchorEntity, xpath)
.orElseThrow(() -> new DataNodeNotFoundException(dataspaceEntity.getName(), anchorEntity.getName(), xpath));
}
@@ -52,42 +54,19 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>
@Query(
value = "SELECT * FROM FRAGMENT WHERE anchor_id = :anchor AND dataspace_id = :dataspace AND parent_id is NULL",
nativeQuery = true)
- List<FragmentEntity> findRootsByDataspaceAndAnchor(
- @Param("dataspace") int dataspaceId, @Param("anchor") int anchorId);
+ List<FragmentEntity> findRootsByDataspaceAndAnchor(@Param("dataspace") int dataspaceId,
+ @Param("anchor") int anchorId);
default FragmentEntity findFirstRootByDataspaceAndAnchor(@NonNull DataspaceEntity dataspaceEntity,
- @NonNull AnchorEntity anchorEntity) {
+ @NonNull AnchorEntity anchorEntity) {
return findRootsByDataspaceAndAnchor(dataspaceEntity.getId(), anchorEntity.getId()).stream().findFirst()
.orElseThrow(() -> new DataNodeNotFoundException(dataspaceEntity.getName(), anchorEntity.getName()));
}
List<FragmentEntity> findAllByAnchorAndXpathIn(@NonNull AnchorEntity anchorEntity,
- @NonNull Collection<String> xpath);
+ @NonNull Collection<String> xpath);
@Modifying
@Query("DELETE FROM FragmentEntity fe WHERE fe.anchor IN (:anchors)")
void deleteByAnchorIn(@NotNull @Param("anchors") Collection<AnchorEntity> anchorEntities);
-
- @Query(value =
- "SELECT * FROM FRAGMENT WHERE (anchor_id = :anchor) AND (xpath = (:xpath) OR xpath LIKE "
- + "CONCAT(:xpath,'\\[@%]')) AND attributes @> jsonb_build_object(:leafName , :leafValue)",
- nativeQuery = true)
- // Above query will match an xpath with or without the index for a list [@key=value] and match anchor id,
- // leaf name and leaf value
- List<FragmentEntity> getByAnchorAndXpathAndLeafAttributes(@Param("anchor") int anchorId, @Param("xpath")
- String xpathPrefix, @Param("leafName") String leafName, @Param("leafValue") Object leafValue);
-
- @Query(value = "SELECT * FROM FRAGMENT WHERE anchor_id = :anchor AND xpath LIKE CONCAT('%/',:descendantName)",
- nativeQuery = true)
- // Above query will match the anchor id and last descendant name
- List<FragmentEntity> getByAnchorAndXpathEndsInDescendantName(@Param("anchor") int anchorId,
- @Param("descendantName") String descendantName);
-
- @Query(value = "SELECT * FROM FRAGMENT WHERE anchor_id = :anchor AND (xpath LIKE CONCAT('%/',:descendantName) OR "
- + "xpath LIKE CONCAT('%/', :descendantName,'\\[@%]')) AND attributes @> :leafDataAsJson\\:\\:jsonb",
- nativeQuery = true)
- // Above query will match the anchor id, last descendant name and all parameters passed into leafDataASJson with the
- // attribute values of the requested data node eg: {"leaf_name":"value", "another_leaf_name":"another value"}​​​​​​
- List<FragmentEntity> getByAnchorAndDescendentNameAndLeafValues(@Param("anchor") int anchorId,
- @Param("descendantName") String descendantName, @Param("leafDataAsJson") String leafDataAsJson);
}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java
new file mode 100644
index 000000000..04138ecc6
--- /dev/null
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java
@@ -0,0 +1,29 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2021 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.repository;
+
+import java.util.List;
+import org.onap.cps.cpspath.parser.CpsPathQuery;
+import org.onap.cps.spi.entities.FragmentEntity;
+
+public interface FragmentRepositoryCpsPathQuery {
+ List<FragmentEntity> findByAnchorAndCpsPath(int anchorId, CpsPathQuery cpsPathQuery);
+}
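FragmentRepositoryCpsPathQuery is wired into FragmentRepository above as a Spring Data "repository fragment": because the implementation class is named after the custom interface with an Impl suffix, Spring Data JPA composes it into the repository proxy automatically. A minimal sketch of that composition pattern, using placeholder names rather than the actual CPS classes:

import java.util.List;

// Custom fragment interface (analogous to FragmentRepositoryCpsPathQuery).
interface CustomQuery {
    List<String> findByCustomCriteria(int anchorId);
}

// Picked up by Spring Data because of the "<InterfaceName>Impl" naming convention.
class CustomQueryImpl implements CustomQuery {
    @Override
    public List<String> findByCustomCriteria(final int anchorId) {
        return List.of("result-for-anchor-" + anchorId); // stand-in for the native query
    }
}

// The JPA repository then simply extends both interfaces, as in the patch:
// public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>, FragmentRepositoryCpsPathQuery { }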
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java
new file mode 100644
index 000000000..4aa3e5fb3
--- /dev/null
+++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java
@@ -0,0 +1,120 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2021 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.repository;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.Query;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.onap.cps.cpspath.parser.CpsPathPrefixType;
+import org.onap.cps.cpspath.parser.CpsPathQuery;
+import org.onap.cps.spi.entities.FragmentEntity;
+
+public class FragmentRepositoryCpsPathQueryImpl implements FragmentRepositoryCpsPathQuery {
+
+ public static final String SIMILAR_TO_ABSOLUTE_PATH_PREFIX = "%/";
+ public static final String SIMILAR_TO_OPTIONAL_LIST_INDEX_POSTFIX = "(\\[[^/]*])?";
+
+ @PersistenceContext
+ private EntityManager entityManager;
+
+ private static final Gson GSON = new GsonBuilder().create();
+
+ @Override
+ public List<FragmentEntity> findByAnchorAndCpsPath(final int anchorId, final CpsPathQuery cpsPathQuery) {
+ final var sqlStringBuilder = new StringBuilder("SELECT * FROM FRAGMENT WHERE anchor_id = :anchorId");
+ final Map<String, Object> queryParameters = new HashMap<>();
+ queryParameters.put("anchorId", anchorId);
+ sqlStringBuilder.append(" AND xpath SIMILAR TO :xpathRegex");
+ final String xpathRegex = getSimilarToXpathSqlRegex(cpsPathQuery);
+ queryParameters.put("xpathRegex", xpathRegex);
+ if (cpsPathQuery.hasLeafConditions()) {
+ sqlStringBuilder.append(" AND attributes @> :leafDataAsJson\\:\\:jsonb");
+ queryParameters.put("leafDataAsJson", GSON.toJson(cpsPathQuery.getLeavesData()));
+ }
+
+ addTextFunctionCondition(cpsPathQuery, sqlStringBuilder, queryParameters);
+ final var query = entityManager.createNativeQuery(sqlStringBuilder.toString(), FragmentEntity.class);
+ setQueryParameters(query, queryParameters);
+ return query.getResultList();
+ }
+
+ @NotNull
+ private static String getSimilarToXpathSqlRegex(final CpsPathQuery cpsPathQuery) {
+ final var xpathRegexBuilder = new StringBuilder();
+ if (CpsPathPrefixType.ABSOLUTE.equals(cpsPathQuery.getCpsPathPrefixType())) {
+ xpathRegexBuilder.append(escapeXpath(cpsPathQuery.getXpathPrefix()));
+ } else {
+ xpathRegexBuilder.append(SIMILAR_TO_ABSOLUTE_PATH_PREFIX);
+ xpathRegexBuilder.append(escapeXpath(cpsPathQuery.getDescendantName()));
+ }
+ xpathRegexBuilder.append(SIMILAR_TO_OPTIONAL_LIST_INDEX_POSTFIX);
+ return xpathRegexBuilder.toString();
+ }
+
+ @NotNull
+ private static String escapeXpath(final String xpath) {
+ // See https://jira.onap.org/browse/CPS-500 for limitations of this basic escape mechanism
+ return xpath.replace("[@", "\\[@");
+ }
+
+ @Nullable
+ private static Integer getTextValueAsInt(final CpsPathQuery cpsPathQuery) {
+ try {
+ return Integer.parseInt(cpsPathQuery.getTextFunctionConditionValue());
+ } catch (final NumberFormatException e) {
+ return null;
+ }
+ }
+
+ private static void addTextFunctionCondition(final CpsPathQuery cpsPathQuery, final StringBuilder sqlStringBuilder,
+ final Map<String, Object> queryParameters) {
+ if (cpsPathQuery.hasTextFunctionCondition()) {
+ sqlStringBuilder.append(" AND (");
+ sqlStringBuilder.append("attributes @> jsonb_build_object(:textLeafName, :textValue)");
+ sqlStringBuilder
+ .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValue))");
+ queryParameters.put("textLeafName", cpsPathQuery.getTextFunctionConditionLeafName());
+ queryParameters.put("textValue", cpsPathQuery.getTextFunctionConditionValue());
+ final var textValueAsInt = getTextValueAsInt(cpsPathQuery);
+ if (textValueAsInt != null) {
+ sqlStringBuilder.append(" OR attributes @> jsonb_build_object(:textLeafName, :textValueAsInt)");
+ sqlStringBuilder
+ .append(" OR attributes @> jsonb_build_object(:textLeafName, json_build_array(:textValueAsInt))");
+ queryParameters.put("textValueAsInt", textValueAsInt);
+ }
+ sqlStringBuilder.append(")");
+ }
+ }
+
+ private static void setQueryParameters(final Query query, final Map<String, Object> queryParameters) {
+ for (final Map.Entry<String, Object> queryParameter : queryParameters.entrySet()) {
+ query.setParameter(queryParameter.getKey(), queryParameter.getValue());
+ }
+ }
+
+}
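For reference, a hypothetical illustration (not part of the patch) of the native query this builder is expected to produce for a descendant CPS path with one leaf condition, for example //book[@title='Dune']; the literal values below are assumptions derived from the logic above:

public class CpsPathSqlSketch {
    public static void main(final String[] args) {
        // Descendant query: xpath must end in /book, optionally followed by a list index such as [@key=value].
        final String descendantName = "book";
        final String xpathRegex = "%/" + descendantName + "(\\[[^/]*])?";   // => %/book(\[[^/]*])?
        final String sql = "SELECT * FROM FRAGMENT WHERE anchor_id = :anchorId"
            + " AND xpath SIMILAR TO :xpathRegex"
            + " AND attributes @> :leafDataAsJson\\:\\:jsonb";              // leaf condition as jsonb containment
        System.out.println(sql);
        System.out.println("xpathRegex     = " + xpathRegex);
        System.out.println("leafDataAsJson = {\"title\":\"Dune\"}");        // from cpsPathQuery.getLeavesData()
    }
}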
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
index 8dc9b7f3c..ae88d302b 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
@@ -23,7 +23,6 @@ package org.onap.cps.spi.impl
import org.onap.cps.spi.CpsDataPersistenceService
import org.onap.cps.spi.exceptions.CpsPathException
-import org.onap.cps.spi.model.DataNode
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.jdbc.Sql
@@ -38,23 +37,25 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
static final String SET_DATA = '/data/cps-path-query.sql'
@Sql([CLEAR_DATA, SET_DATA])
- def 'Cps Path query for single leaf value with type: #type.'() {
+ def 'Cps Path query for leaf value(s) with: #scenario.'() {
when: 'a query is executed to get a data node by the given cps path'
def result = objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, cpsPath, includeDescendantsOption)
+ then: 'the correct number of parent nodes is returned'
+ result.size() == expectedNumberOfParentNodes
then: 'the correct data is returned'
- def leaves = '[price:15.0, title:Dune]'
- DataNode dataNode = result.stream().findFirst().get()
- dataNode.getLeaves().toString() == leaves
- dataNode.getChildDataNodes().size() == expectedNumberOfChidlNodes
+ result.each {
+ assert it.getChildDataNodes().size() == expectedNumberOfChildNodes
+ }
where: 'the following data is used'
- type | cpsPath | includeDescendantsOption || expectedNumberOfChidlNodes
- 'String and no descendants' | '/shops/shop[@id=1]/categories[@code=1]/book[@title="Dune"]' | OMIT_DESCENDANTS || 0
- 'Integer and descendants' | '/shops/shop[@id=1]/categories[@code=1]/book[@price=15]' | INCLUDE_ALL_DESCENDANTS || 1
+ scenario | cpsPath | includeDescendantsOption || expectedNumberOfParentNodes | expectedNumberOfChildNodes
+ 'String and no descendants' | '/shops/shop[@id=1]/categories[@code=1]/book[@title="Dune"]' | OMIT_DESCENDANTS || 1 | 0
+ 'Integer and descendants' | '/shops/shop[@id=1]/categories[@code=1]/book[@price=5]' | INCLUDE_ALL_DESCENDANTS || 1 | 1
+ 'No condition no descendants' | '/shops/shop[@id=1]/categories' | OMIT_DESCENDANTS || 2 | 0
}
@Sql([CLEAR_DATA, SET_DATA])
def 'Query for attribute by cps path with cps paths that return no data because of #scenario.'() {
- when: 'a query is executed to get datanodes for the given cps path'
+ when: 'a query is executed to get data nodes for the given cps path'
def result = objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, cpsPath, OMIT_DESCENDANTS)
then: 'no data is returned'
result.isEmpty()
@@ -71,7 +72,7 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
def cpsPath = '//categories[@code=1]'
def result = objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, cpsPath, includeDescendantsOption)
then: 'the data node has the correct number of children'
- DataNode dataNode = result.stream().findFirst().get()
+ def dataNode = result.stream().findFirst().get()
dataNode.getChildDataNodes().size() == expectedNumberOfChildNodes
where: 'the following data is used'
type | includeDescendantsOption || expectedNumberOfChildNodes
@@ -90,9 +91,16 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
assert result[i].getXpath() == expectedXPaths[i]
}
where: 'the following data is used'
- scenario | cpsPath || expectedXPaths
- 'fully unique descendant name' | '//categories[@code=2]' || ['/shops/shop[@id=1]/categories[@code=2]', '/shops/shop[@id=2]/categories[@code=1]', '/shops/shop[@id=2]/categories[@code=2]']
- 'descendant name match end of other node' | '//book' || ['/shops/shop[@id=1]/categories[@code=1]/book', '/shops/shop[@id=1]/categories[@code=2]/book']
+ scenario | cpsPath || expectedXPaths
+ 'fully unique descendant name' | '//categories[@code=2]' || ['/shops/shop[@id=1]/categories[@code=2]', '/shops/shop[@id=2]/categories[@code=1]', '/shops/shop[@id=2]/categories[@code=2]']
+ 'descendant name match end of other node' | '//book' || ['/shops/shop[@id=1]/categories[@code=1]/book', '/shops/shop[@id=1]/categories[@code=2]/book']
+ 'descendant with text condition on leaf' | '//book/title[text()="Chapters"]' || ['/shops/shop[@id=1]/categories[@code=2]/book']
+ 'descendant with text condition case mismatch' | '//book/title[text()="chapters"]' || []
+ 'descendant with text condition on int leaf' | '//book/price[text()="5"]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
+ 'descendant with text condition on leaf-list' | '//book/labels[text()="special offer"]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
+ 'descendant with text condition partial match' | '//book/labels[text()="special"]' || []
+ 'descendant with text condition (existing) empty string' | '//book/labels[text()=""]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
+ 'descendant with text condition on int leaf-list' | '//book/editions[text()="2000"]' || ['/shops/shop[@id=1]/categories[@code=2]/book']
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -106,10 +114,11 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
assert result[i].getXpath() == expectedXPaths[i]
}
where: 'the following data is used'
- scenario | cpsPath || expectedXPaths
- 'one leaf' | '//author[@FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]']
- 'more than one leaf' | '//author[@FirstName="Joe" and @Surname="Bloggs"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
- 'leaves reversed in order' | '//author[@Surname="Bloggs" and @FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
+ scenario | cpsPath || expectedXPaths
+ 'one leaf' | '//author[@FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]']
+ 'more than one leaf' | '//author[@FirstName="Joe" and @Surname="Bloggs"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
+ 'leaves reversed in order' | '//author[@Surname="Bloggs" and @FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
+ 'leaf and text condition' | '//author[@FirstName="Joe"]/Surname[text()="Bloggs"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -146,6 +155,7 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ['/shops/shop[@id=1]']
'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ['/shops/shop[@id=1]/categories[@code=2]']
'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ['/shops/shop[@id=3]/info/contact']
+ 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ['/shops/shop[@id=1]']
'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || []
'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || []
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
new file mode 100755
index 000000000..ad8db766f
--- /dev/null
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
@@ -0,0 +1,436 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2021 Nordix Foundation
+ * Modifications Copyright (C) 2021 Pantheon.tech
+ * Modifications Copyright (C) 2021 Bell Canada.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.cps.spi.impl
+
+import org.onap.cps.spi.exceptions.ConcurrencyException
+
+import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS
+import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS
+
+import com.google.common.collect.ImmutableSet
+import com.google.gson.Gson
+import com.google.gson.GsonBuilder
+import org.onap.cps.spi.CpsDataPersistenceService
+import org.onap.cps.spi.entities.FragmentEntity
+import org.onap.cps.spi.exceptions.AlreadyDefinedException
+import org.onap.cps.spi.exceptions.AnchorNotFoundException
+import org.onap.cps.spi.exceptions.DataNodeNotFoundException
+import org.onap.cps.spi.exceptions.DataspaceNotFoundException
+import org.onap.cps.spi.model.DataNode
+import org.onap.cps.spi.model.DataNodeBuilder
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.test.context.jdbc.Sql
+
+import javax.validation.ConstraintViolationException
+
+class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
+
+ @Autowired
+ CpsDataPersistenceService objectUnderTest
+
+ static final Gson GSON = new GsonBuilder().create()
+
+ static final String SET_DATA = '/data/fragment.sql'
+ static final long ID_DATA_NODE_WITH_DESCENDANTS = 4001
+ static final String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1'
+ static final String XPATH_DATA_NODE_WITH_LEAVES = '/parent-100'
+ static final long UPDATE_DATA_NODE_FRAGMENT_ID = 4202L
+ static final long UPDATE_DATA_NODE_SUB_FRAGMENT_ID = 4203L
+ static final long LIST_DATA_NODE_PARENT_FRAGMENT_ID = 4206L
+
+ static final DataNode newDataNode = new DataNodeBuilder().build()
+ static DataNode existingDataNode
+ static DataNode existingChildDataNode
+
+ def expectedLeavesByXpathMap = [
+ '/parent-100' : ['parent-leaf': 'parent-leaf value'],
+ '/parent-100/child-001' : ['first-child-leaf': 'first-child-leaf value'],
+ '/parent-100/child-002' : ['second-child-leaf': 'second-child-leaf value'],
+ '/parent-100/child-002/grand-child': ['grand-child-leaf': 'grand-child-leaf value']
+ ]
+
+ static {
+ existingDataNode = createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS)
+ existingChildDataNode = createDataNodeTree('/parent-1/child-1')
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'StoreDataNode with descendants.'() {
+ when: 'a fragment with descendants is stored'
+ def parentXpath = "/parent-new"
+ def childXpath = "/parent-new/child-new"
+ def grandChildXpath = "/parent-new/child-new/grandchild-new"
+ objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1,
+ createDataNodeTree(parentXpath, childXpath, grandChildXpath))
+ then: 'it can be retrieved by its xpath'
+ def parentFragment = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME1, parentXpath)
+ and: 'it contains the children'
+ parentFragment.childFragments.size() == 1
+ def childFragment = parentFragment.childFragments[0]
+ childFragment.xpath == childXpath
+ and: "and its children's children"
+ childFragment.childFragments.size() == 1
+ def grandchildFragment = childFragment.childFragments[0]
+ grandchildFragment.xpath == grandChildXpath
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Store data node for multiple anchors using the same schema.'() {
+ def xpath = "/parent-new"
+ given: 'a fragment is stored for an anchor'
+ objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1, createDataNodeTree(xpath))
+ when: 'another fragment is stored for another anchor, using the same schema set'
+ objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME3, createDataNodeTree(xpath))
+ then: 'both fragments can be retrieved by their xpath'
+ def fragment1 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME1, xpath)
+ fragment1.anchor.name == ANCHOR_NAME1
+ fragment1.xpath == xpath
+ def fragment2 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME3, xpath)
+ fragment2.anchor.name == ANCHOR_NAME3
+ fragment2.xpath == xpath
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Store datanode error scenario: #scenario.'() {
+ when: 'attempt to store a data node with #scenario'
+ objectUnderTest.storeDataNode(dataspaceName, anchorName, dataNode)
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where: 'the following data is used'
+ scenario | dataspaceName | anchorName | dataNode || expectedException
+ 'dataspace does not exist' | 'unknown' | 'not-relevant' | newDataNode || DataspaceNotFoundException
+ 'schema set does not exist' | DATASPACE_NAME | 'unknown' | newDataNode || AnchorNotFoundException
+ 'anchor already exists' | DATASPACE_NAME | ANCHOR_NAME1 | newDataNode || ConstraintViolationException
+ 'datanode already exists' | DATASPACE_NAME | ANCHOR_NAME1 | existingDataNode || AlreadyDefinedException
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Add a child to a Fragment that already has a child.'() {
+ given: 'a new child node'
+ def newChild = createDataNodeTree('xpath for new child')
+ when: 'the child is added to an existing parent with 1 child'
+ objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChild)
+ then: 'the parent now has 2 children'
+ def expectedExistingChildPath = '/parent-1/child-1'
+ def parentFragment = fragmentRepository.findById(ID_DATA_NODE_WITH_DESCENDANTS).orElseThrow()
+ parentFragment.getChildFragments().size() == 2
+ and: 'it still has the old child'
+ parentFragment.getChildFragments().find({ it.xpath == expectedExistingChildPath })
+ and: 'it has the new child'
+ parentFragment.getChildFragments().find({ it.xpath == newChild.xpath })
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Add child error scenario: #scenario.'() {
+ when: 'attempt to add a child data node with #scenario'
+ objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNode)
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where: 'the following data is used'
+ scenario | parentXpath | dataNode || expectedException
+ 'parent does not exist' | 'unknown' | newDataNode || DataNodeNotFoundException
+ 'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNode || AlreadyDefinedException
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Add list-node fragment with multiple elements.'() {
+ given: 'list node data fragment as a collection of data nodes'
+ def listNodeXpaths = ['/parent-201/child-204[@key="B"]', '/parent-201/child-204[@key="C"]']
+ def listNodeCollection = buildDataNodeCollection(listNodeXpaths)
+ when: 'list-node elements added to existing parent node'
+ objectUnderTest.addListDataNodes(DATASPACE_NAME, ANCHOR_NAME3, '/parent-201', listNodeCollection)
+ then: 'new entries successfully persisted, parent node now contains 5 children (2 new + 3 existing before)'
+ def parentFragment = fragmentRepository.getOne(LIST_DATA_NODE_PARENT_FRAGMENT_ID)
+ def allChildXpaths = parentFragment.getChildFragments().collect { it.getXpath() }
+ assert allChildXpaths.size() == 5
+ assert allChildXpaths.containsAll(listNodeXpaths)
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Add list-node fragment error scenario: #scenario.'() {
+ given: 'list node data fragment as a collection of data nodes'
+ def listNodeCollection = buildDataNodeCollection(listNodeXpaths)
+ when: 'list-node elements added to existing parent node'
+ objectUnderTest.addListDataNodes(DATASPACE_NAME, ANCHOR_NAME3, parentNodeXpath, listNodeCollection)
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where: 'following parameters were used'
+ scenario | parentNodeXpath | listNodeXpaths || expectedException
+ 'parent node does not exist' | '/unknown' | ['irrelevant'] || DataNodeNotFoundException
+ 'already existing fragment' | '/parent-201' | ['/parent-201/child-204[@key="A"]'] || AlreadyDefinedException
+
+ }
+
+ static def createDataNodeTree(String... xpaths) {
+ def dataNodeBuilder = new DataNodeBuilder().withXpath(xpaths[0])
+ if (xpaths.length > 1) {
+ def xPathsDescendant = Arrays.copyOfRange(xpaths, 1, xpaths.length)
+ def childDataNode = createDataNodeTree(xPathsDescendant)
+ dataNodeBuilder.withChildDataNodes(ImmutableSet.of(childDataNode))
+ }
+ dataNodeBuilder.build()
+ }
+
+ def getFragmentByXpath(dataspaceName, anchorName, xpath) {
+ def dataspace = dataspaceRepository.getByName(dataspaceName)
+ def anchor = anchorRepository.getByDataspaceAndName(dataspace, anchorName)
+ return fragmentRepository.findByDataspaceAndAnchorAndXpath(dataspace, anchor, xpath).orElseThrow()
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Get data node by xpath without descendants.'() {
+ when: 'data node is requested'
+ def result = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
+ inputXPath, OMIT_DESCENDANTS)
+ then: 'data node is returned with no descendants'
+ assert result.getXpath() == XPATH_DATA_NODE_WITH_LEAVES
+ and: 'expected leaves'
+ assert result.getChildDataNodes().size() == 0
+ assertLeavesMaps(result.getLeaves(), expectedLeavesByXpathMap[XPATH_DATA_NODE_WITH_LEAVES])
+ where: 'the following data is used'
+ scenario | inputXPath
+ 'some xpath' | '/parent-100'
+ 'root xpath' | '/'
+ 'empty xpath' | ''
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Get data node by xpath with all descendants.'() {
+ when: 'data node is requested with all descendants'
+ def result = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
+ inputXPath, INCLUDE_ALL_DESCENDANTS)
+ def mappedResult = treeToFlatMapByXpath(new HashMap<>(), result)
+ then: 'data node is returned with all the descendants populated'
+ assert mappedResult.size() == 4
+ assert result.getChildDataNodes().size() == 2
+ assert mappedResult.get('/parent-100/child-001').getChildDataNodes().size() == 0
+ assert mappedResult.get('/parent-100/child-002').getChildDataNodes().size() == 1
+ and: 'extracted leaves maps are matching expected'
+ mappedResult.forEach(
+ (xPath, dataNode) -> assertLeavesMaps(dataNode.getLeaves(), expectedLeavesByXpathMap[xPath]))
+ where: 'the following data is used'
+ scenario | inputXPath
+ 'some xpath' | '/parent-100'
+ 'root xpath' | '/'
+ 'empty xpath' | ''
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Get data node error scenario: #scenario.'() {
+ when: 'attempt to get data node with #scenario'
+ objectUnderTest.getDataNode(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS)
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where: 'the following data is used'
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NO XPATH' || DataNodeNotFoundException
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Update data node leaves.'() {
+ when: 'update is performed for leaves'
+ objectUnderTest.updateDataLeaves(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
+ "/parent-200/child-201", ['leaf-value': 'new'])
+ then: 'leaves are updated for selected data node'
+ def updatedFragment = fragmentRepository.getOne(UPDATE_DATA_NODE_FRAGMENT_ID)
+ def updatedLeaves = getLeavesMap(updatedFragment)
+ assert updatedLeaves.size() == 1
+ assert updatedLeaves.'leaf-value' == 'new'
+ and: 'existing child entry remains as is'
+ def childFragment = updatedFragment.getChildFragments().iterator().next()
+ def childLeaves = getLeavesMap(childFragment)
+ assert childFragment.getId() == UPDATE_DATA_NODE_SUB_FRAGMENT_ID
+ assert childLeaves.'leaf-value' == 'original'
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Update data leaves error scenario: #scenario.'() {
+ when: 'attempt to update data node for #scenario'
+ objectUnderTest.updateDataLeaves(dataspaceName, anchorName, xpath, ['leaf-name': 'leaf-value'])
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where: 'the following data is used'
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NON-EXISTING XPATH' || DataNodeNotFoundException
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Replace data node tree with descendants removal.'() {
+ given: 'data node object with leaves updated, no children'
+ def submittedDataNode = buildDataNode("/parent-200/child-201", ['leaf-value': 'new'], [])
+ when: 'replace data node tree is performed'
+ objectUnderTest.replaceDataNodeTree(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
+ then: 'leaves have been updated for selected data node'
+ def updatedFragment = fragmentRepository.getOne(UPDATE_DATA_NODE_FRAGMENT_ID)
+ def updatedLeaves = getLeavesMap(updatedFragment)
+ assert updatedLeaves.size() == 1
+ assert updatedLeaves.'leaf-value' == 'new'
+ and: 'updated entry has no children'
+ updatedFragment.getChildFragments().isEmpty()
+ and: 'previously attached child entry is removed from database'
+ fragmentRepository.findById(UPDATE_DATA_NODE_SUB_FRAGMENT_ID).isEmpty()
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Replace data node tree with descendants.'() {
+ given: 'data node object with leaves updated, having child with old content'
+ def submittedDataNode = buildDataNode("/parent-200/child-201", ['leaf-value': 'new'], [
+ buildDataNode("/parent-200/child-201/grand-child", ['leaf-value': 'original'], [])
+ ])
+ when: 'update is performed including descendants'
+ objectUnderTest.replaceDataNodeTree(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
+ then: 'leaves have been updated for selected data node'
+ def updatedFragment = fragmentRepository.getOne(UPDATE_DATA_NODE_FRAGMENT_ID)
+ def updatedLeaves = getLeavesMap(updatedFragment)
+ assert updatedLeaves.size() == 1
+ assert updatedLeaves.'leaf-value' == 'new'
+ and: 'existing child entry is not updated as content is same'
+ def childFragment = updatedFragment.getChildFragments().iterator().next()
+ childFragment.getXpath() == '/parent-200/child-201/grand-child'
+ def childLeaves = getLeavesMap(childFragment)
+ assert childLeaves.'leaf-value' == 'original'
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Replace data node tree with same descendants but changed leaf value.'() {
+ given: 'data node object with leaves updated, having child with old content'
+ def submittedDataNode = buildDataNode("/parent-200/child-201", ['leaf-value': 'new'], [
+ buildDataNode("/parent-200/child-201/grand-child", ['leaf-value': 'new'], [])
+ ])
+ when: 'update is performed including descendants'
+ objectUnderTest.replaceDataNodeTree(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
+ then: 'leaves have been updated for selected data node'
+ def updatedFragment = fragmentRepository.getOne(UPDATE_DATA_NODE_FRAGMENT_ID)
+ def updatedLeaves = getLeavesMap(updatedFragment)
+ assert updatedLeaves.size() == 1
+ assert updatedLeaves.'leaf-value' == 'new'
+ and: 'existing child entry is updated with the new content'
+ def childFragment = updatedFragment.getChildFragments().iterator().next()
+ childFragment.getXpath() == '/parent-200/child-201/grand-child'
+ def childLeaves = getLeavesMap(childFragment)
+ assert childLeaves.'leaf-value' == 'new'
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Replace data node tree with a different descendant xpath.'() {
+ given: 'data node object with leaves updated, having child with old content'
+ def submittedDataNode = buildDataNode("/parent-200/child-201", ['leaf-value': 'new'], [
+ buildDataNode("/parent-200/child-201/grand-child-new", ['leaf-value': 'new'], [])
+ ])
+ when: 'update is performed including descendants'
+ objectUnderTest.replaceDataNodeTree(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
+ then: 'leaves have been updated for selected data node'
+ def updatedFragment = fragmentRepository.getOne(UPDATE_DATA_NODE_FRAGMENT_ID)
+ def updatedLeaves = getLeavesMap(updatedFragment)
+ assert updatedLeaves.size() == 1
+ assert updatedLeaves.'leaf-value' == 'new'
+ and: 'previously attached child entry is removed from database'
+ fragmentRepository.findById(UPDATE_DATA_NODE_SUB_FRAGMENT_ID).isEmpty()
+ and: 'new child entry is persisted'
+ def childFragment = updatedFragment.getChildFragments().iterator().next()
+ childFragment.getXpath() == '/parent-200/child-201/grand-child-new'
+ def childLeaves = getLeavesMap(childFragment)
+ assert childLeaves.'leaf-value' == 'new'
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Replace data node tree error scenario: #scenario.'() {
+ given: 'data node object'
+ def submittedDataNode = buildDataNode(xpath, ['leaf-name': 'leaf-value'], [])
+ when: 'attempt to update data node for #scenario'
+ objectUnderTest.replaceDataNodeTree(dataspaceName, anchorName, submittedDataNode)
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where: 'the following data is used'
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NON-EXISTING XPATH' || DataNodeNotFoundException
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Replace list-node content of #scenario.'() {
+ given: 'list node data fragment as a collection of data nodes'
+ def listNodeCollection = buildDataNodeCollection(listNodeXpaths)
+ when: 'list-node elements replaced within the existing parent node'
+ objectUnderTest.replaceListDataNodes(DATASPACE_NAME, ANCHOR_NAME3, '/parent-201', listNodeCollection)
+ then: 'child list elements are updated as expected, non-list element remains as is'
+ def parentFragment = fragmentRepository.getOne(LIST_DATA_NODE_PARENT_FRAGMENT_ID)
+ def allChildXpaths = parentFragment.getChildFragments().collect { it.getXpath() }
+ assert allChildXpaths.size() == expectedChildXpaths.size()
+ assert allChildXpaths.containsAll(expectedChildXpaths)
+ where: 'following parameters were used'
+ scenario | listNodeXpaths || expectedChildXpaths
+ 'existing list-node' | ['/parent-201/child-204[@key="B"]'] || ['/parent-201/child-203', '/parent-201/child-204[@key="B"]']
+ 'non-existing list-node' | ['/parent-201/child-205[@key="1"]'] || ['/parent-201/child-203', '/parent-201/child-204[@key="A"]', '/parent-201/child-204[@key="X"]', '/parent-201/child-205[@key="1"]']
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Replace list-node fragment error scenario: #scenario.'() {
+ given: 'list node data fragment as a collection of data nodes'
+ def listNodeCollection = buildDataNodeCollection(listNodeXpaths)
+ when: 'list-node elements were replaced under existing parent node'
+ objectUnderTest.replaceListDataNodes(DATASPACE_NAME, ANCHOR_NAME3, parentNodeXpath, listNodeCollection)
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where: 'following parameters were used'
+ scenario | parentNodeXpath | listNodeXpaths || expectedException
+ 'parent node does not exist' | '/unknown' | ['irrelevant'] || DataNodeNotFoundException
+ }
+
+ static Collection<DataNode> buildDataNodeCollection(xpaths) {
+ return xpaths.collect { new DataNodeBuilder().withXpath(it).build() }
+ }
+
+ static DataNode buildDataNode(xpath, leaves, childDataNodes) {
+ return new DataNodeBuilder().withXpath(xpath).withLeaves(leaves).withChildDataNodes(childDataNodes).build()
+ }
+
+ static Map<String, Object> getLeavesMap(FragmentEntity fragmentEntity) {
+ return GSON.fromJson(fragmentEntity.getAttributes(), Map<String, Object>.class)
+ }
+
+ def static assertLeavesMaps(actualLeavesMap, expectedLeavesMap) {
+ expectedLeavesMap.forEach((key, value) -> {
+ def actualValue = actualLeavesMap[key]
+ if (value instanceof Collection<?> && actualValue instanceof Collection<?>) {
+ assert value.size() == actualValue.size()
+ assert value.containsAll(actualValue)
+ } else {
+ assert value == actualValue
+ }
+ })
+ return true
+ }
+
+ def static treeToFlatMapByXpath(Map<String, DataNode> flatMap, DataNode dataNodeTree) {
+ flatMap.put(dataNodeTree.getXpath(), dataNodeTree)
+ dataNodeTree.getChildDataNodes()
+ .forEach(childDataNode -> treeToFlatMapByXpath(flatMap, childDataNode))
+ return flatMap
+ }
+
+}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
index 0ad67d541..5ed3ae3e8 100755..100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
@@ -1,394 +1,72 @@
/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2021 Nordix Foundation
- * Modifications Copyright (C) 2021 Pantheon.tech
- * Modifications Copyright (C) 2021 Bell Canada.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * ============LICENSE_START=======================================================
+ * Copyright (c) 2021 Bell Canada.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-package org.onap.cps.spi.impl
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+*/
-import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS
-import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS
+package org.onap.cps.spi.impl
-import com.google.common.collect.ImmutableSet
-import com.google.gson.Gson
-import com.google.gson.GsonBuilder
-import org.onap.cps.spi.CpsDataPersistenceService
+import org.hibernate.StaleStateException
import org.onap.cps.spi.entities.FragmentEntity
-import org.onap.cps.spi.exceptions.AlreadyDefinedException
-import org.onap.cps.spi.exceptions.AnchorNotFoundException
-import org.onap.cps.spi.exceptions.DataNodeNotFoundException
-import org.onap.cps.spi.exceptions.DataspaceNotFoundException
-import org.onap.cps.spi.model.DataNode
+import org.onap.cps.spi.exceptions.ConcurrencyException
import org.onap.cps.spi.model.DataNodeBuilder
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.test.context.jdbc.Sql
-
-import javax.validation.ConstraintViolationException
-
-class CpsDataPersistenceServiceSpec extends CpsPersistenceSpecBase {
-
- @Autowired
- CpsDataPersistenceService objectUnderTest
-
- static final Gson GSON = new GsonBuilder().create()
-
- static final String SET_DATA = '/data/fragment.sql'
- static final long ID_DATA_NODE_WITH_DESCENDANTS = 4001
- static final String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1'
- static final String XPATH_DATA_NODE_WITH_LEAVES = '/parent-100'
- static final long UPDATE_DATA_NODE_FRAGMENT_ID = 4202L
- static final long UPDATE_DATA_NODE_SUB_FRAGMENT_ID = 4203L
- static final long LIST_DATA_NODE_PARENT_FRAGMENT_ID = 4206L
-
- static final DataNode newDataNode = new DataNodeBuilder().build()
- static DataNode existingDataNode
- static DataNode existingChildDataNode
-
- def expectedLeavesByXpathMap = [
- '/parent-100' : ['parent-leaf': 'parent-leaf value'],
- '/parent-100/child-001' : ['first-child-leaf': 'first-child-leaf value'],
- '/parent-100/child-002' : ['second-child-leaf': 'second-child-leaf value'],
- '/parent-100/child-002/grand-child': ['grand-child-leaf': 'grand-child-leaf value']
- ]
-
- static {
- existingDataNode = createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS)
- existingChildDataNode = createDataNodeTree('/parent-1/child-1')
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'StoreDataNode with descendants.'() {
- when: 'a fragment with descendants is stored'
- def parentXpath = "/parent-new"
- def childXpath = "/parent-new/child-new"
- def grandChildXpath = "/parent-new/child-new/grandchild-new"
- objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1,
- createDataNodeTree(parentXpath, childXpath, grandChildXpath))
- then: 'it can be retrieved by its xpath'
- def parentFragment = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME1, parentXpath)
- and: 'it contains the children'
- parentFragment.childFragments.size() == 1
- def childFragment = parentFragment.childFragments[0]
- childFragment.xpath == childXpath
- and: "and its children's children"
- childFragment.childFragments.size() == 1
- def grandchildFragment = childFragment.childFragments[0]
- grandchildFragment.xpath == grandChildXpath
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Store data node for multiple anchors using the same schema.'() {
- def xpath = "/parent-new"
- given: 'a fragment is stored for an anchor'
- objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1, createDataNodeTree(xpath))
- when: 'another fragment is stored for an other anchor, using the same schema set'
- objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME3, createDataNodeTree(xpath))
- then: 'both fragments can be retrieved by their xpath'
- def fragment1 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME1, xpath)
- fragment1.anchor.name == ANCHOR_NAME1
- fragment1.xpath == xpath
- def fragment2 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME3, xpath)
- fragment2.anchor.name == ANCHOR_NAME3
- fragment2.xpath == xpath
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Store datanode error scenario: #scenario.'() {
- when: 'attempt to store a data node with #scenario'
- objectUnderTest.storeDataNode(dataspaceName, anchorName, dataNode)
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where: 'the following data is used'
- scenario | dataspaceName | anchorName | dataNode || expectedException
- 'dataspace does not exist' | 'unknown' | 'not-relevant' | newDataNode || DataspaceNotFoundException
- 'schema set does not exist' | DATASPACE_NAME | 'unknown' | newDataNode || AnchorNotFoundException
- 'anchor already exists' | DATASPACE_NAME | ANCHOR_NAME1 | newDataNode || ConstraintViolationException
- 'datanode already exists' | DATASPACE_NAME | ANCHOR_NAME1 | existingDataNode || AlreadyDefinedException
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Add a child to a Fragment that already has a child.'() {
- given: ' a new child node'
- def newChild = createDataNodeTree('xpath for new child')
- when: 'the child is added to an existing parent with 1 child'
- objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChild)
- then: 'the parent is now has to 2 children'
- def expectedExistingChildPath = '/parent-1/child-1'
- def parentFragment = fragmentRepository.findById(ID_DATA_NODE_WITH_DESCENDANTS).orElseThrow()
- parentFragment.getChildFragments().size() == 2
- and: 'it still has the old child'
- parentFragment.getChildFragments().find({ it.xpath == expectedExistingChildPath })
- and: 'it has the new child'
- parentFragment.getChildFragments().find({ it.xpath == newChild.xpath })
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Add child error scenario: #scenario.'() {
- when: 'attempt to add a child data node with #scenario'
- objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNode)
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where: 'the following data is used'
- scenario | parentXpath | dataNode || expectedException
- 'parent does not exist' | 'unknown' | newDataNode || DataNodeNotFoundException
- 'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNode || AlreadyDefinedException
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Add list-node fragment with multiple elements.'() {
- given: 'list node data fragment as a collection of data nodes'
- def listNodeXpaths = ['/parent-201/child-204[@key="B"]', '/parent-201/child-204[@key="C"]']
- def listNodeCollection = buildDataNodeCollection(listNodeXpaths)
- when: 'list-node elements added to existing parent node'
- objectUnderTest.addListDataNodes(DATASPACE_NAME, ANCHOR_NAME3, '/parent-201', listNodeCollection)
- then: 'new entries successfully persisted, parent node now contains 5 children (2 new + 3 existing before)'
- def parentFragment = fragmentRepository.getOne(LIST_DATA_NODE_PARENT_FRAGMENT_ID)
- def allChildXpaths = parentFragment.getChildFragments().collect { it.getXpath() }
- assert allChildXpaths.size() == 5
- assert allChildXpaths.containsAll(listNodeXpaths)
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Add list-node fragment error scenario: #scenario.'() {
- given: 'list node data fragment as a collection of data nodes'
- def listNodeCollection = buildDataNodeCollection(listNodeXpaths)
- when: 'list-node elements added to existing parent node'
- objectUnderTest.addListDataNodes(DATASPACE_NAME, ANCHOR_NAME3, parentNodeXpath, listNodeCollection)
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where: 'following parameters were used'
- scenario | parentNodeXpath | listNodeXpaths || expectedException
- 'parent node does not exist' | '/unknown' | ['irrelevant'] || DataNodeNotFoundException
- 'already existing fragment' | '/parent-201' | ['/parent-201/child-204[@key="A"]'] || AlreadyDefinedException
-
- }
+import org.onap.cps.spi.repository.AnchorRepository
+import org.onap.cps.spi.repository.DataspaceRepository
+import org.onap.cps.spi.repository.FragmentRepository
+import spock.lang.Specification
- static def createDataNodeTree(String... xpaths) {
- def dataNodeBuilder = new DataNodeBuilder().withXpath(xpaths[0])
- if (xpaths.length > 1) {
- def xPathsDescendant = Arrays.copyOfRange(xpaths, 1, xpaths.length)
- def childDataNode = createDataNodeTree(xPathsDescendant)
- dataNodeBuilder.withChildDataNodes(ImmutableSet.of(childDataNode))
- }
- dataNodeBuilder.build()
- }
- def getFragmentByXpath(dataspaceName, anchorName, xpath) {
- def dataspace = dataspaceRepository.getByName(dataspaceName)
- def anchor = anchorRepository.getByDataspaceAndName(dataspace, anchorName)
- return fragmentRepository.findByDataspaceAndAnchorAndXpath(dataspace, anchor, xpath).orElseThrow()
- }
+class CpsDataPersistenceServiceSpec extends Specification {
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Get data node by xpath without descendants.'() {
- when: 'data node is requested'
- def result = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
- inputXPath, OMIT_DESCENDANTS)
- then: 'data node is returned with no descendants'
- assert result.getXpath() == XPATH_DATA_NODE_WITH_LEAVES
- and: 'expected leaves'
- assert result.getChildDataNodes().size() == 0
- assertLeavesMaps(result.getLeaves(), expectedLeavesByXpathMap[XPATH_DATA_NODE_WITH_LEAVES])
- where: 'the following data is used'
- scenario | inputXPath
- 'some xpath' | '/parent-100'
- 'root xpath' | '/'
- 'empty xpath' | ''
- }
+ def mockDataspaceRepository = Mock(DataspaceRepository)
+ def mockAnchorRepository = Mock(AnchorRepository)
+ def mockFragmentRepository = Mock(FragmentRepository)
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Get data node by xpath with all descendants.'() {
- when: 'data node is requested with all descendants'
- def result = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
- inputXPath, INCLUDE_ALL_DESCENDANTS)
- def mappedResult = treeToFlatMapByXpath(new HashMap<>(), result)
- then: 'data node is returned with all the descendants populated'
- assert mappedResult.size() == 4
- assert result.getChildDataNodes().size() == 2
- assert mappedResult.get('/parent-100/child-001').getChildDataNodes().size() == 0
- assert mappedResult.get('/parent-100/child-002').getChildDataNodes().size() == 1
- and: 'extracted leaves maps are matching expected'
- mappedResult.forEach(
- (xPath, dataNode) -> assertLeavesMaps(dataNode.getLeaves(), expectedLeavesByXpathMap[xPath]))
- where: 'the following data is used'
- scenario | inputXPath
- 'some xpath' | '/parent-100'
- 'root xpath' | '/'
- 'empty xpath' | ''
- }
+ def objectUnderTest = new CpsDataPersistenceServiceImpl(
+ mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository)
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Get data node error scenario: #scenario.'() {
- when: 'attempt to get data node with #scenario'
- objectUnderTest.getDataNode(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS)
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NO XPATH' || DataNodeNotFoundException
- }
+ def 'Handling of StaleStateException (caused by concurrent updates) during data node tree update.'() {
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Update data node leaves.'() {
- when: 'update is performed for leaves'
- objectUnderTest.updateDataLeaves(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
- "/parent-200/child-201", ['leaf-value': 'new'])
- then: 'leaves are updated for selected data node'
- def updatedFragment = fragmentRepository.getOne(UPDATE_DATA_NODE_FRAGMENT_ID)
- def updatedLeaves = getLeavesMap(updatedFragment)
- assert updatedLeaves.size() == 1
- assert updatedLeaves.'leaf-value' == 'new'
- and: 'existing child entry remains as is'
- def childFragment = updatedFragment.getChildFragments().iterator().next()
- def childLeaves = getLeavesMap(childFragment)
- assert childFragment.getId() == UPDATE_DATA_NODE_SUB_FRAGMENT_ID
- assert childLeaves.'leaf-value' == 'original'
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Update data leaves error scenario: #scenario.'() {
- when: 'attempt to update data node for #scenario'
- objectUnderTest.updateDataLeaves(dataspaceName, anchorName, xpath, ['leaf-name': 'leaf-value'])
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NON-EXISTING XPATH' || DataNodeNotFoundException
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Replace data node tree with descendants removal.'() {
- given: 'data node object with leaves updated, no children'
- def submittedDataNode = buildDataNode("/parent-200/child-201", ['leaf-value': 'new'], [])
- when: 'replace data node tree is performed'
- objectUnderTest.replaceDataNodeTree(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
- then: 'leaves have been updated for selected data node'
- def updatedFragment = fragmentRepository.getOne(UPDATE_DATA_NODE_FRAGMENT_ID)
- def updatedLeaves = getLeavesMap(updatedFragment)
- assert updatedLeaves.size() == 1
- assert updatedLeaves.'leaf-value' == 'new'
- and: 'updated entry has no children'
- updatedFragment.getChildFragments().isEmpty()
- and: 'previously attached child entry is removed from database'
- fragmentRepository.findById(UPDATE_DATA_NODE_SUB_FRAGMENT_ID).isEmpty()
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Replace data node tree with descendants.'() {
- given: 'data node object with leaves updated, having child with old content'
- def submittedDataNode = buildDataNode("/parent-200/child-201", ['leaf-value': 'new'], [
- buildDataNode("/parent-200/child-201/grand-child", ['leaf-value': 'original'], [])
- ])
- when: 'update is performed including descendants'
- objectUnderTest.replaceDataNodeTree(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNode)
- then: 'leaves have been updated for selected data node'
- def updatedFragment = fragmentRepository.getOne(UPDATE_DATA_NODE_FRAGMENT_ID)
- def updatedLeaves = getLeavesMap(updatedFragment)
- assert updatedLeaves.size() == 1
- assert updatedLeaves.'leaf-value' == 'new'
- and: 'previously attached child entry is removed from database'
- fragmentRepository.findById(UPDATE_DATA_NODE_SUB_FRAGMENT_ID).isEmpty()
- and: 'new child entry with same content is created'
- def childFragment = updatedFragment.getChildFragments().iterator().next()
- def childLeaves = getLeavesMap(childFragment)
- assert childFragment.getId() != UPDATE_DATA_NODE_SUB_FRAGMENT_ID
- assert childLeaves.'leaf-value' == 'original'
- }
+ def parentXpath = 'parent-01'
+ def myDataspaceName = 'my-dataspace'
+ def myAnchorName = 'my-anchor'
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Replace data node tree error scenario: #scenario.'() {
given: 'data node object'
- def submittedDataNode = buildDataNode(xpath, ['leaf-name': 'leaf-value'], [])
- when: 'attempt to update data node for #scenario'
- objectUnderTest.replaceDataNodeTree(dataspaceName, anchorName, submittedDataNode)
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NON-EXISTING XPATH' || DataNodeNotFoundException
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Replace list-node content of #scenario.'() {
- given: 'list node data fragment as a collection of data nodes'
- def listNodeCollection = buildDataNodeCollection(listNodeXpaths)
- when: 'list-node elements replaced within the existing parent node'
- objectUnderTest.replaceListDataNodes(DATASPACE_NAME, ANCHOR_NAME3, '/parent-201', listNodeCollection)
- then: 'child list elements are updated as expected, non-list element remains as is'
- def parentFragment = fragmentRepository.getOne(LIST_DATA_NODE_PARENT_FRAGMENT_ID)
- def allChildXpaths = parentFragment.getChildFragments().collect { it.getXpath() }
- assert allChildXpaths.size() == expectedChildXpaths.size()
- assert allChildXpaths.containsAll(expectedChildXpaths)
- where: 'following parameters were used'
- scenario | listNodeXpaths || expectedChildXpaths
- 'existing list-node' | ['/parent-201/child-204[@key="B"]'] || ['/parent-201/child-203', '/parent-201/child-204[@key="B"]']
- 'non-existing list-node' | ['/parent-201/child-205[@key="1"]'] || ['/parent-201/child-203', '/parent-201/child-204[@key="A"]', '/parent-201/child-204[@key="X"]', '/parent-201/child-205[@key="1"]']
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Replace list-node fragment error scenario: #scenario.'() {
- given: 'list node data fragment as a collection of data nodes'
- def listNodeCollection = buildDataNodeCollection(listNodeXpaths)
- when: 'list-node elements were replaced under existing parent node'
- objectUnderTest.replaceListDataNodes(DATASPACE_NAME, ANCHOR_NAME3, parentNodeXpath, listNodeCollection)
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where: 'following parameters were used'
- scenario | parentNodeXpath | listNodeXpaths || expectedException
- 'parent node does not exist' | '/unknown' | ['irrelevant'] || DataNodeNotFoundException
- }
-
- static Collection<DataNode> buildDataNodeCollection(xpaths) {
- return xpaths.collect { new DataNodeBuilder().withXpath(it).build() }
- }
-
- static DataNode buildDataNode(xpath, leaves, childDataNodes) {
- return new DataNodeBuilder().withXpath(xpath).withLeaves(leaves).withChildDataNodes(childDataNodes).build()
- }
+ def submittedDataNode = new DataNodeBuilder()
+ .withXpath(parentXpath)
+ .withLeaves(['leaf-name': 'leaf-value'])
+ .build()
+ and: 'fragment to be updated'
+ mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
+ def fragmentEntity = new FragmentEntity()
+ fragmentEntity.setXpath(parentXpath)
+ fragmentEntity.setChildFragments(Collections.emptySet())
+ return fragmentEntity
+ }
+ and: 'data node is concurrently updated by another transaction'
+ mockFragmentRepository.save(_) >> { throw new StaleStateException("concurrent updates") }
- static Map<String, Object> getLeavesMap(FragmentEntity fragmentEntity) {
- return GSON.fromJson(fragmentEntity.getAttributes(), Map<String, Object>.class)
- }
+ when: 'attempt to update data node'
+ objectUnderTest.replaceDataNodeTree(myDataspaceName, myAnchorName, submittedDataNode)
- def static assertLeavesMaps(actualLeavesMap, expectedLeavesMap) {
- expectedLeavesMap.forEach((key, value) -> {
- def actualValue = actualLeavesMap[key]
- if (value instanceof Collection<?> && actualValue instanceof Collection<?>) {
- assert value.size() == actualValue.size()
- assert value.containsAll(actualValue)
- } else {
- assert value == actualValue
- }
- })
- return true
+ then: 'concurrency exception is thrown'
+ def concurrencyException = thrown(ConcurrencyException)
+ assert concurrencyException.getDetails().contains(myDataspaceName)
+ assert concurrencyException.getDetails().contains(myAnchorName)
+ assert concurrencyException.getDetails().contains(parentXpath)
}
- def static treeToFlatMapByXpath(Map<String, DataNode> flatMap, DataNode dataNodeTree) {
- flatMap.put(dataNodeTree.getXpath(), dataNodeTree)
- dataNodeTree.getChildDataNodes()
- .forEach(childDataNode -> treeToFlatMapByXpath(flatMap, childDataNode))
- return flatMap
- }
}
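For context on the behaviour the new unit spec above pins down: the repository save is expected to raise Hibernate's StaleStateException when another transaction has already modified the fragment, and the service is expected to surface that as a CPS ConcurrencyException whose details name the dataspace, anchor and xpath. Below is a minimal sketch of that translation inside replaceDataNodeTree, assuming a ConcurrencyException(message, details) constructor that matches the getDetails() assertions in the test; the local names (fragmentRepository, fragmentEntity, dataspaceName, anchorName, dataNode) and the exact detail wording are illustrative, not the verbatim implementation.

    try {
        fragmentRepository.save(fragmentEntity);
    } catch (final StaleStateException staleStateException) {
        // Assumed wording: the spec only requires the details to contain the dataspace, anchor and xpath
        throw new ConcurrencyException("Concurrent Transactions",
            String.format("dataspace '%s', anchor '%s' and xpath '%s' was updated by another transaction.",
                dataspaceName, anchorName, dataNode.getXpath()));
    }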
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceUnitSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
index ffbac485e..4455a6fa4 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceUnitSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
@@ -33,7 +33,7 @@ import java.sql.SQLException
/**
* Specification unit test class for CPS module persistence service.
*/
-class CpsModulePersistenceServiceUnitSpec extends Specification {
+class CpsModulePersistenceServiceSpec extends Specification {
// Instance to test
CpsModulePersistenceService objectUnderTest
diff --git a/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java b/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java
index d3908ea80..10f8de422 100755
--- a/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java
+++ b/cps-ri/src/test/java/org/onap/cps/DatabaseTestContainer.java
@@ -23,8 +23,10 @@ import org.testcontainers.containers.PostgreSQLContainer;
/**
* The Postgresql database test container wrapper.
- * Singleton implementation allows saving time on database initialization which
- * otherwise would occur on each test.
+ * Singleton implementation saves time on database initialization, which would otherwise occur for each test.
+ * For debugging/development purposes you can suspend any test and connect to this database:
+ * docker exec -it {container-id} sh
+ * psql -d test -U test
*/
public class DatabaseTestContainer extends PostgreSQLContainer<DatabaseTestContainer> {
private static final String IMAGE_VERSION = "postgres:13.2";
diff --git a/cps-ri/src/test/resources/data/anchor.sql b/cps-ri/src/test/resources/data/anchor.sql
index a7d3e6715..dbf1a6a47 100644
--- a/cps-ri/src/test/resources/data/anchor.sql
+++ b/cps-ri/src/test/resources/data/anchor.sql
@@ -1,3 +1,25 @@
+/*
+ ============LICENSE_START=======================================================
+ Copyright (C) 2020 Pantheon.tech
+ Modifications Copyright (C) 2020 Nordix Foundation.
+ Modifications Copyright (C) 2021 Bell Canada.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ SPDX-License-Identifier: Apache-2.0
+ ============LICENSE_END=========================================================
+*/
+
INSERT INTO DATASPACE (ID, NAME) VALUES
(1001, 'DATASPACE-001'), (1002, 'DATASPACE-002');
@@ -9,4 +31,4 @@ INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
(3002, 'ANCHOR-002', 1001, 2002);
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (4001, 1001, 3001, null, '/xpath', '{}');
\ No newline at end of file
+ (4001, 1001, 3001, null, '/xpath', '{}');
diff --git a/cps-ri/src/test/resources/data/clear-all.sql b/cps-ri/src/test/resources/data/clear-all.sql
index 9aee604a3..8a5e8444e 100644
--- a/cps-ri/src/test/resources/data/clear-all.sql
+++ b/cps-ri/src/test/resources/data/clear-all.sql
@@ -1,3 +1,25 @@
+/*
+ ============LICENSE_START=======================================================
+ Copyright (C) 2020-2021 Pantheon.tech
+ Modifications Copyright (C) 2020 Nordix Foundation.
+ Modifications Copyright (C) 2020 Bell Canada.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ SPDX-License-Identifier: Apache-2.0
+ ============LICENSE_END=========================================================
+*/
+
DELETE FROM FRAGMENT;
DELETE FROM ANCHOR;
DELETE FROM DATASPACE;
diff --git a/cps-ri/src/test/resources/data/cps-path-query.sql b/cps-ri/src/test/resources/data/cps-path-query.sql
index 67558456e..8f525df6b 100644
--- a/cps-ri/src/test/resources/data/cps-path-query.sql
+++ b/cps-ri/src/test/resources/data/cps-path-query.sql
@@ -1,3 +1,24 @@
+/*
+ ============LICENSE_START=======================================================
+ Copyright (C) 2021 Nordix Foundation.
+ Modifications Copyright (C) 2021 Bell Canada.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ SPDX-License-Identifier: Apache-2.0
+ ============LICENSE_END=========================================================
+*/
+
INSERT INTO DATASPACE (ID, NAME) VALUES
(1001, 'DATASPACE-001');
@@ -12,8 +33,8 @@ INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES)
(2, 1001, 1003, 1, '/shops/shop[@id=1]', '{"id" : 1, "type" : "bookstore"}'),
(3, 1001, 1003, 2, '/shops/shop[@id=1]/categories[@code=1]', '{"code" : 1, "type" : "bookstore", "name": "SciFi"}'),
(4, 1001, 1003, 2, '/shops/shop[@id=1]/categories[@code=2]', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}'),
- (5, 1001, 1003, 3, '/shops/shop[@id=1]/categories[@code=1]/book', '{"price" : 15, "title": "Dune"}'),
- (6, 1001, 1003, 4, '/shops/shop[@id=1]/categories[@code=2]/book', '{"price" : 15, "title": "Chapters"}'),
+ (5, 1001, 1003, 3, '/shops/shop[@id=1]/categories[@code=1]/book', '{"price" : 5, "title" : "Dune", "labels" : ["special offer","classics",""]}'),
+ (6, 1001, 1003, 4, '/shops/shop[@id=1]/categories[@code=2]/book', '{"price" : 15, "title" : "Chapters", "editions" : [2000,2010,2020]}'),
(7, 1001, 1003, 5, '/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '{"FirstName" : "Joe", "Surname": "Bloggs","title": "Dune"}'),
(8, 1001, 1003, 6, '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]', '{"FirstName" : "Joe", "Surname": "Smith","title": "Chapters"}');
@@ -30,4 +51,4 @@ INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES)
(17, 1001, 1003, 1, '/shops/shop[@id=3]/info/contact', null),
(18, 1001, 1003, 1, '/shops/shop[@id=3]/info/contact/website', '{"address" : "myshop.ie"}'),
(19, 1001, 1003, 12, '/shops/shop[@id=3]/info/contact/phonenumbers[@type="mob"]', '{"type" : "mob", "number" : "123123456"}'),
- (20, 1001, 1003, 12, '/shops/shop[@id=3]/info/contact/phonenumbers[@type="landline"]', '{"type" : "landline", "number" : "012123456"}');
\ No newline at end of file
+ (20, 1001, 1003, 12, '/shops/shop[@id=3]/info/contact/phonenumbers[@type="landline"]', '{"type" : "landline", "number" : "012123456"}');
diff --git a/cps-ri/src/test/resources/data/fragment.sql b/cps-ri/src/test/resources/data/fragment.sql
index 1897185fa..d7109f20b 100755
--- a/cps-ri/src/test/resources/data/fragment.sql
+++ b/cps-ri/src/test/resources/data/fragment.sql
@@ -1,3 +1,25 @@
+/*
+ ============LICENSE_START=======================================================
+ Copyright (C) 2021 Nordix Foundation.
+ Modifications Copyright (C) 2021 Pantheon.tech
+ Modifications Copyright (C) 2021 Bell Canada.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ SPDX-License-Identifier: Apache-2.0
+ ============LICENSE_END=========================================================
+*/
+
INSERT INTO DATASPACE (ID, NAME) VALUES
(1001, 'DATASPACE-001');
@@ -31,4 +53,4 @@ INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES)
(4206, 1001, 3003, null, '/parent-201', '{"leaf-value": "original"}'),
(4207, 1001, 3003, 4206, '/parent-201/child-203', '{}'),
(4208, 1001, 3003, 4206, '/parent-201/child-204[@key="A"]', '{"key": "A"}'),
- (4209, 1001, 3003, 4206, '/parent-201/child-204[@key="X"]', '{"key": "X"}');
\ No newline at end of file
+ (4209, 1001, 3003, 4206, '/parent-201/child-204[@key="X"]', '{"key": "X"}');
diff --git a/cps-ri/src/test/resources/data/schemaset.sql b/cps-ri/src/test/resources/data/schemaset.sql
index e6306d0d0..adfcfa172 100644
--- a/cps-ri/src/test/resources/data/schemaset.sql
+++ b/cps-ri/src/test/resources/data/schemaset.sql
@@ -1,3 +1,25 @@
+/*
+ ============LICENSE_START=======================================================
+ Copyright (C) 2020-2021 Pantheon.tech
+ Modifications Copyright (C) 2020 Nordix Foundation.
+ Modifications Copyright (C) 2020-2021 Bell Canada.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ SPDX-License-Identifier: Apache-2.0
+ ============LICENSE_END=========================================================
+*/
+
INSERT INTO DATASPACE (ID, NAME) VALUES
(1001, 'DATASPACE-001'), (1002, 'DATASPACE-002');