Diffstat (limited to 'cps-ri/src')
-rw-r--r--   cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java                |  61
-rwxr-xr-x   cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java              |  10
-rwxr-xr-x   cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy | 128
-rw-r--r--   cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy            |  33
-rw-r--r--   cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy             |  81
5 files changed, 209 insertions(+), 104 deletions(-)
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
index b22f171f2..82bcea2f1 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
@@ -3,6 +3,7 @@
* Copyright (C) 2021-2022 Nordix Foundation
* Modifications Copyright (C) 2021 Pantheon.tech
* Modifications Copyright (C) 2020-2022 Bell Canada.
+ * Modifications Copyright (C) 2022 TechMahindra Ltd.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -24,6 +25,7 @@ package org.onap.cps.spi.impl;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
+import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -77,9 +79,6 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
private final SessionManager sessionManager;
private static final String REG_EX_FOR_OPTIONAL_LIST_INDEX = "(\\[@[\\s\\S]+?]){0,1})";
- private static final Pattern REG_EX_PATTERN_FOR_LIST_ELEMENT_KEY_PREDICATE =
- Pattern.compile("\\[(\\@([^\\/]{0,9999}))\\]$");
- private static final String TOP_LEVEL_MODULE_PREFIX_PROPERTY_NAME = "topLevelModulePrefix";
@Override
public void addChildDataNode(final String dataspaceName, final String anchorName, final String parentNodeXpath,
@@ -88,6 +87,12 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
}
@Override
+ public void addChildDataNodes(final String dataspaceName, final String anchorName,
+ final String parentNodeXpath, final Collection<DataNode> dataNodes) {
+ addChildrenDataNodes(dataspaceName, anchorName, parentNodeXpath, dataNodes);
+ }
+
+ @Override
public void addListElements(final String dataspaceName, final String anchorName, final String parentNodeXpath,
final Collection<DataNode> newListElements) {
addChildrenDataNodes(dataspaceName, anchorName, parentNodeXpath, newListElements);
@@ -166,14 +171,45 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
@Override
public void storeDataNode(final String dataspaceName, final String anchorName, final DataNode dataNode) {
+ storeDataNodes(dataspaceName, anchorName, Collections.singletonList(dataNode));
+ }
+
+ @Override
+ public void storeDataNodes(final String dataspaceName, final String anchorName,
+ final Collection<DataNode> dataNodes) {
final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName);
final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName);
- final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(dataspaceEntity, anchorEntity,
- dataNode);
+ final List<FragmentEntity> fragmentEntities = new ArrayList<>(dataNodes.size());
try {
- fragmentRepository.save(fragmentEntity);
+ for (final DataNode dataNode: dataNodes) {
+ final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(dataspaceEntity, anchorEntity,
+ dataNode);
+ fragmentEntities.add(fragmentEntity);
+ }
+ fragmentRepository.saveAll(fragmentEntities);
} catch (final DataIntegrityViolationException exception) {
- throw AlreadyDefinedException.forDataNode(dataNode.getXpath(), anchorName, exception);
+ log.warn("Exception occurred : {} , While saving : {} data nodes, Retrying saving data nodes individually",
+ exception, dataNodes.size());
+ storeDataNodesIndividually(dataspaceName, anchorName, dataNodes);
+ }
+ }
+
+ private void storeDataNodesIndividually(final String dataspaceName, final String anchorName,
+ final Collection<DataNode> dataNodes) {
+ final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName);
+ final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName);
+ final Collection<String> failedXpaths = new HashSet<>();
+ for (final DataNode dataNode: dataNodes) {
+ try {
+ final FragmentEntity fragmentEntity = convertToFragmentWithAllDescendants(dataspaceEntity, anchorEntity,
+ dataNode);
+ fragmentRepository.save(fragmentEntity);
+ } catch (final DataIntegrityViolationException e) {
+ failedXpaths.add(dataNode.getXpath());
+ }
+ }
+ if (!failedXpaths.isEmpty()) {
+ throw new AlreadyDefinedExceptionBatch(failedXpaths);
}
}
@@ -346,7 +382,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
private DataNode toDataNode(final FragmentEntity fragmentEntity,
final FetchDescendantsOption fetchDescendantsOption) {
final List<DataNode> childDataNodes = getChildDataNodes(fragmentEntity, fetchDescendantsOption);
- Map<String, Object> leaves = new HashMap<>();
+ Map<String, Serializable> leaves = new HashMap<>();
if (fragmentEntity.getAttributes() != null) {
leaves = jsonObjectMapper.convertJsonString(fragmentEntity.getAttributes(), Map.class);
}
@@ -368,7 +404,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
@Override
public void updateDataLeaves(final String dataspaceName, final String anchorName, final String xpath,
- final Map<String, Object> leaves) {
+ final Map<String, Serializable> leaves) {
final FragmentEntity fragmentEntity = getFragmentWithoutDescendantsByXpath(dataspaceName, anchorName, xpath);
fragmentEntity.setAttributes(jsonObjectMapper.asJsonString(leaves));
fragmentRepository.save(fragmentEntity);
@@ -511,13 +547,10 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
if (isRootContainerNodeXpath(targetXpath)) {
parentNodeXpath = targetXpath;
} else {
- parentNodeXpath = targetXpath.substring(0, targetXpath.lastIndexOf('/'));
+ parentNodeXpath = CpsPathUtil.getNormalizedParentXpath(targetXpath);
}
parentFragmentEntity = getFragmentWithoutDescendantsByXpath(dataspaceName, anchorName, parentNodeXpath);
- final String lastXpathElement = targetXpath.substring(targetXpath.lastIndexOf('/'));
- final boolean isListElement = REG_EX_PATTERN_FOR_LIST_ELEMENT_KEY_PREDICATE
- .matcher(lastXpathElement).find();
- if (isListElement) {
+ if (CpsPathUtil.isPathToListElement(targetXpath)) {
targetDeleted = deleteDataNode(parentFragmentEntity, targetXpath);
} else {
targetDeleted = deleteAllListElements(parentFragmentEntity, targetXpath);
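The storeDataNodes() change above saves all converted fragments with a single saveAll() call and, only when that batch hits a DataIntegrityViolationException, retries each data node individually so that just the genuinely conflicting xpaths are reported via AlreadyDefinedExceptionBatch. Below is a generic, self-contained sketch of that save-all-then-fall-back pattern with plain Spring Data; it is not the CPS code, and the repository/entity types are placeholders.

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;
    import org.springframework.dao.DataIntegrityViolationException;
    import org.springframework.data.jpa.repository.JpaRepository;

    final class BatchSaveWithFallback {

        // Try one batched saveAll first; on a constraint violation, save each entity
        // on its own and return the ones that still fail (the CPS code maps those
        // failed xpaths to an AlreadyDefinedExceptionBatch).
        static <T> List<T> saveAllOrIndividually(final JpaRepository<T, ?> repository,
                                                 final Collection<T> entities) {
            try {
                repository.saveAll(entities);        // fast path: single batched flush
                return List.of();
            } catch (final DataIntegrityViolationException batchFailure) {
                final List<T> failedEntities = new ArrayList<>();
                for (final T entity : entities) {    // slow path: isolate the offending entities
                    try {
                        repository.save(entity);
                    } catch (final DataIntegrityViolationException individualFailure) {
                        failedEntities.add(entity);
                    }
                }
                return failedEntities;
            }
        }
    }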
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
index 03f021e76..c9f9a78ef 100755
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
@@ -337,12 +337,14 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ
*/
private String getNameForChecksum(
final String checksum, final Collection<YangResourceEntity> yangResourceEntities) {
- return
- yangResourceEntities.stream()
+ final Optional<String> optionalFileName = yangResourceEntities.stream()
.filter(entity -> StringUtils.equals(checksum, (entity.getChecksum())))
.findFirst()
- .map(YangResourceEntity::getFileName)
- .orElse(null);
+ .map(YangResourceEntity::getFileName);
+ if (optionalFileName.isPresent()) {
+ return optionalFileName.get();
+ }
+ return null;
}
/**
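getNameForChecksum() above is just a stream lookup from a checksum to the matching file name. The stand-alone sketch below shows the same lookup; YangResource is a hypothetical stand-in for YangResourceEntity, and the compact orElse(null) form is behaviourally equivalent to the isPresent()/get() version introduced in the hunk.

    import java.util.Collection;
    import java.util.List;
    import java.util.Objects;

    final class ChecksumLookup {

        // hypothetical stand-in for YangResourceEntity; only checksum and file name matter here
        record YangResource(String checksum, String fileName) { }

        static String getNameForChecksum(final String checksum, final Collection<YangResource> resources) {
            return resources.stream()
                    .filter(resource -> Objects.equals(checksum, resource.checksum()))
                    .findFirst()
                    .map(YangResource::fileName)
                    .orElse(null);   // no matching checksum: return null, as the method above does
        }

        public static void main(final String[] args) {
            final List<YangResource> resources = List.of(new YangResource("abc123", "bookstore.yang"));
            System.out.println(getNameForChecksum("abc123", resources));   // bookstore.yang
            System.out.println(getNameForChecksum("missing", resources));  // null
        }
    }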
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
index fbf414d2a..cc2369d50 100755
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
@@ -3,6 +3,7 @@
* Copyright (C) 2021-2022 Nordix Foundation
* Modifications Copyright (C) 2021 Pantheon.tech
* Modifications Copyright (C) 2021-2022 Bell Canada.
+ * Modifications Copyright (C) 2022 TechMahindra Ltd.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -26,7 +27,6 @@ import com.google.common.collect.ImmutableSet
import org.onap.cps.cpspath.parser.PathParsingException
import org.onap.cps.spi.CpsDataPersistenceService
import org.onap.cps.spi.entities.FragmentEntity
-import org.onap.cps.spi.exceptions.AlreadyDefinedException
import org.onap.cps.spi.exceptions.AlreadyDefinedExceptionBatch
import org.onap.cps.spi.exceptions.AnchorNotFoundException
import org.onap.cps.spi.exceptions.CpsAdminException
@@ -38,6 +38,7 @@ import org.onap.cps.spi.model.DataNodeBuilder
import org.onap.cps.utils.JsonObjectMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.jdbc.Sql
+
import javax.validation.ConstraintViolationException
import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS
@@ -48,25 +49,29 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
@Autowired
CpsDataPersistenceService objectUnderTest
- static final JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())
- static final DataNodeBuilder dataNodeBuilder = new DataNodeBuilder()
+ static JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())
+ static DataNodeBuilder dataNodeBuilder = new DataNodeBuilder()
static final String SET_DATA = '/data/fragment.sql'
- static final int DATASPACE_1001_ID = 1001L
- static final int ANCHOR_3003_ID = 3003L
- static final long ID_DATA_NODE_WITH_DESCENDANTS = 4001
- static final String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1'
- static final String XPATH_DATA_NODE_WITH_LEAVES = '/parent-207'
- static final long DATA_NODE_202_FRAGMENT_ID = 4202L
- static final long CHILD_OF_DATA_NODE_202_FRAGMENT_ID = 4203L
- static final long LIST_DATA_NODE_PARENT201_FRAGMENT_ID = 4206L
- static final long LIST_DATA_NODE_PARENT203_FRAGMENT_ID = 4214L
- static final long LIST_DATA_NODE_PARENT202_FRAGMENT_ID = 4211L
- static final long PARENT_3_FRAGMENT_ID = 4003L
-
- static final DataNode newDataNode = new DataNodeBuilder().build()
- static DataNode existingDataNode
- static DataNode existingChildDataNode
+ static int DATASPACE_1001_ID = 1001L
+ static int ANCHOR_3003_ID = 3003L
+ static long ID_DATA_NODE_WITH_DESCENDANTS = 4001
+ static String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1'
+ static String XPATH_DATA_NODE_WITH_LEAVES = '/parent-207'
+ static long DATA_NODE_202_FRAGMENT_ID = 4202L
+ static long CHILD_OF_DATA_NODE_202_FRAGMENT_ID = 4203L
+ static long LIST_DATA_NODE_PARENT201_FRAGMENT_ID = 4206L
+ static long LIST_DATA_NODE_PARENT203_FRAGMENT_ID = 4214L
+ static long LIST_DATA_NODE_PARENT202_FRAGMENT_ID = 4211L
+ static long PARENT_3_FRAGMENT_ID = 4003L
+
+ static Collection<DataNode> newDataNodes = [new DataNodeBuilder().build()]
+ static Collection<DataNode> existingDataNodes = [createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS)]
+ static Collection<DataNode> existingChildDataNodes = [createDataNodeTree('/parent-1/child-1')]
+
+ def static deleteTestParentXPath = '/parent-200'
+ def static deleteTestChildXpath = "${deleteTestParentXPath}/child-with-slash[@key='a/b']"
+ def static deleteTestGrandChildXPath = "${deleteTestChildXpath}/grandChild"
def expectedLeavesByXpathMap = [
'/parent-207' : ['parent-leaf': 'parent-leaf value'],
@@ -75,11 +80,6 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
'/parent-207/child-002/grand-child': ['grand-child-leaf': 'grand-child-leaf value']
]
- static {
- existingDataNode = createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS)
- existingChildDataNode = createDataNodeTree('/parent-1/child-1')
- }
-
@Sql([CLEAR_DATA, SET_DATA])
def 'Get existing datanode with descendants.'() {
when: 'the node is retrieved by its xpath'
@@ -93,13 +93,13 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Storing and Retrieving a new DataNode with descendants.'() {
+ def 'Storing and Retrieving new DataNodes with descendants.'() {
when: 'a fragment with descendants is stored'
def parentXpath = '/parent-new'
def childXpath = '/parent-new/child-new'
def grandChildXpath = '/parent-new/child-new/grandchild-new'
- objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1,
- createDataNodeTree(parentXpath, childXpath, grandChildXpath))
+ def dataNodes = [createDataNodeTree(parentXpath, childXpath, grandChildXpath)]
+ objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, dataNodes)
then: 'it can be retrieved by its xpath'
def dataNode = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, INCLUDE_ALL_DESCENDANTS)
assert dataNode.xpath == parentXpath
@@ -117,9 +117,9 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
def 'Store data node for multiple anchors using the same schema.'() {
def xpath = '/parent-new'
given: 'a fragment is stored for an anchor'
- objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1, createDataNodeTree(xpath))
+ objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, [createDataNodeTree(xpath)])
when: 'another fragment is stored for an other anchor, using the same schema set'
- objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME3, createDataNodeTree(xpath))
+ objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME3, [createDataNodeTree(xpath)])
then: 'both fragments can be retrieved by their xpath'
def fragment1 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME1, xpath)
fragment1.anchor.name == ANCHOR_NAME1
@@ -130,45 +130,48 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Store datanode error scenario: #scenario.'() {
+ def 'Store datanodes error scenario: #scenario.'() {
when: 'attempt to store a data node with #scenario'
- objectUnderTest.storeDataNode(dataspaceName, anchorName, dataNode)
+ objectUnderTest.storeDataNodes(dataspaceName, anchorName, dataNodes)
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
- scenario | dataspaceName | anchorName | dataNode || expectedException
- 'dataspace does not exist' | 'unknown' | 'not-relevant' | newDataNode || DataspaceNotFoundException
- 'schema set does not exist' | DATASPACE_NAME | 'unknown' | newDataNode || AnchorNotFoundException
- 'anchor already exists' | DATASPACE_NAME | ANCHOR_NAME1 | newDataNode || ConstraintViolationException
- 'datanode already exists' | DATASPACE_NAME | ANCHOR_NAME1 | existingDataNode || AlreadyDefinedException
+ scenario | dataspaceName | anchorName | dataNodes || expectedException
+ 'dataspace does not exist' | 'unknown' | 'not-relevant' | newDataNodes || DataspaceNotFoundException
+ 'schema set does not exist' | DATASPACE_NAME | 'unknown' | newDataNodes || AnchorNotFoundException
+ 'anchor already exists' | DATASPACE_NAME | ANCHOR_NAME1 | newDataNodes || ConstraintViolationException
+ 'datanode already exists' | DATASPACE_NAME | ANCHOR_NAME1 | existingDataNodes || AlreadyDefinedExceptionBatch
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Add a child to a Fragment that already has a child.'() {
- given: ' a new child node'
- def newChild = createDataNodeTree('xpath for new child')
+ def 'Add children to a Fragment that already has a child.'() {
+ given: 'a collection of new child data nodes'
+ def newChild1 = createDataNodeTree('/parent-1/child-2')
+ def newChild2 = createDataNodeTree('/parent-1/child-3')
+ def newChildrenCollection = [newChild1, newChild2]
when: 'the child is added to an existing parent with 1 child'
- objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChild)
- then: 'the parent is now has to 2 children'
+ objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChildrenCollection)
+ then: 'the parent now has 3 children'
def expectedExistingChildPath = '/parent-1/child-1'
def parentFragment = fragmentRepository.findById(ID_DATA_NODE_WITH_DESCENDANTS).orElseThrow()
- parentFragment.childFragments.size() == 2
+ parentFragment.childFragments.size() == 3
and: 'it still has the old child'
parentFragment.childFragments.find({ it.xpath == expectedExistingChildPath })
- and: 'it has the new child'
- parentFragment.childFragments.find({ it.xpath == newChild.xpath })
+ and: 'it has the new children'
+ parentFragment.childFragments.find({ it.xpath == newChildrenCollection[0].xpath })
+ parentFragment.childFragments.find({ it.xpath == newChildrenCollection[1].xpath })
}
@Sql([CLEAR_DATA, SET_DATA])
def 'Add child error scenario: #scenario.'() {
when: 'attempt to add a child data node with #scenario'
- objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNode)
+ objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNodes)
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
- scenario | parentXpath | dataNode || expectedException
- 'parent does not exist' | '/unknown' | newDataNode || DataNodeNotFoundException
- 'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNode || AlreadyDefinedException
+ scenario | parentXpath | dataNodes || expectedException
+ 'parent does not exist' | '/unknown' | newDataNodes || DataNodeNotFoundException
+ 'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNodes || AlreadyDefinedExceptionBatch
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -288,7 +291,8 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
scenario | dataspaceName | anchorName | xpath || expectedException
'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO XPATH' || DataNodeNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO-XPATH' || DataNodeNotFoundException
+ 'invalid xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -318,7 +322,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
scenario | dataspaceName | anchorName | xpath || expectedException
'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -412,7 +416,8 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
scenario | dataspaceName | anchorName | xpath || expectedException
'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException
+ 'invalid xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -525,6 +530,25 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
+ def 'Delete data nodes with "/"-token in list key value: #scenario. (CPS-1409)'() {
+ given: 'a data node with a list-element child with "/" in its index value (and a grandchild)'
+ def grandChild = new DataNodeBuilder().withXpath(deleteTestGrandChildXPath).build()
+ def child = new DataNodeBuilder().withXpath(deleteTestChildXpath).withChildDataNodes([grandChild]).build()
+ objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME3, deleteTestParentXPath, child)
+ and: 'number of children before delete is stored'
+ def numberOfChildrenBeforeDelete = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS).childDataNodes.size()
+ when: 'target node is deleted'
+ objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, deleteTarget)
+ then: 'one child has been deleted'
+ def numberOfChildrenAfterDelete = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS).childDataNodes.size()
+ assert numberOfChildrenAfterDelete == numberOfChildrenBeforeDelete - 1
+ where:
+ scenario | deleteTarget | pathToParentOfDeletedNode
+ 'list element with /' | deleteTestChildXpath | deleteTestParentXPath
+ 'child of list element' | deleteTestGrandChildXPath | deleteTestChildXpath
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
def 'Delete list error scenario: #scenario.'() {
when: 'attempting to delete scenario: #scenario.'
objectUnderTest.deleteListDataNode(DATASPACE_NAME, ANCHOR_NAME3, targetXpaths)
@@ -541,7 +565,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Confirm deletion of #scenario.'() {
+ def 'Delete data node by xpath #scenario.'() {
given: 'a valid data node'
def dataNode
and: 'data nodes are deleted'
@@ -566,7 +590,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Delete data node with #scenario.'() {
+ def 'Delete data node error scenario: #scenario.'() {
when: 'data node is deleted'
objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, datanodeXpath)
then: 'a #expectedException is thrown'
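The new CPS-1409 test above deletes a list element whose key value contains a '/' (child-with-slash[@key='a/b']), exactly the case that broke the old substring/lastIndexOf('/') parent lookup replaced by CpsPathUtil in the first file of this change. The sketch below is a hypothetical illustration of that pitfall, not the CpsPathUtil implementation.

    final class ParentXpathSketch {

        // pre-change approach: breaks as soon as a list-key value contains '/'
        static String naiveParent(final String xpath) {
            return xpath.substring(0, xpath.lastIndexOf('/'));
        }

        // predicate-aware variant: drop a trailing [@...] block before looking for the parent
        static String predicateAwareParent(final String xpath) {
            final String withoutTrailingPredicate = xpath.replaceAll("\\[@[^\\]]*\\]$", "");
            return withoutTrailingPredicate.substring(0, withoutTrailingPredicate.lastIndexOf('/'));
        }

        public static void main(final String[] args) {
            final String listElementXpath = "/parent-200/child-with-slash[@key='a/b']";
            System.out.println(naiveParent(listElementXpath));          // /parent-200/child-with-slash[@key='a   (wrong)
            System.out.println(predicateAwareParent(listElementXpath)); // /parent-200                            (expected)
        }
    }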
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
index e69cbee47..255e8e52f 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
@@ -2,6 +2,7 @@
* ============LICENSE_START=======================================================
* Copyright (c) 2021 Bell Canada.
* Modifications Copyright (C) 2021-2022 Nordix Foundation
+ * Modifications Copyright (C) 2022 TechMahindra Ltd.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -34,6 +35,7 @@ import org.onap.cps.spi.repository.DataspaceRepository
import org.onap.cps.spi.repository.FragmentRepository
import org.onap.cps.spi.utils.SessionManager
import org.onap.cps.utils.JsonObjectMapper
+import org.springframework.dao.DataIntegrityViolationException
import spock.lang.Specification
class CpsDataPersistenceServiceSpec extends Specification {
@@ -44,7 +46,28 @@ class CpsDataPersistenceServiceSpec extends Specification {
def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())
def mockSessionManager = Mock(SessionManager)
- def objectUnderTest = new CpsDataPersistenceServiceImpl(mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper, mockSessionManager)
+ def objectUnderTest = Spy(new CpsDataPersistenceServiceImpl(mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper, mockSessionManager))
+
+ def 'Storing data nodes individually when batch operation fails'(){
+ given: 'two data nodes and supporting repository mock behavior'
+ def dataNode1 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath1','OK')
+ def dataNode2 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath2','OK')
+ and: 'the batch store operation will fail'
+ mockFragmentRepository.saveAll(*_) >> { throw new DataIntegrityViolationException("Exception occurred") }
+ when: 'trying to store data nodes'
+ objectUnderTest.storeDataNodes('dataSpaceName', 'anchorName', [dataNode1, dataNode2])
+ then: 'the two data nodes are saved individually'
+ 2 * mockFragmentRepository.save(_);
+ }
+
+ def 'Store single data node.'() {
+ given: 'a data node'
+ def dataNode = new DataNode()
+ when: 'storing a single data node'
+ objectUnderTest.storeDataNode('dataspace1', 'anchor1', dataNode)
+ then: 'the call is redirected to storing a collection of data nodes with just the given data node'
+ 1 * objectUnderTest.storeDataNodes('dataspace1', 'anchor1', [dataNode])
+ }
def 'Handling of StaleStateException (caused by concurrent updates) during update data node and descendants.'() {
given: 'the fragment repository returns a fragment entity'
@@ -66,10 +89,10 @@ class CpsDataPersistenceServiceSpec extends Specification {
def 'Handling of StaleStateException (caused by concurrent updates) during update data nodes and descendants.'() {
given: 'the system contains and can update one datanode'
- def dataNode1 = mockDataNodeAndFragmentEntity('/node1', 'OK')
+ def dataNode1 = createDataNodeAndMockRepositoryMethodSupportingIt('/node1', 'OK')
and: 'the system contains two more datanodes that throw an exception while updating'
- def dataNode2 = mockDataNodeAndFragmentEntity('/node2', 'EXCEPTION')
- def dataNode3 = mockDataNodeAndFragmentEntity('/node3', 'EXCEPTION')
+ def dataNode2 = createDataNodeAndMockRepositoryMethodSupportingIt('/node2', 'EXCEPTION')
+ def dataNode3 = createDataNodeAndMockRepositoryMethodSupportingIt('/node3', 'EXCEPTION')
and: 'the batch update will therefore also fail'
mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") }
when: 'attempt batch update data nodes'
@@ -174,7 +197,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
}})
}
- def mockDataNodeAndFragmentEntity(xpath, scenario) {
+ def createDataNodeAndMockRepositoryMethodSupportingIt(xpath, scenario) {
def dataNode = new DataNodeBuilder().withXpath(xpath).build()
def fragmentEntity = new FragmentEntity(xpath: xpath, childFragments: [])
mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, xpath) >> fragmentEntity
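The 'Store single data node.' test above uses a Spock Spy to prove that the single-node API now simply delegates to the new collection API. For readers more used to JUnit and Mockito, an equivalent stand-alone sketch of that spy-based delegation check could look like this (StoreService is a hypothetical service, not the CPS class):

    import static org.mockito.Mockito.spy;
    import static org.mockito.Mockito.verify;

    import java.util.Collection;
    import java.util.List;
    import org.junit.jupiter.api.Test;

    class DelegationSpyTest {

        // hypothetical service mirroring the single-node -> collection delegation above
        static class StoreService {
            void storeDataNode(final String anchorName, final String dataNode) {
                storeDataNodes(anchorName, List.of(dataNode));
            }
            void storeDataNodes(final String anchorName, final Collection<String> dataNodes) {
                // persistence omitted in this sketch
            }
        }

        @Test
        void singleNodeCallIsDelegatedToCollectionCall() {
            final StoreService storeService = spy(new StoreService());
            storeService.storeDataNode("anchor1", "node-1");
            // the internal self-call is routed through the spy, so it can be verified
            verify(storeService).storeDataNodes("anchor1", List.of("node-1"));
        }
    }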
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy
index fb6749c3f..b26cef4de 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy
@@ -27,7 +27,11 @@ import org.onap.cps.spi.model.DataNode
import org.onap.cps.spi.model.DataNodeBuilder
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.jdbc.Sql
+
+import java.util.concurrent.TimeUnit
+
import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS
+import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS
class CpsToDataNodePerfTest extends CpsPersistenceSpecBase {
@@ -36,66 +40,85 @@ class CpsToDataNodePerfTest extends CpsPersistenceSpecBase {
@Autowired
CpsDataPersistenceService objectUnderTest
- def PERF_TEST_PARENT = '/perf-parent-1'
+ static def PERF_TEST_PARENT = '/perf-parent-1'
+ static def NUMBER_OF_CHILDREN = 200
+ static def NUMBER_OF_GRAND_CHILDREN = 50
+ static def TOTAL_NUMBER_OF_NODES = 1 + NUMBER_OF_CHILDREN + (NUMBER_OF_CHILDREN * NUMBER_OF_GRAND_CHILDREN) // Parent + Children + Grand-children
+ static def ALLOWED_SETUP_TIME_MS = TimeUnit.SECONDS.toMillis(10)
+ static def ALLOWED_READ_TIME_AL_NODES_MS = 500
- def EXPECTED_NUMBER_OF_NODES = 10051 // 1 Parent + 50 Children + 10000 Grand-children
+ def readStopWatch = new StopWatch()
@Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Get data node by xpath with all descendants with many children'() {
- given: 'nodes and grandchildren have been persisted'
+ def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() {
+ given: 'a node with a large number of descendants is created'
def setupStopWatch = new StopWatch()
setupStopWatch.start()
createLineage()
setupStopWatch.stop()
def setupDurationInMillis = setupStopWatch.getTime()
- and: 'setup duration is under 8000 milliseconds'
- assert setupDurationInMillis < 8000
+ and: 'setup duration is under #ALLOWED_SETUP_TIME_MS milliseconds'
+ assert setupDurationInMillis < ALLOWED_SETUP_TIME_MS
+ }
+
+ def 'Get data node with many descendants by xpath #scenario'() {
when: 'get parent is executed with all descendants'
- def readStopWatch = new StopWatch()
readStopWatch.start()
- def result = objectUnderTest.getDataNode('PERF-DATASPACE', 'PERF-ANCHOR', PERF_TEST_PARENT, INCLUDE_ALL_DESCENDANTS)
+ def result = objectUnderTest.getDataNode('PERF-DATASPACE', 'PERF-ANCHOR', xpath, INCLUDE_ALL_DESCENDANTS)
readStopWatch.stop()
def readDurationInMillis = readStopWatch.getTime()
- then: 'read duration is under 450 milliseconds'
- assert readDurationInMillis < 450
+ then: 'read duration is under 500 milliseconds'
+ assert readDurationInMillis < ALLOWED_READ_TIME_AL_NODES_MS
and: 'data node is returned with all the descendants populated'
- assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES
- when: 'get root is executed with all descendants'
+ assert countDataNodes(result) == TOTAL_NUMBER_OF_NODES
+ where: 'the following xPaths are used'
+ scenario || xpath
+ 'parent' || PERF_TEST_PARENT
+ 'root' || ''
+ }
+
+ def 'Query parent data node with many descendants by cps-path'() {
+ when: 'query is executed with all descendants'
readStopWatch.reset()
readStopWatch.start()
- result = objectUnderTest.getDataNode('PERF-DATASPACE', 'PERF-ANCHOR', '', INCLUDE_ALL_DESCENDANTS)
+ def result = objectUnderTest.queryDataNodes('PERF-DATASPACE', 'PERF-ANCHOR', '//perf-parent-1' , INCLUDE_ALL_DESCENDANTS)
readStopWatch.stop()
- readDurationInMillis = readStopWatch.getTime()
- then: 'read duration is under 450 milliseconds'
- assert readDurationInMillis < 450
+ def readDurationInMillis = readStopWatch.getTime()
+ then: 'read duration is under 500 milliseconds'
+ assert readDurationInMillis < ALLOWED_READ_TIME_AL_NODES_MS
and: 'data node is returned with all the descendants populated'
- assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES
+ assert countDataNodes(result) == TOTAL_NUMBER_OF_NODES
+ }
+
+ def 'Query many descendants by cps-path with #scenario'() {
when: 'query is executed with all descendants'
readStopWatch.reset()
readStopWatch.start()
- result = objectUnderTest.queryDataNodes('PERF-DATASPACE', 'PERF-ANCHOR', '//perf-parent-1', INCLUDE_ALL_DESCENDANTS)
+ def result = objectUnderTest.queryDataNodes('PERF-DATASPACE', 'PERF-ANCHOR', '//perf-test-grand-child-1', descendantsOption)
readStopWatch.stop()
- readDurationInMillis = readStopWatch.getTime()
- then: 'read duration is under 450 milliseconds'
- assert readDurationInMillis < 450
+ def readDurationInMillis = readStopWatch.getTime()
+ then: 'read duration is under the allowed duration'
+ assert readDurationInMillis < alowedDuration
and: 'data node is returned with all the descendants populated'
- assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES
+ assert result.size() == NUMBER_OF_CHILDREN
+ where: 'the following options are used'
+ scenario | descendantsOption || alowedDuration
+ 'omit descendants ' | OMIT_DESCENDANTS || 150
+ 'include descendants (although there are none)' | INCLUDE_ALL_DESCENDANTS || 1500
}
def createLineage() {
- def numOfChildren = 50
- def numOfGrandChildren = 200
- (1..numOfChildren).each {
+ (1..NUMBER_OF_CHILDREN).each {
def childName = "perf-test-child-${it}".toString()
- def newChild = goForthAndMultiply(PERF_TEST_PARENT, childName, numOfGrandChildren)
+ def newChild = goForthAndMultiply(PERF_TEST_PARENT, childName)
objectUnderTest.addChildDataNode('PERF-DATASPACE', 'PERF-ANCHOR', PERF_TEST_PARENT, newChild)
}
}
- def goForthAndMultiply(parentXpath, childName, numOfGrandChildren) {
+ def goForthAndMultiply(parentXpath, childName) {
def children = []
- (1..numOfGrandChildren).each {
- def child = new DataNodeBuilder().withXpath("${parentXpath}/${childName}/${it}perf-test-grand-child").build()
+ (1..NUMBER_OF_GRAND_CHILDREN).each {
+ def child = new DataNodeBuilder().withXpath("${parentXpath}/${childName}/perf-test-grand-child-${it}").build()
children.add(child)
}
return new DataNodeBuilder().withXpath("${parentXpath}/${childName}").withChildDataNodes(children).build()
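For reference, the lineage the reworked perf test persists is one parent with NUMBER_OF_CHILDREN children and NUMBER_OF_GRAND_CHILDREN grand-children per child, i.e. 1 + 200 + 200*50 = 10201 nodes (TOTAL_NUMBER_OF_NODES above). A rough Java sketch that assembles the same tree in memory with the DataNodeBuilder API used in the test, assuming withChildDataNodes() accepts a collection of child nodes as the Groovy code uses it:

    import java.util.ArrayList;
    import java.util.List;
    import org.onap.cps.spi.model.DataNode;
    import org.onap.cps.spi.model.DataNodeBuilder;

    final class PerfLineageSketch {

        // builds 1 parent + numberOfChildren children + (numberOfChildren * numberOfGrandChildren) grand-children
        static DataNode buildLineage(final String parentXpath, final int numberOfChildren,
                                     final int numberOfGrandChildren) {
            final List<DataNode> children = new ArrayList<>();
            for (int childIndex = 1; childIndex <= numberOfChildren; childIndex++) {
                final String childXpath = parentXpath + "/perf-test-child-" + childIndex;
                final List<DataNode> grandChildren = new ArrayList<>();
                for (int grandChildIndex = 1; grandChildIndex <= numberOfGrandChildren; grandChildIndex++) {
                    grandChildren.add(new DataNodeBuilder()
                            .withXpath(childXpath + "/perf-test-grand-child-" + grandChildIndex)
                            .build());
                }
                children.add(new DataNodeBuilder()
                        .withXpath(childXpath)
                        .withChildDataNodes(grandChildren)
                        .build());
            }
            return new DataNodeBuilder().withXpath(parentXpath).withChildDataNodes(children).build();
        }
    }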