Diffstat (limited to 'cps-ri/src/test')
9 files changed, 662 insertions, 194 deletions
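Most of the change migrates the cps-ri persistence tests from single-node calls (storeDataNode, addChildDataNode) to their batch counterparts (storeDataNodes, addChildDataNodes, getDataNodes) and adds dedicated delete/read performance specs. The first hunk below also switches the ancestor-axis query assertions from an index-by-index check to an order-independent comparison of xpaths. A minimal, standalone Groovy sketch of that comparison pattern (the hard-coded xpaths are copied from the test data in the diff, the stream/lambda style follows the diff and assumes Groovy 3; everything else is illustrative):

    import java.util.stream.Collectors

    // Query results may be returned in any order, so the xpaths are compared as a set
    // instead of index by index.
    def result = [[xpath: "/shops/shop[@id='1']/categories[@code='2']"],
                  [xpath: "/shops/shop[@id='1']/categories[@code='1']"]]
    def expectedXPaths = ["/shops/shop[@id='1']/categories[@code='1']",
                          "/shops/shop[@id='1']/categories[@code='2']"]
    def resultXpaths = result.stream().map(node -> node.xpath).collect(Collectors.toSet())
    assert resultXpaths.containsAll(expectedXPaths)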
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy index 56e388335e..b6d2c5d65e 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy @@ -26,6 +26,8 @@ import org.onap.cps.spi.exceptions.CpsPathException import org.springframework.beans.factory.annotation.Autowired import org.springframework.test.context.jdbc.Sql +import java.util.stream.Collectors + import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS @@ -147,27 +149,30 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase { def result = objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, cpsPath, INCLUDE_ALL_DESCENDANTS) then: 'the xpaths of the retrieved data nodes are as expected' result.size() == expectedXPaths.size() - for (int i = 0; i < result.size(); i++) { - assert result[i].getXpath() == expectedXPaths[i] - assert result[i].childDataNodes.size() == expectedNumberOfChildren[i] + if (result.size() > 0) { + def resultXpaths = result.stream().map(it -> it.xpath).collect(Collectors.toSet()) + resultXpaths.containsAll(expectedXPaths) + result.each { + assert it.childDataNodes.size() == expectedNumberOfChildren + } } where: 'the following data is used' scenario | cpsPath || expectedXPaths || expectedNumberOfChildren - 'multiple list-ancestors' | '//book/ancestor::categories' || ["/shops/shop[@id='1']/categories[@code='1']", "/shops/shop[@id='1']/categories[@code='2']"] || [1, 1] - 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ["/shops/shop[@id='1']/categories[@code='1']"] || [1] - 'top ancestor' | '//shop[@id=1]/ancestor::shops' || ['/shops'] || [5] - 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ["/shops/shop[@id='1']"] || [3] - 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']"] || [1] - 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ["/shops/shop[@id='3']/info/contact"] || [3] - 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ["/shops/shop[@id='1']"] || [3] - 'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || [] || [] - 'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || [] || [] + 'multiple list-ancestors' | '//book/ancestor::categories' || ["/shops/shop[@id='1']/categories[@code='2']", "/shops/shop[@id='1']/categories[@code='1']"] || 1 + 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ["/shops/shop[@id='1']/categories[@code='1']"] || 1 + 'top ancestor' | '//shop[@id=1]/ancestor::shops' || ['/shops'] || 5 + 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ["/shops/shop[@id='1']"] || 3 + 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']"] || 1 + 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ["/shops/shop[@id='3']/info/contact"] || 3 + 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || 
["/shops/shop[@id='1']"] || 3 + 'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || [] || null + 'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || [] || null } def 'Cps Path query with syntax error throws a CPS Path Exception.'() { when: 'trying to execute a query with a syntax (parsing) error' objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, 'cpsPath that cannot be parsed' , OMIT_DESCENDANTS) - then: 'exception is thrown' + then: 'a cps path exception is thrown' thrown(CpsPathException) } diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy index fbf414d2ad..6252fff56c 100755 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy @@ -1,8 +1,9 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2022 Nordix Foundation + * Copyright (C) 2021-2023 Nordix Foundation * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada. + * Modifications Copyright (C) 2022 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,7 +27,6 @@ import com.google.common.collect.ImmutableSet import org.onap.cps.cpspath.parser.PathParsingException import org.onap.cps.spi.CpsDataPersistenceService import org.onap.cps.spi.entities.FragmentEntity -import org.onap.cps.spi.exceptions.AlreadyDefinedException import org.onap.cps.spi.exceptions.AlreadyDefinedExceptionBatch import org.onap.cps.spi.exceptions.AnchorNotFoundException import org.onap.cps.spi.exceptions.CpsAdminException @@ -38,6 +38,7 @@ import org.onap.cps.spi.model.DataNodeBuilder import org.onap.cps.utils.JsonObjectMapper import org.springframework.beans.factory.annotation.Autowired import org.springframework.test.context.jdbc.Sql + import javax.validation.ConstraintViolationException import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS @@ -48,25 +49,29 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { @Autowired CpsDataPersistenceService objectUnderTest - static final JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) - static final DataNodeBuilder dataNodeBuilder = new DataNodeBuilder() + static JsonObjectMapper jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) + static DataNodeBuilder dataNodeBuilder = new DataNodeBuilder() static final String SET_DATA = '/data/fragment.sql' - static final int DATASPACE_1001_ID = 1001L - static final int ANCHOR_3003_ID = 3003L - static final long ID_DATA_NODE_WITH_DESCENDANTS = 4001 - static final String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1' - static final String XPATH_DATA_NODE_WITH_LEAVES = '/parent-207' - static final long DATA_NODE_202_FRAGMENT_ID = 4202L - static final long CHILD_OF_DATA_NODE_202_FRAGMENT_ID = 4203L - static final long LIST_DATA_NODE_PARENT201_FRAGMENT_ID = 4206L - static final long LIST_DATA_NODE_PARENT203_FRAGMENT_ID = 4214L - static final long LIST_DATA_NODE_PARENT202_FRAGMENT_ID = 4211L - static final long PARENT_3_FRAGMENT_ID = 4003L - - 
static final DataNode newDataNode = new DataNodeBuilder().build() - static DataNode existingDataNode - static DataNode existingChildDataNode + static int DATASPACE_1001_ID = 1001L + static int ANCHOR_3003_ID = 3003L + static long ID_DATA_NODE_WITH_DESCENDANTS = 4001 + static String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1' + static String XPATH_DATA_NODE_WITH_LEAVES = '/parent-207' + static long DATA_NODE_202_FRAGMENT_ID = 4202L + static long CHILD_OF_DATA_NODE_202_FRAGMENT_ID = 4203L + static long LIST_DATA_NODE_PARENT201_FRAGMENT_ID = 4206L + static long LIST_DATA_NODE_PARENT203_FRAGMENT_ID = 4214L + static long LIST_DATA_NODE_PARENT202_FRAGMENT_ID = 4211L + static long PARENT_3_FRAGMENT_ID = 4003L + + static Collection<DataNode> newDataNodes = [new DataNodeBuilder().build()] + static Collection<DataNode> existingDataNodes = [createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS)] + static Collection<DataNode> existingChildDataNodes = [createDataNodeTree('/parent-1/child-1')] + + def static deleteTestParentXPath = '/parent-200' + def static deleteTestChildXpath = "${deleteTestParentXPath}/child-with-slash[@key='a/b']" + def static deleteTestGrandChildXPath = "${deleteTestChildXpath}/grandChild" def expectedLeavesByXpathMap = [ '/parent-207' : ['parent-leaf': 'parent-leaf value'], @@ -75,11 +80,6 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { '/parent-207/child-002/grand-child': ['grand-child-leaf': 'grand-child-leaf value'] ] - static { - existingDataNode = createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS) - existingChildDataNode = createDataNodeTree('/parent-1/child-1') - } - @Sql([CLEAR_DATA, SET_DATA]) def 'Get existing datanode with descendants.'() { when: 'the node is retrieved by its xpath' @@ -93,13 +93,13 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) - def 'Storing and Retrieving a new DataNode with descendants.'() { + def 'Storing and Retrieving a new DataNodes with descendants.'() { when: 'a fragment with descendants is stored' def parentXpath = '/parent-new' def childXpath = '/parent-new/child-new' def grandChildXpath = '/parent-new/child-new/grandchild-new' - objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1, - createDataNodeTree(parentXpath, childXpath, grandChildXpath)) + def dataNodes = [createDataNodeTree(parentXpath, childXpath, grandChildXpath)] + objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, dataNodes) then: 'it can be retrieved by its xpath' def dataNode = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, INCLUDE_ALL_DESCENDANTS) assert dataNode.xpath == parentXpath @@ -117,9 +117,9 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { def 'Store data node for multiple anchors using the same schema.'() { def xpath = '/parent-new' given: 'a fragment is stored for an anchor' - objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME1, createDataNodeTree(xpath)) + objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, [createDataNodeTree(xpath)]) when: 'another fragment is stored for an other anchor, using the same schema set' - objectUnderTest.storeDataNode(DATASPACE_NAME, ANCHOR_NAME3, createDataNodeTree(xpath)) + objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME3, [createDataNodeTree(xpath)]) then: 'both fragments can be retrieved by their xpath' def fragment1 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME1, xpath) fragment1.anchor.name == ANCHOR_NAME1 @@ -130,45 
+130,48 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) - def 'Store datanode error scenario: #scenario.'() { + def 'Store datanodes error scenario: #scenario.'() { when: 'attempt to store a data node with #scenario' - objectUnderTest.storeDataNode(dataspaceName, anchorName, dataNode) + objectUnderTest.storeDataNodes(dataspaceName, anchorName, dataNodes) then: 'a #expectedException is thrown' thrown(expectedException) where: 'the following data is used' - scenario | dataspaceName | anchorName | dataNode || expectedException - 'dataspace does not exist' | 'unknown' | 'not-relevant' | newDataNode || DataspaceNotFoundException - 'schema set does not exist' | DATASPACE_NAME | 'unknown' | newDataNode || AnchorNotFoundException - 'anchor already exists' | DATASPACE_NAME | ANCHOR_NAME1 | newDataNode || ConstraintViolationException - 'datanode already exists' | DATASPACE_NAME | ANCHOR_NAME1 | existingDataNode || AlreadyDefinedException + scenario | dataspaceName | anchorName | dataNodes || expectedException + 'dataspace does not exist' | 'unknown' | 'not-relevant' | newDataNodes || DataspaceNotFoundException + 'schema set does not exist' | DATASPACE_NAME | 'unknown' | newDataNodes || AnchorNotFoundException + 'anchor already exists' | DATASPACE_NAME | ANCHOR_NAME1 | newDataNodes || ConstraintViolationException + 'datanode already exists' | DATASPACE_NAME | ANCHOR_NAME1 | existingDataNodes || AlreadyDefinedExceptionBatch } @Sql([CLEAR_DATA, SET_DATA]) - def 'Add a child to a Fragment that already has a child.'() { - given: ' a new child node' - def newChild = createDataNodeTree('xpath for new child') + def 'Add children to a Fragment that already has a child.'() { + given: 'collection of new child data nodes' + def newChild1 = createDataNodeTree('/parent-1/child-2') + def newChild2 = createDataNodeTree('/parent-1/child-3') + def newChildrenCollection = [newChild1, newChild2] when: 'the child is added to an existing parent with 1 child' - objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChild) - then: 'the parent is now has to 2 children' + objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChildrenCollection) + then: 'the parent is now has to 3 children' def expectedExistingChildPath = '/parent-1/child-1' def parentFragment = fragmentRepository.findById(ID_DATA_NODE_WITH_DESCENDANTS).orElseThrow() - parentFragment.childFragments.size() == 2 + parentFragment.childFragments.size() == 3 and: 'it still has the old child' parentFragment.childFragments.find({ it.xpath == expectedExistingChildPath }) - and: 'it has the new child' - parentFragment.childFragments.find({ it.xpath == newChild.xpath }) + and: 'it has the new children' + parentFragment.childFragments.find({ it.xpath == newChildrenCollection[0].xpath }) + parentFragment.childFragments.find({ it.xpath == newChildrenCollection[1].xpath }) } @Sql([CLEAR_DATA, SET_DATA]) def 'Add child error scenario: #scenario.'() { when: 'attempt to add a child data node with #scenario' - objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNode) + objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNodes) then: 'a #expectedException is thrown' thrown(expectedException) where: 'the following data is used' - scenario | parentXpath | dataNode || expectedException - 'parent does not exist' | '/unknown' | newDataNode || DataNodeNotFoundException - 
'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNode || AlreadyDefinedException + scenario | parentXpath | dataNodes || expectedException + 'parent does not exist' | '/unknown' | newDataNodes || DataNodeNotFoundException + 'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNodes || AlreadyDefinedExceptionBatch } @Sql([CLEAR_DATA, SET_DATA]) @@ -288,7 +291,41 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { scenario | dataspaceName | anchorName | xpath || expectedException 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException - 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO XPATH' || DataNodeNotFoundException + 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO-XPATH' || DataNodeNotFoundException + 'invalid xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException + } + + @Sql([CLEAR_DATA, SET_DATA]) + def 'Get multiple data nodes by xpath.'() { + when: 'fetch #scenario.' + def results = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME3, inputXpaths, OMIT_DESCENDANTS) + then: 'the expected number of data nodes are returned' + assert results.size() == expectedResultSize + where: 'following parameters were used' + scenario | inputXpaths || expectedResultSize + '1 node' | ["/parent-200"] || 1 + '2 unique nodes' | ["/parent-200", "/parent-201"] || 2 + '3 unique nodes' | ["/parent-200", "/parent-201", "/parent-202"] || 3 + '1 unique node with duplicate xpath' | ["/parent-200", "/parent-200"] || 1 + '2 unique nodes with duplicate xpath' | ["/parent-200", "/parent-202", "/parent-200"] || 2 + 'list element with key (single quote)' | ["/parent-201/child-204[@key='A']"] || 1 + 'list element with key (double quote)' | ['/parent-201/child-204[@key="A"]'] || 1 + 'non-existing xpath' | ["/NO-XPATH"] || 0 + 'existing and non-existing xpaths' | ["/parent-200", "/NO-XPATH", "/parent-201"] || 2 + 'invalid xpath' | ["INVALID XPATH"] || 0 + 'valid and invalid xpaths' | ["/parent-200", "INVALID XPATH", "/parent-201"] || 2 + } + + @Sql([CLEAR_DATA, SET_DATA]) + def 'Get multiple data nodes error scenario: #scenario.'() { + when: 'attempt to get data nodes with #scenario' + objectUnderTest.getDataNodes(dataspaceName, anchorName, ['/not-relevant'], OMIT_DESCENDANTS) + then: 'a #expectedException is thrown' + thrown(expectedException) + where: 'the following data is used' + scenario | dataspaceName | anchorName || expectedException + 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' || DataspaceNotFoundException + 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' || AnchorNotFoundException } @Sql([CLEAR_DATA, SET_DATA]) @@ -318,7 +355,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { scenario | dataspaceName | anchorName | xpath || expectedException 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException - 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException + 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException } 
@Sql([CLEAR_DATA, SET_DATA]) @@ -412,7 +449,8 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { scenario | dataspaceName | anchorName | xpath || expectedException 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException - 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException + 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException + 'invalid xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException } @Sql([CLEAR_DATA, SET_DATA]) @@ -525,6 +563,25 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) + def 'Delete data nodes with "/"-token in list key value: #scenario. (CPS-1409)'() { + given: 'a data nodes with list-element child with "/" in index value (and grandchild)' + def grandChild = new DataNodeBuilder().withXpath(deleteTestGrandChildXPath).build() + def child = new DataNodeBuilder().withXpath(deleteTestChildXpath).withChildDataNodes([grandChild]).build() + objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME3, deleteTestParentXPath, child) + and: 'number of children before delete is stored' + def numberOfChildrenBeforeDelete = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS).childDataNodes.size() + when: 'target node is deleted' + objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, deleteTarget) + then: 'one child has been deleted' + def numberOfChildrenAfterDelete = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS).childDataNodes.size() + assert numberOfChildrenAfterDelete == numberOfChildrenBeforeDelete - 1 + where: + scenario | deleteTarget | pathToParentOfDeletedNode + 'list element with /' | deleteTestChildXpath | deleteTestParentXPath + 'child of list element' | deleteTestGrandChildXPath | deleteTestChildXpath + } + + @Sql([CLEAR_DATA, SET_DATA]) def 'Delete list error scenario: #scenario.'() { when: 'attempting to delete scenario: #scenario.' 
objectUnderTest.deleteListDataNode(DATASPACE_NAME, ANCHOR_NAME3, targetXpaths) @@ -541,7 +598,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) - def 'Confirm deletion of #scenario.'() { + def 'Delete data node by xpath #scenario.'() { given: 'a valid data node' def dataNode and: 'data nodes are deleted' @@ -566,7 +623,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { } @Sql([CLEAR_DATA, SET_DATA]) - def 'Delete data node with #scenario.'() { + def 'Delete data node error scenario: #scenario.'() { when: 'data node is deleted' objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, datanodeXpath) then: 'a #expectedException is thrown' diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy index e69cbee471..87e59c60dc 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy @@ -1,7 +1,8 @@ /* * ============LICENSE_START======================================================= * Copyright (c) 2021 Bell Canada. - * Modifications Copyright (C) 2021-2022 Nordix Foundation + * Modifications Copyright (C) 2021-2023 Nordix Foundation + * Modifications Copyright (C) 2022 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +35,7 @@ import org.onap.cps.spi.repository.DataspaceRepository import org.onap.cps.spi.repository.FragmentRepository import org.onap.cps.spi.utils.SessionManager import org.onap.cps.utils.JsonObjectMapper +import org.springframework.dao.DataIntegrityViolationException import spock.lang.Specification class CpsDataPersistenceServiceSpec extends Specification { @@ -44,7 +46,28 @@ class CpsDataPersistenceServiceSpec extends Specification { def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper()) def mockSessionManager = Mock(SessionManager) - def objectUnderTest = new CpsDataPersistenceServiceImpl(mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper, mockSessionManager) + def objectUnderTest = Spy(new CpsDataPersistenceServiceImpl(mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper, mockSessionManager)) + + def 'Storing data nodes individually when batch operation fails'(){ + given: 'two data nodes and supporting repository mock behavior' + def dataNode1 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath1','OK') + def dataNode2 = createDataNodeAndMockRepositoryMethodSupportingIt('xpath2','OK') + and: 'the batch store operation will fail' + mockFragmentRepository.saveAll(*_) >> { throw new DataIntegrityViolationException("Exception occurred") } + when: 'trying to store data nodes' + objectUnderTest.storeDataNodes('dataSpaceName', 'anchorName', [dataNode1, dataNode2]) + then: 'the two data nodes are saved individually' + 2 * mockFragmentRepository.save(_); + } + + def 'Store single data node.'() { + given: 'a data node' + def dataNode = new DataNode() + when: 'storing a single data node' + objectUnderTest.storeDataNode('dataspace1', 'anchor1', dataNode) + then: 'the call is redirected to storing a collection of data nodes with just the given data node' + 1 * 
objectUnderTest.storeDataNodes('dataspace1', 'anchor1', [dataNode]) + } def 'Handling of StaleStateException (caused by concurrent updates) during update data node and descendants.'() { given: 'the fragment repository returns a fragment entity' @@ -66,10 +89,10 @@ class CpsDataPersistenceServiceSpec extends Specification { def 'Handling of StaleStateException (caused by concurrent updates) during update data nodes and descendants.'() { given: 'the system contains and can update one datanode' - def dataNode1 = mockDataNodeAndFragmentEntity('/node1', 'OK') + def dataNode1 = createDataNodeAndMockRepositoryMethodSupportingIt('/node1', 'OK') and: 'the system contains two more datanodes that throw an exception while updating' - def dataNode2 = mockDataNodeAndFragmentEntity('/node2', 'EXCEPTION') - def dataNode3 = mockDataNodeAndFragmentEntity('/node3', 'EXCEPTION') + def dataNode2 = createDataNodeAndMockRepositoryMethodSupportingIt('/node2', 'EXCEPTION') + def dataNode3 = createDataNodeAndMockRepositoryMethodSupportingIt('/node3', 'EXCEPTION') and: 'the batch update will therefore also fail' mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") } when: 'attempt batch update data nodes' @@ -84,7 +107,6 @@ class CpsDataPersistenceServiceSpec extends Specification { assert thrown.details.contains('/node3') } - def 'Retrieving a data node with a property JSON value of #scenario'() { given: 'the db has a fragment with an attribute property JSON value of #scenario' mockFragmentWithJson("{\"some attribute\": ${dataString}}") @@ -119,6 +141,20 @@ class CpsDataPersistenceServiceSpec extends Specification { thrown(DataValidationException) } + def 'Retrieving multiple data nodes.'() { + given: 'db contains an anchor' + def anchorEntity = new AnchorEntity(id:123) + mockAnchorRepository.getByDataspaceAndName(*_) >> anchorEntity + and: 'fragment repository returns a collection of fragments' + def fragmentEntity1 = new FragmentEntity(xpath: '/xpath1', childFragments: []) + def fragmentEntity2 = new FragmentEntity(xpath: '/xpath2', childFragments: []) + mockFragmentRepository.findByAnchorAndMultipleCpsPaths(123, ['/xpath1', '/xpath2'] as Set<String>) >> [fragmentEntity1, fragmentEntity2] + when: 'getting data nodes for 2 xpaths' + def result = objectUnderTest.getDataNodes('some-dataspace', 'some-anchor', ['/xpath1', '/xpath2'], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + then: '2 data nodes are returned' + assert result.size() == 2 + } + def 'start session'() { when: 'start session' objectUnderTest.startSession() @@ -142,6 +178,25 @@ class CpsDataPersistenceServiceSpec extends Specification { 1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L) } + def 'update data node leaves: #scenario'(){ + given: 'A node exists for the given xpath' + mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, '/some/xpath') >> new FragmentEntity(xpath: '/some/xpath', attributes: existingAttributes) + when: 'the node leaves are updated' + objectUnderTest.updateDataLeaves('some-dataspace', 'some-anchor', '/some/xpath', newAttributes as Map<String, Serializable>) + then: 'the fragment entity saved has the original and new attributes' + 1 * mockFragmentRepository.save({fragmentEntity -> { + assert fragmentEntity.getXpath() == '/some/xpath' + assert fragmentEntity.getAttributes() == mergedAttributes + }}) + where: 'the following attributes combinations are used' + scenario | existingAttributes | newAttributes | mergedAttributes + 'add new leaf' | 
'{"existing":"value"}' | ["new":"value"] | '{"existing":"value","new":"value"}' + 'update existing leaf' | '{"existing":"value"}' | ["existing":"value2"] | '{"existing":"value2"}' + 'update nothing with nothing' | '' | [] | '' + 'update with nothing' | '{"existing":"value"}' | [] | '{"existing":"value"}' + 'update with same value' | '{"existing":"value"}' | ["existing":"value"] | '{"existing":"value"}' + } + def 'update data node and descendants: #scenario'(){ given: 'mocked responses' mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, '/test/xpath') >> new FragmentEntity(xpath: '/test/xpath', childFragments: []) @@ -174,7 +229,7 @@ class CpsDataPersistenceServiceSpec extends Specification { }}) } - def mockDataNodeAndFragmentEntity(xpath, scenario) { + def createDataNodeAndMockRepositoryMethodSupportingIt(xpath, scenario) { def dataNode = new DataNodeBuilder().withXpath(xpath).build() def fragmentEntity = new FragmentEntity(xpath: xpath, childFragments: []) mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, xpath) >> fragmentEntity @@ -185,11 +240,8 @@ class CpsDataPersistenceServiceSpec extends Specification { } def mockFragmentWithJson(json) { - def anchorName = 'some anchor' - def mockAnchor = Mock(AnchorEntity) - mockAnchor.getId() >> 123 - mockAnchor.getName() >> anchorName - mockAnchorRepository.getByDataspaceAndName(*_) >> mockAnchor + def anchorEntity = new AnchorEntity(id:123) + mockAnchorRepository.getByDataspaceAndName(*_) >> anchorEntity def mockFragmentExtract = Mock(FragmentExtract) mockFragmentExtract.getId() >> 456 mockFragmentExtract.getAttributes() >> json diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy index f9ebc52f18..4c67f7e972 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceIntegrationSpec.groovy @@ -2,6 +2,7 @@ * ============LICENSE_START======================================================= * Copyright (C) 2021-2022 Nordix Foundation * Modifications Copyright (C) 2021-2022 Bell Canada. + * Modifications Copyright (C) 2022 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. 
@@ -28,6 +29,7 @@ import org.onap.cps.spi.exceptions.DataspaceNotFoundException import org.onap.cps.spi.exceptions.SchemaSetNotFoundException import org.onap.cps.spi.model.ModuleDefinition import org.onap.cps.spi.model.ModuleReference +import org.onap.cps.spi.model.SchemaSet import org.onap.cps.spi.repository.AnchorRepository import org.onap.cps.spi.repository.SchemaSetRepository import org.onap.cps.spi.repository.SchemaSetYangResourceRepositoryImpl @@ -209,6 +211,14 @@ class CpsModulePersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase } @Sql([CLEAR_DATA, SET_DATA]) + def 'Retrieve schema sets for a given dataspace name'() { + when: 'the schema set resources for a given dataspace name is retrieved' + def result = objectUnderTest.getSchemaSetsByDataspaceName(DATASPACE_NAME) + then: 'the correct resources are returned' + result.contains(new SchemaSet(name: 'SCHEMA-SET-001', dataspaceName: 'DATASPACE-001')) + } + + @Sql([CLEAR_DATA, SET_DATA]) def 'Delete schema set'() { when: 'a schema set is deleted with cascade-prohibited option' objectUnderTest.deleteSchemaSet(DATASPACE_NAME, SCHEMA_SET_NAME_NO_ANCHORS) @@ -220,8 +230,9 @@ class CpsModulePersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase def 'Identifying new module references where #scenario'() { when: 'identifyNewModuleReferences is called' def result = objectUnderTest.identifyNewModuleReferences(moduleReferences) - then: 'the correct module reference collection is returned' - assert result == expectedResult + then: 'the correct module references are returned' + assert result.size() == expectedResult.size() + assert result.containsAll(expectedResult) where: 'the following data is used' scenario | moduleReferences || expectedResult 'new module references exist' | toModuleReference([['some module 1' : 'some revision 1'], ['some module 2' : 'some revision 2']]) || toModuleReference([['some module 1' : 'some revision 1'], ['some module 2' : 'some revision 2']]) @@ -294,7 +305,7 @@ class CpsModulePersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase def moduleReferences = [].withDefault { [:] } moduleReferenceAsMap.forEach(property -> property.forEach((moduleName, revision) -> { - moduleReferences.add(new ModuleReference('moduleName' : moduleName, 'revision' : revision)) + moduleReferences.add(new ModuleReference(moduleName, revision)) })) return moduleReferences } diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy new file mode 100644 index 0000000000..3bbae2d08c --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy @@ -0,0 +1,74 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.impl + +import org.onap.cps.spi.model.DataNode +import org.onap.cps.spi.model.DataNodeBuilder + +class CpsPersistencePerfSpecBase extends CpsPersistenceSpecBase { + + static final String PERF_TEST_DATA = '/data/perf-test.sql' + static final String PERF_DATASPACE = 'PERF-DATASPACE' + static final String PERF_ANCHOR = 'PERF-ANCHOR' + static final String PERF_TEST_PARENT = '/perf-parent-1' + + static def xpathsToAllGrandChildren = [] + + def createLineage(cpsDataPersistenceService, numberOfChildren, numberOfGrandChildren, createLists) { + xpathsToAllGrandChildren = [] + (1..numberOfChildren).each { + if (createLists) { + def xpathFormat = "${PERF_TEST_PARENT}/perf-test-list-${it}[@key='%d']" + def listElements = goForthAndMultiply(xpathFormat, numberOfGrandChildren) + cpsDataPersistenceService.addListElements(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT, listElements) + } else { + def xpathFormat = "${PERF_TEST_PARENT}/perf-test-child-${it}/perf-test-grand-child-%d" + def grandChildren = goForthAndMultiply(xpathFormat, numberOfGrandChildren) + def child = new DataNodeBuilder() + .withXpath("${PERF_TEST_PARENT}/perf-test-child-${it}") + .withChildDataNodes(grandChildren) + .build() + cpsDataPersistenceService.addChildDataNode(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT, child) + } + } + } + + def goForthAndMultiply(xpathFormat, numberOfGrandChildren) { + def grandChildren = [] + (1..numberOfGrandChildren).each { + def xpath = String.format(xpathFormat as String, it) + def grandChild = new DataNodeBuilder().withXpath(xpath).build() + xpathsToAllGrandChildren.add(grandChild.xpath) + grandChildren.add(grandChild) + } + return grandChildren + } + + def countDataNodes(dataNodes) { + int nodeCount = 1 + for (DataNode parent : dataNodes) { + for (DataNode child : parent.childDataNodes) { + nodeCount = nodeCount + (countDataNodes(child)) + } + } + return nodeCount + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy new file mode 100644 index 0000000000..5aae285d7b --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy @@ -0,0 +1,154 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.performance + +import org.onap.cps.spi.CpsDataPersistenceService +import org.onap.cps.spi.impl.CpsPersistencePerfSpecBase +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.test.context.jdbc.Sql +import org.springframework.util.StopWatch + +import java.util.concurrent.TimeUnit + +class CpsDataPersistenceServiceDeletePerfTest extends CpsPersistencePerfSpecBase { + + @Autowired + CpsDataPersistenceService objectUnderTest + + static def NUMBER_OF_CHILDREN = 100 + static def NUMBER_OF_GRAND_CHILDREN = 50 + static def NUMBER_OF_LISTS = 100 + static def NUMBER_OF_LIST_ELEMENTS = 50 + static def ALLOWED_SETUP_TIME_MS = TimeUnit.SECONDS.toMillis(10) + + def stopWatch = new StopWatch() + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() { + given: 'a node with a large number of descendants is created' + stopWatch.start() + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + stopWatch.stop() + def setupDurationInMillis = stopWatch.getTotalTimeMillis() + and: 'setup duration is under #ALLOWED_SETUP_TIME_MS milliseconds' + assert setupDurationInMillis < ALLOWED_SETUP_TIME_MS + } + + def 'Delete 5 children with grandchildren'() { + when: 'child nodes are deleted' + stopWatch.start() + (1..5).each { + def childPath = "${PERF_TEST_PARENT}/perf-test-child-${it}".toString(); + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, childPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 6000 milliseconds' + assert deleteDurationInMillis < 6000 + } + + def 'Delete 50 grandchildren (that have no descendants)'() { + when: 'target nodes are deleted' + stopWatch.start() + (1..50).each { + def grandchildPath = "${PERF_TEST_PARENT}/perf-test-child-6/perf-test-grand-child-${it}".toString(); + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, grandchildPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 500 milliseconds' + assert deleteDurationInMillis < 500 + } + + def 'Delete 1 large data node with many descendants'() { + when: 'parent node is deleted' + stopWatch.start() + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT) + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 2500 milliseconds' + assert deleteDurationInMillis < 2500 + } + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Create a node with many list elements (please note, subsequent tests depend on this running first).'() { + given: 'a node with a large number of descendants is created' + stopWatch.start() + createLineage(objectUnderTest, NUMBER_OF_LISTS, NUMBER_OF_LIST_ELEMENTS, true) + stopWatch.stop() + def setupDurationInMillis = stopWatch.getTotalTimeMillis() + and: 'setup duration is under #ALLOWED_SETUP_TIME_MS milliseconds' + assert setupDurationInMillis < ALLOWED_SETUP_TIME_MS + } + + def 'Delete 5 whole lists with many elements'() { + when: 'list nodes are deleted' + stopWatch.start() + (1..5).each { + def childPath = "${PERF_TEST_PARENT}/perf-test-list-${it}".toString(); + objectUnderTest.deleteListDataNode(PERF_DATASPACE, PERF_ANCHOR, childPath) + } + stopWatch.stop() + 
def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 4000 milliseconds' + assert deleteDurationInMillis < 4000 + } + + def 'Delete 10 list elements with keys'() { + when: 'list elements are deleted' + stopWatch.start() + (1..10).each { + def key = it.toString() + def grandchildPath = "${PERF_TEST_PARENT}/perf-test-list-6[@key='${key}']" + objectUnderTest.deleteListDataNode(PERF_DATASPACE, PERF_ANCHOR, grandchildPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 6000 milliseconds' + assert deleteDurationInMillis < 6000 + } + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Delete root node with many descendants'() { + given: 'a node with a large number of descendants is created' + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + when: 'root node is deleted' + stopWatch.start() + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, '/') + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 250 milliseconds' + assert deleteDurationInMillis < 250 + } + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Delete data nodes for an anchor'() { + given: 'a node with a large number of descendants is created' + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + when: 'data nodes are deleted' + stopWatch.start() + objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR) + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 250 milliseconds' + assert deleteDurationInMillis < 250 + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy new file mode 100644 index 0000000000..2346239dff --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy @@ -0,0 +1,125 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.performance + +import org.onap.cps.spi.impl.CpsPersistencePerfSpecBase +import org.springframework.util.StopWatch +import org.onap.cps.spi.CpsDataPersistenceService +import org.onap.cps.spi.repository.AnchorRepository +import org.onap.cps.spi.repository.DataspaceRepository +import org.onap.cps.spi.repository.FragmentRepository +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.test.context.jdbc.Sql + +import java.util.concurrent.TimeUnit + +import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS +import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS + +class CpsDataPersistenceServicePerfTest extends CpsPersistencePerfSpecBase { + + @Autowired + CpsDataPersistenceService objectUnderTest + + @Autowired + DataspaceRepository dataspaceRepository + + @Autowired + AnchorRepository anchorRepository + + @Autowired + FragmentRepository fragmentRepository + + static def NUMBER_OF_CHILDREN = 200 + static def NUMBER_OF_GRAND_CHILDREN = 50 + static def TOTAL_NUMBER_OF_NODES = 1 + NUMBER_OF_CHILDREN + (NUMBER_OF_CHILDREN * NUMBER_OF_GRAND_CHILDREN) // Parent + Children + Grand-children + + def stopWatch = new StopWatch() + def readStopWatch = new StopWatch() + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() { + given: 'a node with a large number of descendants is created' + stopWatch.start() + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + stopWatch.stop() + def setupDurationInMillis = stopWatch.getTotalTimeMillis() + and: 'setup duration is under 10 seconds' + assert setupDurationInMillis < 10000 + } + + def 'Get data node with many descendants by xpath #scenario'() { + when: 'get parent is executed with all descendants' + stopWatch.start() + def result = objectUnderTest.getDataNode(PERF_DATASPACE, PERF_ANCHOR, xpath, INCLUDE_ALL_DESCENDANTS) + stopWatch.stop() + def readDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'read duration is under 500 milliseconds' + assert readDurationInMillis < 500 + and: 'data node is returned with all the descendants populated' + assert countDataNodes(result) == TOTAL_NUMBER_OF_NODES + where: 'the following xPaths are used' + scenario || xpath + 'parent' || PERF_TEST_PARENT + 'root' || '' + } + + def 'Query parent data node with many descendants by cps-path'() { + when: 'query is executed with all descendants' + stopWatch.start() + def result = objectUnderTest.queryDataNodes(PERF_DATASPACE, PERF_ANCHOR, '//perf-parent-1' , INCLUDE_ALL_DESCENDANTS) + stopWatch.stop() + def readDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'read duration is under 500 milliseconds' + assert readDurationInMillis < 500 + and: 'data node is returned with all the descendants populated' + assert countDataNodes(result) == TOTAL_NUMBER_OF_NODES + } + + def 'Performance of finding multiple xpaths'() { + when: 'we query for all grandchildren (except 1 for fun) with the new native method' + xpathsToAllGrandChildren.remove(0) + readStopWatch.start() + def result = objectUnderTest.getDataNodes(PERF_DATASPACE, PERF_ANCHOR, xpathsToAllGrandChildren, INCLUDE_ALL_DESCENDANTS) + readStopWatch.stop() + def readDurationInMillis = readStopWatch.getTotalTimeMillis() + then: 'the returned number of entities equal to the 
number of children * number of grandchildren' + assert result.size() == xpathsToAllGrandChildren.size() + and: 'it took less then 4000ms' + assert readDurationInMillis < 4000 + } + + def 'Query many descendants by cps-path with #scenario'() { + when: 'query is executed with all descendants' + stopWatch.start() + def result = objectUnderTest.queryDataNodes(PERF_DATASPACE, PERF_ANCHOR, '//perf-test-grand-child-1', descendantsOption) + stopWatch.stop() + def readDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'read duration is under #allowedDuration milliseconds' + assert readDurationInMillis < allowedDuration + and: 'data node is returned with all the descendants populated' + assert result.size() == NUMBER_OF_CHILDREN + where: 'the following options are used' + scenario | descendantsOption || allowedDuration + 'omit descendants ' | OMIT_DESCENDANTS || 150 + 'include descendants (although there are none)' | INCLUDE_ALL_DESCENDANTS || 150 + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy new file mode 100644 index 0000000000..9b722cddae --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy @@ -0,0 +1,103 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.performance + +import org.onap.cps.spi.CpsModulePersistenceService +import org.onap.cps.spi.entities.SchemaSetEntity +import org.onap.cps.spi.impl.CpsPersistenceSpecBase +import org.onap.cps.spi.model.ModuleReference +import org.onap.cps.spi.repository.DataspaceRepository +import org.onap.cps.spi.repository.ModuleReferenceRepository +import org.onap.cps.spi.repository.SchemaSetRepository +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.test.context.jdbc.Sql +import org.springframework.util.StopWatch + +import java.util.concurrent.ThreadLocalRandom + +class CpsModuleReferenceRepositoryPerfTest extends CpsPersistenceSpecBase { + + static final String PERF_TEST_DATA = '/data/perf-test.sql' + + def NEW_RESOURCE_CONTENT = 'module stores {\n' + + ' yang-version 1.1;\n' + + ' namespace "org:onap:ccsdk:sample";\n' + + '\n' + + ' prefix book-store;\n' + + '\n' + + ' revision "2020-09-15" {\n' + + ' description\n' + + ' "Sample Model";\n' + + ' }' + + '}' + + @Autowired + CpsModulePersistenceService objectUnderTest + + @Autowired + DataspaceRepository dataspaceRepository + + @Autowired + SchemaSetRepository schemaSetRepository + + @Autowired + ModuleReferenceRepository moduleReferenceRepository + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Store new schema set with many modules'() { + when: 'a new schema set with 200 modules is stored' + def newYangResourcesNameToContentMap = [:] + (1..200).each { + def year = 2000 + it + def resourceName = "module${it}".toString() + def moduleName = "stores${it}" + def content = NEW_RESOURCE_CONTENT.replace('2020',String.valueOf(year)).replace('stores',moduleName) + newYangResourcesNameToContentMap.put(resourceName, content) + } + objectUnderTest.storeSchemaSet('PERF-DATASPACE', 'perfSchemaSet', newYangResourcesNameToContentMap) + then: 'the schema set is persisted correctly' + def dataspaceEntity = dataspaceRepository.getByName('PERF-DATASPACE') + SchemaSetEntity result = schemaSetRepository.getByDataspaceAndName(dataspaceEntity, 'perfSchemaSet') + result.yangResources.size() == 200 + and: 'identification of new module resources is fast enough (1,000 executions less then 6,000 milliseconds)' + def stopWatch = new StopWatch() + 1000.times() { + def moduleReferencesToCheck = createModuleReferencesWithRandomMatchingExistingModuleReferences() + stopWatch.start() + def newModuleReferences = moduleReferenceRepository.identifyNewModuleReferences(moduleReferencesToCheck) + stopWatch.stop() + assert newModuleReferences.size() > 0 && newModuleReferences.size() < 300 + } + assert stopWatch.getTotalTimeMillis() < 6000 + } + + def createModuleReferencesWithRandomMatchingExistingModuleReferences() { + def moduleReferences = [] + (1..250).each { + def randomNumber = ThreadLocalRandom.current().nextInt(1, 300) + def year = 2000 + randomNumber + def moduleName = "stores${randomNumber}" + moduleReferences.add(new ModuleReference(moduleName, "${year}-09-15")) + } + return moduleReferences + } + +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy deleted file mode 100644 index fb6749c3fe..0000000000 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * 
============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.spi.performance - -import org.apache.commons.lang3.time.StopWatch -import org.onap.cps.spi.CpsDataPersistenceService -import org.onap.cps.spi.impl.CpsPersistenceSpecBase -import org.onap.cps.spi.model.DataNode -import org.onap.cps.spi.model.DataNodeBuilder -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.test.context.jdbc.Sql -import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS - -class CpsToDataNodePerfTest extends CpsPersistenceSpecBase { - - static final String PERF_TEST_DATA = '/data/perf-test.sql' - - @Autowired - CpsDataPersistenceService objectUnderTest - - def PERF_TEST_PARENT = '/perf-parent-1' - - def EXPECTED_NUMBER_OF_NODES = 10051 // 1 Parent + 50 Children + 10000 Grand-children - - @Sql([CLEAR_DATA, PERF_TEST_DATA]) - def 'Get data node by xpath with all descendants with many children'() { - given: 'nodes and grandchildren have been persisted' - def setupStopWatch = new StopWatch() - setupStopWatch.start() - createLineage() - setupStopWatch.stop() - def setupDurationInMillis = setupStopWatch.getTime() - and: 'setup duration is under 8000 milliseconds' - assert setupDurationInMillis < 8000 - when: 'get parent is executed with all descendants' - def readStopWatch = new StopWatch() - readStopWatch.start() - def result = objectUnderTest.getDataNode('PERF-DATASPACE', 'PERF-ANCHOR', PERF_TEST_PARENT, INCLUDE_ALL_DESCENDANTS) - readStopWatch.stop() - def readDurationInMillis = readStopWatch.getTime() - then: 'read duration is under 450 milliseconds' - assert readDurationInMillis < 450 - and: 'data node is returned with all the descendants populated' - assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES - when: 'get root is executed with all descendants' - readStopWatch.reset() - readStopWatch.start() - result = objectUnderTest.getDataNode('PERF-DATASPACE', 'PERF-ANCHOR', '', INCLUDE_ALL_DESCENDANTS) - readStopWatch.stop() - readDurationInMillis = readStopWatch.getTime() - then: 'read duration is under 450 milliseconds' - assert readDurationInMillis < 450 - and: 'data node is returned with all the descendants populated' - assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES - when: 'query is executed with all descendants' - readStopWatch.reset() - readStopWatch.start() - result = objectUnderTest.queryDataNodes('PERF-DATASPACE', 'PERF-ANCHOR', '//perf-parent-1', INCLUDE_ALL_DESCENDANTS) - readStopWatch.stop() - readDurationInMillis = readStopWatch.getTime() - then: 'read duration is under 450 milliseconds' - assert readDurationInMillis < 450 - and: 'data node is returned with all the descendants 
populated' - assert countDataNodes(result) == EXPECTED_NUMBER_OF_NODES - } - - def createLineage() { - def numOfChildren = 50 - def numOfGrandChildren = 200 - (1..numOfChildren).each { - def childName = "perf-test-child-${it}".toString() - def newChild = goForthAndMultiply(PERF_TEST_PARENT, childName, numOfGrandChildren) - objectUnderTest.addChildDataNode('PERF-DATASPACE', 'PERF-ANCHOR', PERF_TEST_PARENT, newChild) - } - } - - def goForthAndMultiply(parentXpath, childName, numOfGrandChildren) { - def children = [] - (1..numOfGrandChildren).each { - def child = new DataNodeBuilder().withXpath("${parentXpath}/${childName}/${it}perf-test-grand-child").build() - children.add(child) - } - return new DataNodeBuilder().withXpath("${parentXpath}/${childName}").withChildDataNodes(children).build() - } - - def countDataNodes(dataNodes) { - int nodeCount = 1 - for (DataNode parent : dataNodes) { - for (DataNode child : parent.childDataNodes) { - nodeCount = nodeCount + (countDataNodes(child)) - } - } - return nodeCount - } -} |
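The new unit test in CpsDataPersistenceServiceSpec above ('Storing data nodes individually when batch operation fails') checks that when the repository's saveAll call throws a DataIntegrityViolationException, the service falls back to saving each fragment on its own. A framework-free Groovy sketch of that fallback pattern, using a hypothetical in-memory repository and a plain IllegalStateException in place of the Spring exception:

    // Hypothetical stand-in for the fragment repository: the batch call always fails,
    // the single-entity call succeeds.
    class FlakyRepository {
        List saved = []
        void saveAll(Collection entities) { throw new IllegalStateException('batch insert rejected') }
        void save(Object entity) { saved << entity }
    }

    def repository = new FlakyRepository()
    def fragments = ['/parent-new', '/parent-new/child-new']

    try {
        repository.saveAll(fragments)              // try the batch insert first
    } catch (IllegalStateException ignored) {
        fragments.each { repository.save(it) }     // fall back to storing nodes one by one
    }

    assert repository.saved == fragments

Saving individually after a failed batch keeps one bad node from blocking the rest, which is also why the spec expects exactly two save(_) calls once saveAll has thrown.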