From dd1b77b906052911b4883c7fa515ca1f2f2e9fda Mon Sep 17 00:00:00 2001 From: danielhanrahan Date: Tue, 9 Jul 2024 12:06:20 +0100 Subject: Split integration tests into cps and ncmp packages Issue-ID: CPS-2309 Signed-off-by: danielhanrahan Change-Id: Ie97c1430d1121c4c68b3fc6dacbc2466d48e1017 --- .../CpsAnchorServiceIntegrationSpec.groovy | 121 ---- .../CpsDataServiceIntegrationSpec.groovy | 649 --------------------- .../CpsDataspaceServiceIntegrationSpec.groovy | 107 ---- .../CpsModuleServiceIntegrationSpec.groovy | 364 ------------ .../CpsQueryServiceIntegrationSpec.groovy | 425 -------------- .../NcmpBearerTokenPassthroughSpec.groovy | 101 ---- .../functional/NcmpCmHandleCreateSpec.groovy | 178 ------ .../functional/NcmpCmHandleUpgradeSpec.groovy | 177 ------ .../NcmpCmNotificationSubscriptionSpec.groovy | 112 ---- .../integration/functional/NcmpRestApiSpec.groovy | 89 --- .../SessionManagerIntegrationSpec.groovy | 73 --- .../cps/AnchorServiceIntegrationSpec.groovy | 121 ++++ .../cps/DataServiceIntegrationSpec.groovy | 649 +++++++++++++++++++++ .../cps/DataspaceServiceIntegrationSpec.groovy | 107 ++++ .../cps/ModuleServiceIntegrationSpec.groovy | 364 ++++++++++++ .../cps/QueryServiceIntegrationSpec.groovy | 425 ++++++++++++++ .../cps/SessionManagerIntegrationSpec.groovy | 73 +++ .../ncmp/BearerTokenPassthroughSpec.groovy | 101 ++++ .../functional/ncmp/CmHandleCreateSpec.groovy | 178 ++++++ .../functional/ncmp/CmHandleUpgradeSpec.groovy | 177 ++++++ .../ncmp/CmNotificationSubscriptionSpec.groovy | 112 ++++ .../integration/functional/ncmp/RestApiSpec.groovy | 89 +++ 22 files changed, 2396 insertions(+), 2396 deletions(-) delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsAnchorServiceIntegrationSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy delete mode 100644 
integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataspaceServiceIntegrationSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsModuleServiceIntegrationSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsQueryServiceIntegrationSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpBearerTokenPassthroughSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmHandleCreateSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmHandleUpgradeSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmNotificationSubscriptionSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpRestApiSpec.groovy delete mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/SessionManagerIntegrationSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/AnchorServiceIntegrationSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataServiceIntegrationSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataspaceServiceIntegrationSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/ModuleServiceIntegrationSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/QueryServiceIntegrationSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/SessionManagerIntegrationSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/BearerTokenPassthroughSpec.groovy create 
mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleCreateSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpgradeSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmNotificationSubscriptionSpec.groovy create mode 100644 integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/RestApiSpec.groovy (limited to 'integration-test/src') diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsAnchorServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsAnchorServiceIntegrationSpec.groovy deleted file mode 100644 index 26857799a1..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsAnchorServiceIntegrationSpec.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation - * Modifications Copyright (C) 2024 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import java.time.OffsetDateTime - -import org.onap.cps.api.CpsAnchorService -import org.onap.cps.integration.base.CpsIntegrationSpecBase -import org.onap.cps.spi.FetchDescendantsOption -import org.onap.cps.spi.exceptions.AlreadyDefinedException -import org.onap.cps.spi.exceptions.AnchorNotFoundException -import org.onap.cps.utils.ContentType - -class CpsAnchorServiceIntegrationSpec extends CpsIntegrationSpecBase { - - CpsAnchorService objectUnderTest - - def setup() { objectUnderTest = cpsAnchorService } - - def 'Anchor CRUD operations.'() { - when: 'an anchor is created' - objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'newAnchor') - then: 'the anchor be read' - assert objectUnderTest.getAnchor(GENERAL_TEST_DATASPACE, 'newAnchor').name == 'newAnchor' - and: 'it can be deleted' - objectUnderTest.deleteAnchor(GENERAL_TEST_DATASPACE,'newAnchor') - then: 'the anchor no longer exists i.e. 
an exception is thrown if an attempt is made to retrieve it' - def thrown = null - try { - objectUnderTest.getAnchor(GENERAL_TEST_DATASPACE, 'newAnchor') - } catch(Exception exception) { - thrown = exception - } - assert thrown instanceof AnchorNotFoundException - } - - def 'Filtering multiple anchors.'() { - when: '2 anchors with bookstore schema set are created' - objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'anchor1') - objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'anchor2') - and: '1 anchor with "other" schema set is created' - createStandardBookStoreSchemaSet(GENERAL_TEST_DATASPACE, 'otherSchemaSet') - objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, 'otherSchemaSet', 'anchor3') - then: 'there are 3 anchors in the general test database' - assert objectUnderTest.getAnchors(GENERAL_TEST_DATASPACE).size() == 3 - and: 'there are 2 anchors associated with bookstore schema set' - assert objectUnderTest.getAnchorsBySchemaSetName(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET).size() == 2 - and: 'there is 1 anchor associated with other schema set' - assert objectUnderTest.getAnchorsBySchemaSetName(GENERAL_TEST_DATASPACE, 'otherSchemaSet').size() == 1 - } - - def 'Querying anchor(name)s (depends on previous test!).'() { - expect: 'there are now 3 anchors using the "stores" module (both schema sets use the same modules) ' - assert objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['stores', 'bookstore-types']).size() == 3 - and: 'there are no anchors using both "stores" and a "unused-model"' - assert objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['stores', 'unused-model']).size() == 0 - } - - def 'Duplicate anchors.'() { - given: 'an anchor is created' - objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'newAnchor') - when: 'attempt to create another anchor with the same name' - objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'newAnchor') - 
then: 'an exception is thrown that the anchor already is defined' - thrown(AlreadyDefinedException) - cleanup: - objectUnderTest.deleteAnchor(GENERAL_TEST_DATASPACE, 'newAnchor') - } - - def 'Query anchors without any known modules'() { - when: 'querying for anchors with #scenario' - def result = objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['unknownModule']) - then: 'an empty result is returned (no error)' - assert result == [] - } - - def 'Update anchor schema set.'() { - when: 'a new schema set with tree yang model is created' - def newTreeYangModelAsString = readResourceDataFile('tree/new-test-tree.yang') - cpsModuleService.createSchemaSet(GENERAL_TEST_DATASPACE, 'newTreeSchemaSet', [tree: newTreeYangModelAsString]) - then: 'an anchor with new schema set is created' - objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, 'newTreeSchemaSet', 'anchor4') - and: 'the new tree datanode is saved' - def treeJsonData = readResourceDataFile('tree/new-test-tree.json') - cpsDataService.saveData(GENERAL_TEST_DATASPACE, 'anchor4', treeJsonData, OffsetDateTime.now()) - and: 'saved tree data node can be retrieved by its normalized xpath' - def branchName = cpsDataService.getDataNodes(GENERAL_TEST_DATASPACE, 'anchor4', "/test-tree/branch", FetchDescendantsOption.DIRECT_CHILDREN_ONLY)[0].leaves['name'] - assert branchName == 'left' - and: 'a another schema set with updated tree yang model is created' - def updatedTreeYangModelAsString = readResourceDataFile('tree/updated-test-tree.yang') - cpsModuleService.createSchemaSet(GENERAL_TEST_DATASPACE, 'anotherTreeSchemaSet', [tree: updatedTreeYangModelAsString]) - and: 'anchor4 schema set is updated with another schema set successfully' - objectUnderTest.updateAnchorSchemaSet(GENERAL_TEST_DATASPACE, 'anchor4', 'anotherTreeSchemaSet') - when: 'updated tree data node with new leaves' - def updatedTreeJsonData = readResourceDataFile('tree/updated-test-tree.json') - cpsDataService.updateNodeLeaves(GENERAL_TEST_DATASPACE, 
"anchor4", "/test-tree/branch[@name='left']", updatedTreeJsonData, OffsetDateTime.now(), ContentType.JSON) - then: 'updated tree data node can be retrieved by its normalized xpath' - def birdsName = cpsDataService.getDataNodes(GENERAL_TEST_DATASPACE, 'anchor4',"/test-tree/branch[@name='left']/nest", FetchDescendantsOption.DIRECT_CHILDREN_ONLY)[0].leaves['birds'] as List - assert birdsName.size() == 3 - assert birdsName.containsAll('Night Owl', 'Raven', 'Crow') - } -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy deleted file mode 100644 index 779c0b84c4..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy +++ /dev/null @@ -1,649 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation - * Modifications Copyright (C) 2023-2024 TechMahindra Ltd. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import org.onap.cps.api.CpsDataService -import org.onap.cps.integration.base.FunctionalSpecBase -import org.onap.cps.spi.FetchDescendantsOption -import org.onap.cps.spi.exceptions.AlreadyDefinedException -import org.onap.cps.spi.exceptions.AnchorNotFoundException -import org.onap.cps.spi.exceptions.CpsAdminException -import org.onap.cps.spi.exceptions.CpsPathException -import org.onap.cps.spi.exceptions.DataNodeNotFoundException -import org.onap.cps.spi.exceptions.DataNodeNotFoundExceptionBatch -import org.onap.cps.spi.exceptions.DataValidationException -import org.onap.cps.spi.exceptions.DataspaceNotFoundException -import org.onap.cps.spi.model.DeltaReport -import org.onap.cps.utils.ContentType - -import static org.onap.cps.spi.FetchDescendantsOption.DIRECT_CHILDREN_ONLY -import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS -import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS - -class CpsDataServiceIntegrationSpec extends FunctionalSpecBase { - - CpsDataService objectUnderTest - def originalCountBookstoreChildNodes - def originalCountBookstoreTopLevelListNodes - - def setup() { - objectUnderTest = cpsDataService - originalCountBookstoreChildNodes = countDataNodesInBookstore() - originalCountBookstoreTopLevelListNodes = countTopLevelListDataNodesInBookstore() - } - - def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() { - when: 'get data nodes for bookstore container' - def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', fetchDescendantsOption) - then: 'the tree consist ouf of #expectNumberOfDataNodes data nodes' - assert countDataNodesInTree(result) == expectNumberOfDataNodes - and: 'the top level data node has the expected attribute and value' - assert 
result.leaves['bookstore-name'] == ['Easons-1'] - and: 'they are from the correct dataspace' - assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1] - and: 'they are from the correct anchor' - assert result.anchorName == [BOOKSTORE_ANCHOR_1] - where: 'the following option is used' - fetchDescendantsOption || expectNumberOfDataNodes - OMIT_DESCENDANTS || 1 - DIRECT_CHILDREN_ONLY || 7 - INCLUDE_ALL_DESCENDANTS || 28 - new FetchDescendantsOption(2) || 28 - } - - def 'Read bookstore top-level container(s) using "root" path variations.'() { - when: 'get data nodes for bookstore container' - def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, root, OMIT_DESCENDANTS) - then: 'the tree consist correct number of data nodes' - assert countDataNodesInTree(result) == 2 - and: 'the top level data node has the expected number of leaves' - assert result.leaves.size() == 2 - where: 'the following variations of "root" are used' - root << [ '/', '' ] - } - - def 'Read data nodes with error: #cpsPath'() { - when: 'attempt to get data nodes using invalid path' - objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, DIRECT_CHILDREN_ONLY) - then: 'a #expectedException is thrown' - thrown(expectedException) - where: - cpsPath || expectedException - 'invalid path' || CpsPathException - '/non-existing-path' || DataNodeNotFoundException - } - - def 'Read (multiple) data nodes (batch) with #cpsPath'() { - when: 'attempt to get data nodes using invalid path' - objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ cpsPath ], DIRECT_CHILDREN_ONLY) - then: 'no exception is thrown' - noExceptionThrown() - where: - cpsPath << [ 'invalid path', '/non-existing-path' ] - } - - def 'Get data nodes error scenario #scenario'() { - when: 'attempt to retrieve data nodes' - objectUnderTest.getDataNodes(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS) - then: 'expected exception is 
thrown' - thrown(expectedException) - where: 'following data is used' - scenario | dataspaceName | anchorName | xpath || expectedException - 'non existent dataspace' | 'non-existent' | 'not-relevant' | '/not-relevant' || DataspaceNotFoundException - 'non existent anchor' | FUNCTIONAL_TEST_DATASPACE_1 | 'non-existent' | '/not-relevant' || AnchorNotFoundException - 'non-existent xpath' | FUNCTIONAL_TEST_DATASPACE_1 | BOOKSTORE_ANCHOR_1| '/non-existing' || DataNodeNotFoundException - 'invalid-dataspace' | 'Invalid dataspace' | 'not-relevant' | '/not-relevant' || DataValidationException - 'invalid-dataspace' | FUNCTIONAL_TEST_DATASPACE_1 | 'Invalid Anchor' | '/not-relevant' || DataValidationException - } - - def 'Delete root data node.'() { - when: 'the "root" is deleted' - objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ '/' ], now) - and: 'attempt to get the top level data node' - objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY) - then: 'an datanode not found exception is thrown' - thrown(DataNodeNotFoundException) - cleanup: - restoreBookstoreDataAnchor(1) - } - - def 'Get whole list data' () { - def xpathForWholeList = "/bookstore/categories" - when: 'get data nodes for bookstore container' - def dataNodes = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, xpathForWholeList, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) - then: 'the tree consist ouf of #expectNumberOfDataNodes data nodes' - assert dataNodes.size() == 5 - and: 'each datanode contains the list node xpath partially in its xpath' - dataNodes.each {dataNode -> - assert dataNode.xpath.contains(xpathForWholeList) - } - } - - def 'Read (multiple) data nodes with #scenario' () { - when: 'attempt to get data nodes using multiple valid xpaths' - def dataNodes = objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, xpath, OMIT_DESCENDANTS) - 
then: 'expected numer of data nodes are returned' - dataNodes.size() == expectedNumberOfDataNodes - where: 'the following data was used' - scenario | xpath | expectedNumberOfDataNodes - 'container-node xpath' | ['/bookstore'] | 1 - 'list-item' | ['/bookstore/categories[@code=1]'] | 1 - 'parent-list xpath' | ['/bookstore/categories'] | 5 - 'child-list xpath' | ['/bookstore/categories[@code=1]/books'] | 2 - 'both parent and child list xpath' | ['/bookstore/categories', '/bookstore/categories[@code=1]/books'] | 7 - } - - def 'Add and Delete a (container) data node using #scenario.'() { - when: 'the new datanode is saved' - objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , parentXpath, json, now) - then: 'it can be retrieved by its normalized xpath' - def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, normalizedXpathToNode, DIRECT_CHILDREN_ONLY) - assert result.size() == 1 - assert result[0].xpath == normalizedXpathToNode - and: 'there is now one extra datanode' - assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore() - when: 'the new datanode is deleted' - objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, normalizedXpathToNode, now) - then: 'the original number of data nodes is restored' - assert originalCountBookstoreChildNodes == countDataNodesInBookstore() - where: - scenario | parentXpath | json || normalizedXpathToNode - 'normalized parent xpath' | '/bookstore' | '{"webinfo": {"domain-name":"ourbookstore.com", "contact-email":"info@ourbookstore.com" }}' || "/bookstore/webinfo" - 'non-normalized parent xpath' | '/bookstore/categories[ @code="1"]' | '{"books": {"title":"new" }}' || "/bookstore/categories[@code='1']/books[@title='new']" - } - - def 'Attempt to create a top level data node using root.'() { - given: 'a new anchor' - cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET, 'newAnchor1'); - when: 'attempt to save new 
top level datanode' - def json = '{"bookstore": {"bookstore-name": "New Store"} }' - objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, 'newAnchor1' , '/', json, now) - then: 'since there is no data a data node not found exception is thrown' - thrown(DataNodeNotFoundException) - } - - def 'Attempt to save top level data node that already exist'() { - when: 'attempt to save already existing top level node' - def json = '{"bookstore": {} }' - objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, json, now) - then: 'an exception that (one cps paths is) already defined is thrown ' - def exceptionThrown = thrown(AlreadyDefinedException) - exceptionThrown.alreadyDefinedObjectNames == ['/bookstore' ] as Set - cleanup: - restoreBookstoreDataAnchor(1) - } - - def 'Delete a single datanode with invalid path.'() { - when: 'attempt to delete a single datanode with invalid path' - objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/invalid path', now) - then: 'a cps path parser exception is thrown' - thrown(CpsPathException) - } - - def 'Delete multiple data nodes with invalid path.'() { - when: 'attempt to delete datanode collection with invalid path' - objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, ['/invalid path'], now) - then: 'the error is silently ignored' - noExceptionThrown() - } - - def 'Delete single data node with non-existing path.'() { - when: 'attempt to delete a single datanode non-existing invalid path' - objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/does/not/exist', now) - then: 'a datanode not found exception is thrown' - thrown(DataNodeNotFoundException) - } - - def 'Delete multiple data nodes with non-existing path(s).'() { - when: 'attempt to delete a single datanode non-existing invalid path' - objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, ['/does/not/exist'], now) - then: 'a datanode not found 
(batch) exception is thrown' - thrown(DataNodeNotFoundExceptionBatch) - } - - def 'Add and Delete top-level list (element) data nodes with root node.'() { - given: 'a new (multiple-data-tree:invoice) datanodes' - def json = '{"bookstore-address":[{"bookstore-name":"Easons","address":"Bangalore,India","postal-code":"560043"}]}' - when: 'the new list elements are saved' - objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/', json, now) - then: 'they can be retrieved by their xpaths' - objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore-address[@bookstore-name="Easons"]', INCLUDE_ALL_DESCENDANTS) - and: 'there is one extra datanode' - assert originalCountBookstoreTopLevelListNodes + 1 == countTopLevelListDataNodesInBookstore() - when: 'the new elements are deleted' - objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore-address[@bookstore-name="Easons"]', now) - then: 'the original number of datanodes is restored' - assert originalCountBookstoreTopLevelListNodes == countTopLevelListDataNodesInBookstore() - } - - def 'Add and Delete list (element) data nodes.'() { - given: 'two new (categories) data nodes' - def json = '{"categories": [ {"code":"new1"}, {"code":"new2" } ] }' - when: 'the new list elements are saved' - objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) - then: 'they can be retrieved by their xpaths' - objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1 - objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1 - and: 'there are now two extra data nodes' - assert originalCountBookstoreChildNodes + 2 == countDataNodesInBookstore() - when: 'the new elements are deleted' - 
objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now) - objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now) - then: 'the original number of data nodes is restored' - assert originalCountBookstoreChildNodes == countDataNodesInBookstore() - } - - def 'Add list (element) data nodes that already exist.'() { - given: 'two (categories) data nodes, one new and one existing' - def json = '{"categories": [ {"code":"1"}, {"code":"new1"} ] }' - when: 'attempt to save the list element' - objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) - then: 'an exception that (one cps paths is) already defined is thrown ' - def exceptionThrown = thrown(AlreadyDefinedException) - exceptionThrown.alreadyDefinedObjectNames == ['/bookstore/categories[@code=\'1\']' ] as Set - and: 'there is now one extra data nodes' - assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore() - cleanup: - restoreBookstoreDataAnchor(1) - } - - def 'Add and Delete list (element) data nodes using lists specific method.'() { - given: 'a new (categories) data nodes' - def json = '{"categories": [ {"code":"new1"} ] }' - and: 'the new list element is saved' - objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) - when: 'the new element is deleted' - objectUnderTest.deleteListOrListElement(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now) - then: 'the original number of data nodes is restored' - assert originalCountBookstoreChildNodes == countDataNodesInBookstore() - } - - def 'Add and Delete a batch of list element data nodes.'() { - given: 'two new (categories) data nodes in a single batch' - def json = '{"categories": [ {"code":"new1"}, {"code":"new2"} ] }' - when: 'the batches of new list element(s) are 
saved' - objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) - then: 'they can be retrieved by their xpaths' - assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1 - assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1 - and: 'there are now two extra data nodes' - assert originalCountBookstoreChildNodes + 2 == countDataNodesInBookstore() - when: 'the new elements are deleted' - objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now) - objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now) - then: 'the original number of data nodes is restored' - assert originalCountBookstoreChildNodes == countDataNodesInBookstore() - } - - def 'Add and Delete a batch of list element data nodes with partial success.'() { - given: 'one existing and one new (categories) data nodes in a single batch' - def json = '{"categories": [ {"code":"new1"}, {"code":"1"} ] }' - when: 'the batches of new list element(s) are saved' - objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) - then: 'an already defined (batch) exception is thrown for the existing path' - def exceptionThrown = thrown(AlreadyDefinedException) - assert exceptionThrown.alreadyDefinedObjectNames == ['/bookstore/categories[@code=\'1\']' ] as Set - and: 'there is now one extra data node' - assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore() - cleanup: - restoreBookstoreDataAnchor(1) - } - - def 'Attempt to add empty lists.'() { - when: 'the batches of new list element(s) are saved' - objectUnderTest.replaceListContent(FUNCTIONAL_TEST_DATASPACE_1, 
BOOKSTORE_ANCHOR_1 , '/bookstore', [ ], now) - then: 'an admin exception is thrown' - thrown(CpsAdminException) - } - - def 'Add child error scenario: #scenario.'() { - when: 'attempt to add a child data node with #scenario' - objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, parentXpath, json, now) - then: 'a #expectedException is thrown' - thrown(expectedException) - where: 'the following data is used' - scenario | parentXpath | json || expectedException - 'parent does not exist' | '/bookstore/categories[@code="unknown"]' | '{"books": [ {"title":"new"} ] } ' || DataNodeNotFoundException - 'already existing child' | '/bookstore' | '{"categories": [ {"code":"1"} ] }' || AlreadyDefinedException - } - - def 'Add multiple child data nodes with partial success.'() { - given: 'one existing and one new list element' - def json = '{"categories": [ {"code":"1"}, {"code":"new"} ] }' - when: 'attempt to add the elements' - objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now) - then: 'an already defined (batch) exception is thrown for the existing path' - def thrown = thrown(AlreadyDefinedException) - assert thrown.alreadyDefinedObjectNames == [ "/bookstore/categories[@code='1']" ] as Set - and: 'the new data node has been added i.e. 
can be retrieved' - assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new"]', DIRECT_CHILDREN_ONLY).size() == 1 - } - - def 'Replace list content #scenario.'() { - given: 'the bookstore categories 1 and 2 exist and have at least 1 child each ' - assert countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="1"]', DIRECT_CHILDREN_ONLY)) > 1 - assert countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="2"]', DIRECT_CHILDREN_ONLY)) > 1 - when: 'the categories list is replaced with just category "1" and without child nodes (books)' - def json = '{"categories": [ {"code":"' +categoryCode + '"' + childJson + '} ] }' - objectUnderTest.replaceListContent(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now) - then: 'the new replaced category can be retrieved but has no children anymore' - assert expectedNumberOfDataNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="' +categoryCode + '"]', DIRECT_CHILDREN_ONLY)) - when: 'attempt to retrieve a category (code) not in the new list' - objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="2"]', DIRECT_CHILDREN_ONLY) - then: 'a datanode not found exception occurs' - thrown(DataNodeNotFoundException) - cleanup: - restoreBookstoreDataAnchor(1) - where: 'the following data is used' - scenario | categoryCode | childJson || expectedNumberOfDataNodes - 'existing code, no children' | '1' | '' || 1 - 'existing code, new child' | '1' | ', "books" : [ { "title": "New Book" } ]' || 2 - 'existing code, existing child' | '1' | ', "books" : [ { "title": "Matilda" } ]' || 2 - 'new code, new child' | 'new' | ', "books" : [ { "title": "New Book" } ]' || 2 - } - - def 'Update 
data node leaves for node that has no leaves (yet).'() { - given: 'new (webinfo) datanode without leaves' - def json = '{"webinfo": {} }' - objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) - when: 'update is performed to add a leaf' - def updatedJson = '{"webinfo": {"domain-name":"new leaf data"}}' - objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore", updatedJson, now, ContentType.JSON) - then: 'the updated data nodes are retrieved' - def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/webinfo", INCLUDE_ALL_DESCENDANTS) - and: 'the leaf value is updated as expected' - assert result.leaves['domain-name'] == ['new leaf data'] - cleanup: - restoreBookstoreDataAnchor(1) - } - - def 'Update multiple data leaves error scenario: #scenario.'() { - when: 'attempt to update data node for #scenario' - objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, xpath, 'irrelevant json data', now, ContentType.JSON) - then: 'a #expectedException is thrown' - thrown(expectedException) - where: 'the following data is used' - scenario | dataspaceName | anchorName | xpath || expectedException - 'invalid dataspace name' | 'Invalid Dataspace' | 'not-relevant' | '/not relevant' || DataValidationException - 'invalid anchor name' | FUNCTIONAL_TEST_DATASPACE_1 | 'INVALID ANCHOR' | '/not relevant' || DataValidationException - 'non-existing dataspace' | 'non-existing-dataspace' | 'not-relevant' | '/not relevant' || DataspaceNotFoundException - 'non-existing anchor' | FUNCTIONAL_TEST_DATASPACE_1 | 'non-existing-anchor' | '/not relevant' || AnchorNotFoundException - 'non-existing-xpath' | FUNCTIONAL_TEST_DATASPACE_1 | BOOKSTORE_ANCHOR_1 | '/non-existing' || DataValidationException - } - - def 'Update data nodes and descendants.'() { - given: 'some web info for the bookstore' - def json = '{"webinfo": {"domain-name":"ourbookstore.com" 
,"contact-email":"info@ourbookstore.com" }}' - objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) - when: 'the webinfo (container) is updated' - json = '{"webinfo": {"domain-name":"newdomain.com" ,"contact-email":"info@newdomain.com" }}' - objectUnderTest.updateDataNodeAndDescendants(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now) - then: 'webinfo has been updated with teh new details' - def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', DIRECT_CHILDREN_ONLY) - result.leaves.'domain-name'[0] == 'newdomain.com' - result.leaves.'contact-email'[0] == 'info@newdomain.com' - cleanup: - restoreBookstoreDataAnchor(1) - } - - def 'Update bookstore top-level container data node.'() { - when: 'the bookstore top-level container is updated' - def json = '{ "bookstore": { "bookstore-name": "new bookstore" }}' - objectUnderTest.updateDataNodeAndDescendants(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/', json, now) - then: 'bookstore name has been updated' - def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY) - result.leaves.'bookstore-name'[0] == 'new bookstore' - cleanup: - restoreBookstoreDataAnchor(1) - } - - def 'Update multiple data node leaves.'() { - given: 'Updated json for bookstore data' - def jsonData = "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda'}}" - when: 'update is performed for leaves' - objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code='1']", jsonData, now, ContentType.JSON) - then: 'the updated data nodes are retrieved' - def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code=1]/books[@title='Matilda']", INCLUDE_ALL_DESCENDANTS) - and: 'the leaf values are updated as expected' - assert 
result[0].leaves['lang'] == 'English/French' - assert result[0].leaves['price'] == 100 - cleanup: - restoreBookstoreDataAnchor(2) - } - - def 'Order of leaf-list elements is preserved when "ordered-by user" is set in the YANG model.'() { - given: 'Updated json for bookstore data' - def jsonData = "{'book-store:books':{'title':'Matilda', 'authors': ['beta', 'alpha', 'gamma', 'delta']}}" - when: 'update is performed for leaves' - objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code='1']", jsonData, now, ContentType.JSON) - and: 'the updated data nodes are retrieved' - def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code=1]/books[@title='Matilda']", INCLUDE_ALL_DESCENDANTS) - then: 'the leaf-list values have expected order' - assert result[0].leaves['authors'] == ['beta', 'alpha', 'gamma', 'delta'] - cleanup: - restoreBookstoreDataAnchor(2) - } - - def 'Leaf-list elements are sorted when "ordered-by user" is not set in the YANG model.'() { - given: 'Updated json for bookstore data' - def jsonData = "{'book-store:books':{'title':'Matilda', 'editions': [2011, 1988, 2001, 2022, 2025]}}" - when: 'update is performed for leaves' - objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code='1']", jsonData, now, ContentType.JSON) - and: 'the updated data nodes are retrieved' - def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code=1]/books[@title='Matilda']", INCLUDE_ALL_DESCENDANTS) - then: 'the leaf-list values have natural order' - assert result[0].leaves['editions'] == [1988, 2001, 2011, 2022, 2025] - cleanup: - restoreBookstoreDataAnchor(2) - } - - def 'Get delta between 2 anchors'() { - when: 'attempt to get delta report between anchors' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, 
BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, '/', OMIT_DESCENDANTS) - then: 'delta report contains expected number of changes' - result.size() == 3 - and: 'delta report contains UPDATE action with expected xpath' - assert result[0].getAction() == 'update' - assert result[0].getXpath() == '/bookstore' - and: 'delta report contains REMOVE action with expected xpath' - assert result[1].getAction() == 'remove' - assert result[1].getXpath() == "/bookstore-address[@bookstore-name='Easons-1']" - and: 'delta report contains ADD action with expected xpath' - assert result[2].getAction() == 'add' - assert result[2].getXpath() == "/bookstore-address[@bookstore-name='Crossword Bookstores']" - } - - def 'Get delta between 2 anchors returns empty response when #scenario'() { - when: 'attempt to get delta report between anchors' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, targetAnchor, xpath, INCLUDE_ALL_DESCENDANTS) - then: 'delta report is empty' - assert result.isEmpty() - where: 'following data was used' - scenario | targetAnchor | xpath - 'anchors with identical data are queried' | BOOKSTORE_ANCHOR_4 | '/' - 'same anchor name is passed as parameter' | BOOKSTORE_ANCHOR_3 | '/' - 'non existing xpath' | BOOKSTORE_ANCHOR_5 | '/non-existing-xpath' - } - - def 'Get delta between anchors error scenario: #scenario'() { - when: 'attempt to get delta between anchors' - objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, sourceAnchor, targetAnchor, '/some-xpath', INCLUDE_ALL_DESCENDANTS) - then: 'expected exception is thrown' - thrown(expectedException) - where: 'following data was used' - scenario | dataspaceName | sourceAnchor | targetAnchor || expectedException - 'invalid dataspace name' | 'Invalid dataspace' | 'not-relevant' | 'not-relevant' || DataValidationException - 'invalid anchor 1 name' | FUNCTIONAL_TEST_DATASPACE_3 | 'invalid anchor' | 'not-relevant' || DataValidationException - 'invalid anchor 2 name' | 
FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | 'invalid anchor' || DataValidationException - 'non-existing dataspace' | 'non-existing' | 'not-relevant1' | 'not-relevant2' || DataspaceNotFoundException - 'non-existing dataspace with same anchor name' | 'non-existing' | 'not-relevant' | 'not-relevant' || DataspaceNotFoundException - 'non-existing anchor 1' | FUNCTIONAL_TEST_DATASPACE_3 | 'non-existing-anchor' | 'not-relevant' || AnchorNotFoundException - 'non-existing anchor 2' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | 'non-existing-anchor' || AnchorNotFoundException - } - - def 'Get delta between anchors for remove action, where source data node #scenario'() { - when: 'attempt to get delta between leaves of data nodes present in 2 anchors' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_5, BOOKSTORE_ANCHOR_3, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) - then: 'expected action is present in delta report' - assert result.get(0).getAction() == 'remove' - where: 'following data was used' - scenario | parentNodeXpath - 'has leaves and child nodes' | "/bookstore/categories[@code='6']" - 'has leaves only' | "/bookstore/categories[@code='5']/books[@title='Book 11']" - 'has child data node only' | "/bookstore/support-info/contact-emails" - 'is empty' | "/bookstore/container-without-leaves" - } - - def 'Get delta between anchors for add action, where target data node #scenario'() { - when: 'attempt to get delta between leaves of data nodes present in 2 anchors' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) - then: 'the expected action is present in delta report' - result.get(0).getAction() == 'add' - and: 'the expected xapth is present in delta report' - result.get(0).getXpath() == parentNodeXpath - where: 'following data was used' - scenario | parentNodeXpath - 'has leaves and 
child nodes' | "/bookstore/categories[@code='6']" - 'has leaves only' | "/bookstore/categories[@code='5']/books[@title='Book 11']" - 'has child data node only' | "/bookstore/support-info/contact-emails" - 'is empty' | "/bookstore/container-without-leaves" - } - - def 'Get delta between anchors when leaves of existing data nodes are updated,: #scenario'() { - when: 'attempt to get delta between leaves of existing data nodes' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, sourceAnchor, targetAnchor, xpath, OMIT_DESCENDANTS) - then: 'expected action is update' - assert result[0].getAction() == 'update' - and: 'the payload has expected leaf values' - def sourceData = result[0].getSourceData() - def targetData = result[0].getTargetData() - assert sourceData == expectedSourceValue - assert targetData == expectedTargetValue - where: 'following data was used' - scenario | sourceAnchor | targetAnchor | xpath || expectedSourceValue | expectedTargetValue - 'leaf is updated in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore' || ['bookstore-name': 'Easons-1'] | ['bookstore-name': 'Crossword Bookstores'] - 'leaf is removed in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | "/bookstore/categories[@code='5']/books[@title='Book 1']" || [price:1] | null - 'leaf is added in target anchor' | BOOKSTORE_ANCHOR_5 | BOOKSTORE_ANCHOR_3 | "/bookstore/categories[@code='5']/books[@title='Book 1']" || null | [price:1] - } - - def 'Get delta between anchors when child data nodes under existing parent data nodes are updated: #scenario'() { - when: 'attempt to get delta between leaves of existing data nodes' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, sourceAnchor, targetAnchor, xpath, DIRECT_CHILDREN_ONLY) - then: 'expected action is update' - assert result[0].getAction() == 'update' - and: 'the delta report has expected child node xpaths' - def deltaReportEntities = 
getDeltaReportEntities(result) - def childNodeXpathsInDeltaReport = deltaReportEntities.get('xpaths') - assert childNodeXpathsInDeltaReport.contains(expectedChildNodeXpath) - where: 'following data was used' - scenario | sourceAnchor | targetAnchor | xpath || expectedChildNodeXpath - 'source and target anchors have child data nodes' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore/premises' || '/bookstore/premises/addresses[@house-number=\'2\' and @street=\'Main Street\']' - 'removed child data nodes in target anchor' | BOOKSTORE_ANCHOR_5 | BOOKSTORE_ANCHOR_3 | '/bookstore' || '/bookstore/support-info' - 'added child data nodes in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore' || '/bookstore/support-info' - } - - def 'Get delta between anchors where source and target data nodes have leaves and child data nodes'() { - given: 'parent node xpath and expected data in delta report' - def parentNodeXpath = "/bookstore/categories[@code='1']" - def expectedSourceDataInParentNode = ['name':'Children'] - def expectedTargetDataInParentNode = ['name':'Kids'] - def expectedSourceDataInChildNode = [['lang' : 'English'],['price':20, 'editions':[1988, 2000]]] - def expectedTargetDataInChildNode = [['lang':'English/German'], ['price':200, 'editions':[1988, 2000, 2023]]] - when: 'attempt to get delta between leaves of existing data nodes' - def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) - def deltaReportEntities = getDeltaReportEntities(result) - then: 'expected action is update' - assert result[0].getAction() == 'update' - and: 'the payload has expected parent node xpath' - assert deltaReportEntities.get('xpaths').contains(parentNodeXpath) - and: 'delta report has expected source and target data' - assert deltaReportEntities.get('sourcePayload').contains(expectedSourceDataInParentNode) - assert 
deltaReportEntities.get('targetPayload').contains(expectedTargetDataInParentNode) - and: 'the delta report also has expected child node xpaths' - assert deltaReportEntities.get('xpaths').containsAll(["/bookstore/categories[@code='1']/books[@title='The Gruffalo']", "/bookstore/categories[@code='1']/books[@title='Matilda']"]) - and: 'the delta report also has expected source and target data of child nodes' - assert deltaReportEntities.get('sourcePayload').containsAll(expectedSourceDataInChildNode) - assert deltaReportEntities.get('targetPayload').containsAll(expectedTargetDataInChildNode) - } - - def 'Get delta between anchor and JSON payload'() { - when: 'attempt to get delta report between anchor and JSON payload' - def jsonPayload = "{\"book-store:bookstore\":{\"bookstore-name\":\"Crossword Bookstores\"},\"book-store:bookstore-address\":{\"address\":\"Bangalore, India\",\"postal-code\":\"560062\",\"bookstore-name\":\"Crossword Bookstores\"}}" - def result = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, '/', [:], jsonPayload, OMIT_DESCENDANTS) - then: 'delta report contains expected number of changes' - result.size() == 3 - and: 'delta report contains UPDATE action with expected xpath' - assert result[0].getAction() == 'update' - assert result[0].getXpath() == '/bookstore' - and: 'delta report contains REMOVE action with expected xpath' - assert result[1].getAction() == 'remove' - assert result[1].getXpath() == "/bookstore-address[@bookstore-name='Easons-1']" - and: 'delta report contains ADD action with expected xpath' - assert result[2].getAction() == 'add' - assert result[2].getXpath() == "/bookstore-address[@bookstore-name='Crossword Bookstores']" - } - - def 'Get delta between anchor and payload returns empty response when JSON payload is identical to anchor data'() { - when: 'attempt to get delta report between anchor and JSON payload (replacing the string Easons with Easons-1 because the data in JSON 
file is modified, to append anchor number, during the setup process of the integration tests)' - def jsonPayload = readResourceDataFile('bookstore/bookstoreData.json').replace('Easons', 'Easons-1') - def result = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, '/', [:], jsonPayload, INCLUDE_ALL_DESCENDANTS) - then: 'delta report is empty' - assert result.isEmpty() - } - - def 'Get delta between anchor and payload error scenario: #scenario'() { - when: 'attempt to get delta between anchor and json payload' - objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, sourceAnchor, xpath, [:], jsonPayload, INCLUDE_ALL_DESCENDANTS) - then: 'expected exception is thrown' - thrown(expectedException) - where: 'following data was used' - scenario | dataspaceName | sourceAnchor | xpath | jsonPayload || expectedException - 'invalid dataspace name' | 'Invalid dataspace' | 'not-relevant' | '/' | '{some-json}' || DataValidationException - 'invalid anchor name' | FUNCTIONAL_TEST_DATASPACE_3 | 'invalid anchor' | '/' | '{some-json}' || DataValidationException - 'non-existing dataspace' | 'non-existing' | 'not-relevant' | '/' | '{some-json}' || DataspaceNotFoundException - 'non-existing anchor' | FUNCTIONAL_TEST_DATASPACE_3 | 'non-existing-anchor' | '/' | '{some-json}' || AnchorNotFoundException - 'empty json payload with root node xpath' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | '/' | '' || DataValidationException - 'empty json payload with non-root node xpath' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | '/bookstore' | '' || DataValidationException - } - - def getDeltaReportEntities(List deltaReport) { - def xpaths = [] - def action = [] - def sourcePayload = [] - def targetPayload = [] - deltaReport.each { - delta -> xpaths.add(delta.getXpath()) - action.add(delta.getAction()) - sourcePayload.add(delta.getSourceData()) - targetPayload.add(delta.getTargetData()) - } - return ['xpaths':xpaths, 
'action':action, 'sourcePayload':sourcePayload, 'targetPayload':targetPayload] - } - - def countDataNodesInBookstore() { - return countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', INCLUDE_ALL_DESCENDANTS)) - } - - def countTopLevelListDataNodesInBookstore() { - return countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/', INCLUDE_ALL_DESCENDANTS)) - } -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataspaceServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataspaceServiceIntegrationSpec.groovy deleted file mode 100644 index 739e802244..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataspaceServiceIntegrationSpec.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import org.onap.cps.api.CpsDataspaceService -import org.onap.cps.integration.base.CpsIntegrationSpecBase -import org.onap.cps.spi.exceptions.AlreadyDefinedException -import org.onap.cps.spi.exceptions.DataspaceInUseException -import org.onap.cps.spi.exceptions.DataspaceNotFoundException - -class CpsDataspaceServiceIntegrationSpec extends CpsIntegrationSpecBase { - - CpsDataspaceService objectUnderTest - - def setup() { objectUnderTest = cpsDataspaceService } - - def 'Dataspace CRUD operations.'() { - when: 'a dataspace is created' - objectUnderTest.createDataspace('newDataspace') - then: 'the dataspace can be read' - assert objectUnderTest.getDataspace('newDataspace').name == 'newDataspace' - and: 'it can be deleted' - objectUnderTest.deleteDataspace('newDataspace') - then: 'the dataspace no longer exists i.e. 
an exception is thrown if an attempt is made to retrieve it' - def thrown = null - try { - objectUnderTest.getDataspace('newDataspace') - } catch(Exception exception) { - thrown = exception - } - assert thrown instanceof DataspaceNotFoundException - } - - def 'Attempt to delete a non-existing dataspace'() { - when: 'attempt to delete a non-existing dataspace' - objectUnderTest.deleteDataspace('non-existing-name') - then: 'a not found exception is thrown with the relevant dataspace name' - def thrownException = thrown(DataspaceNotFoundException) - assert thrownException.details.contains('non-existing-name does not exist') - } - - def 'Attempt Delete dataspace with a schema set and anchor'() { - setup: 'a dataspace with a schema set and anchor' - objectUnderTest.createDataspace('targetDataspace') - cpsModuleService.createSchemaSet('targetDataspace','someSchemaSet',[:]) - cpsAnchorService.createAnchor('targetDataspace', 'someSchemaSet', 'some_anchor') - when: 'attempt to delete dataspace' - objectUnderTest.deleteDataspace('targetDataspace') - then: 'an in-use exception is thrown mentioning anchors' - def thrownException = thrown(DataspaceInUseException) - assert thrownException.details.contains('contains 1 anchor(s)') - cleanup: - cpsModuleService.deleteSchemaSetsWithCascade('targetDataspace',['someSchemaSet']) - objectUnderTest.deleteDataspace('targetDataspace') - } - - def 'Attempt to delete dataspace with just a schema set'() { - setup: 'a dataspace with a schema set' - objectUnderTest.createDataspace('targetDataspace') - cpsModuleService.createSchemaSet('targetDataspace','someSchemaSet',[:]) - when: 'attempt to delete dataspace' - objectUnderTest.deleteDataspace('targetDataspace') - then: 'an in-use exception is thrown mentioning schemasets' - def thrownException = thrown(DataspaceInUseException) - assert thrownException.details.contains('contains 1 schemaset(s)') - cleanup: - cpsModuleService.deleteSchemaSetsWithCascade('targetDataspace',['someSchemaSet']) - 
objectUnderTest.deleteDataspace('targetDataspace') - } - - def 'Retrieve all dataspaces (depends on total test suite).'() { - given: 'two addtional dataspaces are created' - objectUnderTest.createDataspace('dataspace1') - objectUnderTest.createDataspace('dataspace2') - when: 'all datespaces are retreived' - def result = objectUnderTest.getAllDataspaces() - then: 'there are at least 3 dataspaces (2 new ones plus the general test dataspace)' - result.size() >= 3 - assert result.name.containsAll([GENERAL_TEST_DATASPACE, 'dataspace1', 'dataspace2']) - } - - def 'Duplicate dataspaces.'() { - when: 'attempting to create a dataspace with the same name as an existing one' - objectUnderTest.createDataspace(GENERAL_TEST_DATASPACE) - then: 'an exception is thrown indicating the dataspace already exists' - thrown(AlreadyDefinedException) - } - -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsModuleServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsModuleServiceIntegrationSpec.groovy deleted file mode 100644 index b7b6fa11a7..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsModuleServiceIntegrationSpec.groovy +++ /dev/null @@ -1,364 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import org.onap.cps.api.CpsModuleService -import org.onap.cps.integration.base.FunctionalSpecBase -import org.onap.cps.spi.CascadeDeleteAllowed -import org.onap.cps.spi.exceptions.AlreadyDefinedException -import org.onap.cps.spi.exceptions.DataspaceNotFoundException -import org.onap.cps.spi.exceptions.ModelValidationException -import org.onap.cps.spi.exceptions.SchemaSetInUseException -import org.onap.cps.spi.exceptions.SchemaSetNotFoundException -import org.onap.cps.spi.model.ModuleDefinition -import org.onap.cps.spi.model.ModuleReference - -class CpsModuleServiceIntegrationSpec extends FunctionalSpecBase { - - CpsModuleService objectUnderTest - - private static def originalNumberOfModuleReferences = 2 // bookstore has two modules - private static def bookStoreModuleReference = new ModuleReference('stores','2024-02-08') - private static def bookStoreModuleReferenceWithNamespace = new ModuleReference('stores','2024-02-08', 'org:onap:cps:sample') - private static def bookStoreTypesModuleReference = new ModuleReference('bookstore-types','2024-01-30') - private static def bookStoreTypesModuleReferenceWithNamespace = new ModuleReference('bookstore-types','2024-01-30', 'org:onap:cps:types:sample') - static def NEW_RESOURCE_REVISION = '2023-05-10' - static def NEW_RESOURCE_CONTENT = 'module test_module {\n' + - ' yang-version 1.1;\n' + - ' namespace "org:onap:ccsdk:sample";\n' + - '\n' + - ' prefix book-store;\n' + - '\n' + - ' revision "2023-05-10" {\n' + - ' description\n' + - ' "Sample Model";\n' + - ' }' + - '}' - - def newYangResourcesNameToContentMap = [:] - def moduleReferences = [] - def noNewModules = [:] - def bookstoreModelFileContent = 
readResourceDataFile('bookstore/bookstore.yang') - def bookstoreTypesFileContent = readResourceDataFile('bookstore/bookstore-types.yang') - - def setup() { - objectUnderTest = cpsModuleService - } - - /* - C R E A T E S C H E M A S E T U S E - C A S E S - */ - - def 'Create new schema set from yang resources with #scenario'() { - given: 'a new schema set with #numberOfModules modules' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfNewModules) - when: 'the new schema set is created' - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', newYangResourcesNameToContentMap) - then: 'the number of module references has increased by #numberOfNewModules' - def yangResourceModuleReferences = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1) - originalNumberOfModuleReferences + numberOfNewModules == yangResourceModuleReferences.size() - cleanup: - objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, [ 'newSchemaSet' ]) - where: 'the following parameters are use' - scenario | numberOfNewModules - 'two valid new modules' | 2 - 'empty schema set' | 0 - 'over max batch size #modules' | 101 - } - - def 'Create new schema set with recommended filename format but invalid yang'() { - given: 'a filename using RFC6020 recommended format (for coverage only)' - def fileName = 'test@2023-05-11.yang' - when: 'attempt to create a schema set with invalid Yang' - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', [(fileName) :'invalid yang']) - then: 'a model validation exception' - thrown(ModelValidationException) - } - - def 'Create new schema set from modules with #scenario'() { - given: 'a new schema set with #numberOfNewModules modules' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfNewModules) - and: 'add existing module references (optional)' - moduleReferences.addAll(existingModuleReferences) - when: 'the new schema set is created' - def 
schemaSetName = "NewSchemaWith${numberOfNewModules}Modules" - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, schemaSetName, newYangResourcesNameToContentMap, moduleReferences) - and: 'associated with a new anchor' - cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, schemaSetName, 'newAnchor') - then: 'the new anchor has the correct number of modules' - def yangResourceModuleReferences = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'newAnchor') - assert expectedNumberOfModulesForAnchor == yangResourceModuleReferences.size() - cleanup: - objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, [ schemaSetName.toString() ]) - where: 'the following module references are provided' - scenario | numberOfNewModules | existingModuleReferences || expectedNumberOfModulesForAnchor - 'empty schema set' | 0 | [ ] || 0 - 'one existing module' | 0 | [bookStoreModuleReference ] || 1 - 'two new modules' | 2 | [ ] || 2 - 'two new modules, one existing' | 2 | [bookStoreModuleReference ] || 3 - 'over max batch size #modules' | 101 | [ ] || 101 - 'two valid, one invalid module' | 2 | [ new ModuleReference('NOT EXIST','IRRELEVANT') ] || 2 - } - - def 'Duplicate schema content.'() { - given: 'a map of yang resources' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) - when: 'a new schema set is created' - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema1', newYangResourcesNameToContentMap) - then: 'the dataspace has one new module (reference)' - def numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).size() - assert numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded == originalNumberOfModuleReferences + 1 - when: 'a second new schema set is created' - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema2', newYangResourcesNameToContentMap) - then: 'the dataspace 
has no additional module (reference)' - assert numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded == objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).size() - cleanup: - objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, [ 'newSchema1', 'newSchema2']) - } - - def 'Create schema set error scenario: #scenario.'() { - when: 'attempt to store schema set #schemaSetName in dataspace #dataspaceName' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(0) - objectUnderTest.createSchemaSet(dataspaceName, schemaSetName, newYangResourcesNameToContentMap) - then: 'an #expectedException is thrown' - thrown(expectedException) - where: 'the following data is used' - scenario | dataspaceName | schemaSetName || expectedException - 'dataspace does not exist' | 'unknown' | 'not-relevant' || DataspaceNotFoundException - 'schema set already exists' | FUNCTIONAL_TEST_DATASPACE_1 | BOOKSTORE_SCHEMA_SET || AlreadyDefinedException - } - - /* - R E A D S C H E M A S E T I N F O U S E - C A S E S - */ - - def 'Retrieving module definitions by anchor.'() { - when: 'the module definitions for an anchor are retrieved' - def result = objectUnderTest.getModuleDefinitionsByAnchorName(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1) - then: 'the correct module definitions are returned' - assert result.size() == 2 - assert result.contains(new ModuleDefinition('stores','2024-02-08',bookstoreModelFileContent)) - assert result.contains(new ModuleDefinition('bookstore-types','2024-01-30', bookstoreTypesFileContent)) - } - - def 'Retrieving module definitions: #scenarios'() { - when: 'module definitions for module name are retrieved' - def result = objectUnderTest.getModuleDefinitionsByAnchorAndModule(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, moduleName, moduleRevision) - then: 'the correct module definitions are returned' - if (expectedNumberOfDefinitions > 0) { - assert result.size() == expectedNumberOfDefinitions - def 
expectedModuleDefinition = new ModuleDefinition('stores', '2024-02-08', bookstoreModelFileContent) - assert result[0] == expectedModuleDefinition - } - where: 'following parameters are used' - scenarios | moduleName | moduleRevision || expectedNumberOfDefinitions - 'correct module name and revision' | 'stores' | '2024-02-08' || 1 - 'correct module name' | 'stores' | null || 1 - 'incorrect module name' | 'other' | null || 0 - 'incorrect revision' | 'stores' | '2025-11-22' || 0 - } - - def 'Retrieving yang resource module references by anchor.'() { - when: 'the yang resource module references for an anchor are retrieved' - def result = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1) - then: 'the correct module references are returned' - assert result.size() == 2 - assert result.containsAll(bookStoreModuleReference, bookStoreTypesModuleReference) - } - - def 'Identifying new module references with #scenario'() { - when: 'identifyNewModuleReferences is called' - def result = objectUnderTest.identifyNewModuleReferences(moduleReferences) - then: 'the correct module references are returned' - assert result.size() == expectedResult.size() - assert result.containsAll(expectedResult) - where: 'the following data is used' - scenario | moduleReferences || expectedResult - 'just new module references' | [new ModuleReference('new1', 'r1'), new ModuleReference('new2', 'r1')] || [new ModuleReference('new1', 'r1'), new ModuleReference('new2', 'r1')] - 'one new module,one existing reference' | [new ModuleReference('new1', 'r1'), bookStoreModuleReference] || [new ModuleReference('new1', 'r1')] - 'no new module references' | [bookStoreModuleReference] || [] - 'no module references' | [] || [] - 'module references collection is null' | null || [] - } - - def 'Retrieve schema set.'() { - when: 'a specific schema set is retrieved' - def result = objectUnderTest.getSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET) - then: 'the 
result has the correct name and module(s)' - assert result.name == 'bookstoreSchemaSet' - assert result.moduleReferences.size() == 2 - assert result.moduleReferences.containsAll(bookStoreModuleReferenceWithNamespace, bookStoreTypesModuleReferenceWithNamespace) - } - - def 'Retrieve all schema sets.'() { - given: 'an extra schema set is stored' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema1', newYangResourcesNameToContentMap) - when: 'all schema sets are retrieved' - def result = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1) - then: 'the result contains all expected schema sets' - assert result.name == [ 'bookstoreSchemaSet', 'newSchema1' ] - cleanup: - objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchema1']) - } - - /* - D E L E T E S C H E M A S E T U S E - C A S E S - */ - - def 'Delete schema sets with(out) cascade.'() { - given: 'a schema set' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', newYangResourcesNameToContentMap) - and: 'optionally create anchor for the schema set' - if (associateWithAnchor) { - cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', 'newAnchor') - } - when: 'attempt to delete the schema set' - try { - objectUnderTest.deleteSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', cascadeDeleteAllowedOption) - } - catch (Exception e) { // only accept correct exception when schema set cannot be deleted - assert e instanceof SchemaSetInUseException && expectSchemaSetStillPresent - } - then: 'check if the dataspace still contains the new schema set or not' - def remainingSchemaSetNames = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1).name - assert remainingSchemaSetNames.contains('newSchemaSet') == expectSchemaSetStillPresent - cleanup: - 
objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchemaSet']) - where: 'the following options are used' - associateWithAnchor | cascadeDeleteAllowedOption || expectSchemaSetStillPresent - false | CascadeDeleteAllowed.CASCADE_DELETE_ALLOWED || false - false | CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED || false - true | CascadeDeleteAllowed.CASCADE_DELETE_ALLOWED || false - true | CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED || true - } - - def 'Delete schema sets with shared resources.'() { - given: 'a new schema set' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet1', newYangResourcesNameToContentMap) - and: 'another schema set which shares one yang resource (module)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences(2) - objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet2', newYangResourcesNameToContentMap) - when: 'all schema sets are retrieved' - def moduleRevisions = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).revision - then: 'both modules (revisions) are present' - assert moduleRevisions.containsAll(['2000-01-01', '2000-01-01']) - when: 'delete the second schema set that has two resources one of which is a shared resource' - objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchemaSet2']) - then: 'only the second schema set is deleted' - def remainingSchemaSetNames = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1).name - assert remainingSchemaSetNames.contains('newSchemaSet1') - assert !remainingSchemaSetNames.contains('newSchemaSet2') - and: 'only the shared module (revision) remains' - def remainingModuleRevisions = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).revision - assert remainingModuleRevisions.contains('2000-01-01') - assert !remainingModuleRevisions.contains('2001-01-01') - 
cleanup: - objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchemaSet1']) - } - - def 'Delete schema set error scenario: #scenario.'() { - when: 'attempt to delete a schema set where #scenario' - objectUnderTest.deleteSchemaSet(dataspaceName, schemaSetName, CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED) - then: 'an #expectedException is thrown' - thrown(expectedException) - where: 'the following data is used' - scenario | dataspaceName | schemaSetName || expectedException - 'dataspace does not exist' | 'unknown' | 'not-relevant' || DataspaceNotFoundException - 'schema set does not exists' | FUNCTIONAL_TEST_DATASPACE_1 | 'unknown' || SchemaSetNotFoundException - } - - /* - U P G R A D E - */ - - def 'Upgrade schema set (with existing and new modules, no matching module set tag in NCMP)'() { - given: 'an anchor and schema set with 2 modules (to be upgraded)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences('original', 2) - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', newYangResourcesNameToContentMap, []) - cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', 'targetAnchor') - def yangResourceModuleReferencesBeforeUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') - assert yangResourceModuleReferencesBeforeUpgrade.size() == 2 - assert yangResourceModuleReferencesBeforeUpgrade.containsAll([new ModuleReference('original_0','2000-01-01'),new ModuleReference('original_1','2001-01-01')]) - and: 'two new 2 modules (from node)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences('new', 2) - def newModuleReferences = [new ModuleReference('new_0','2000-01-01'),new ModuleReference('new_1','2001-01-01')] - and: 'a list of all module references (normally retrieved from node)' - def allModuleReferences = [] - allModuleReferences.add(bookStoreModuleReference) - allModuleReferences.addAll(newModuleReferences) 
- when: 'the schema set is upgraded' - objectUnderTest.upgradeSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', newYangResourcesNameToContentMap, allModuleReferences) - then: 'the new anchor has the correct new and existing modules' - def yangResourceModuleReferencesAfterUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') - assert yangResourceModuleReferencesAfterUpgrade.size() == 3 - assert yangResourceModuleReferencesAfterUpgrade.contains(bookStoreModuleReference) - assert yangResourceModuleReferencesAfterUpgrade.containsAll(newModuleReferences); - cleanup: - objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['targetSchema']) - } - - def 'Upgrade existing schema set from another anchor (used in NCMP for matching module set tag)'() { - given: 'an anchor and schema set with 1 module (target)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences('target', 1) - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', newYangResourcesNameToContentMap, []) - cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', 'targetAnchor') - def moduleReferencesBeforeUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') - assert moduleReferencesBeforeUpgrade.size() == 1 - and: 'another anchor and schema set with 2 other modules (source for upgrade)' - populateNewYangResourcesNameToContentMapAndAllModuleReferences('source', 2) - objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'sourceSchema', newYangResourcesNameToContentMap, []) - cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'sourceSchema', 'sourceAnchor') - assert objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'sourceAnchor').size() == 2 - when: 'the target schema is upgraded using the module references from the source anchor' - def 
moduleReferencesFromSourceAnchor = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'sourceAnchor') - objectUnderTest.upgradeSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', noNewModules, moduleReferencesFromSourceAnchor) - then: 'the target schema now refers to the source modules (with namespace) modules' - def schemaSetModuleReferencesAfterUpgrade = getObjectUnderTest().getSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema').moduleReferences - assert schemaSetModuleReferencesAfterUpgrade.containsAll([new ModuleReference('source_0','2000-01-01','org:onap:ccsdk:sample'),new ModuleReference('source_1','2001-01-01','org:onap:ccsdk:sample')]); - and: 'the associated target anchor has the same module references (without namespace but that is a legacy issue)' - def anchorModuleReferencesAfterUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') - assert anchorModuleReferencesAfterUpgrade.containsAll([new ModuleReference('source_0','2000-01-01'),new ModuleReference('source_1','2001-01-01')]); - cleanup: - objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['sourceSchema', 'targetSchema']) - } - - /* - H E L P E R M E T H O D S - */ - - def populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfModules) { - populateNewYangResourcesNameToContentMapAndAllModuleReferences('name', numberOfModules) - } - - def populateNewYangResourcesNameToContentMapAndAllModuleReferences(namePrefix, numberOfModules) { - numberOfModules.times { - def uniqueName = namePrefix + '_' + it - def uniqueRevision = String.valueOf(2000 + it) + '-01-01' - moduleReferences.add(new ModuleReference(uniqueName, uniqueRevision)) - def uniqueContent = NEW_RESOURCE_CONTENT.replace(NEW_RESOURCE_REVISION, uniqueRevision).replace('module test_module', 'module '+uniqueName) - newYangResourcesNameToContentMap.put(uniqueRevision, uniqueContent) - } - } - -} diff --git 
a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsQueryServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsQueryServiceIntegrationSpec.groovy deleted file mode 100644 index ad3ebd83ec..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsQueryServiceIntegrationSpec.groovy +++ /dev/null @@ -1,425 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023-2024 Nordix Foundation - * Modifications Copyright (C) 2023 TechMahindra Ltd - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import java.time.OffsetDateTime -import org.onap.cps.api.CpsQueryService -import org.onap.cps.integration.base.FunctionalSpecBase -import org.onap.cps.spi.FetchDescendantsOption -import org.onap.cps.spi.PaginationOption -import org.onap.cps.spi.exceptions.CpsPathException - -import static org.onap.cps.spi.FetchDescendantsOption.DIRECT_CHILDREN_ONLY -import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS -import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS -import static org.onap.cps.spi.PaginationOption.NO_PAGINATION - -class CpsQueryServiceIntegrationSpec extends FunctionalSpecBase { - - CpsQueryService objectUnderTest - - def setup() { objectUnderTest = cpsQueryService } - - def 'Query bookstore using CPS path where #scenario.'() { - when: 'query data nodes for bookstore container' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) - then: 'the result contains expected number of nodes' - assert result.size() == expectedResultSize - and: 'the result contains the expected leaf values' - result.leaves.forEach( dataNodeLeaves -> { - expectedLeaves.forEach( (expectedLeafKey,expectedLeafValue) -> { - assert dataNodeLeaves[expectedLeafKey] == expectedLeafValue - }) - }) - where: - scenario | cpsPath || expectedResultSize | expectedLeaves - 'the AND condition is used' | '//books[@lang="English" and @price=15]' || 2 | [lang:"English", price:15] - 'the AND is used where result does not exist' | '//books[@lang="English" and @price=1000]' || 0 | [] - } - - def 'Cps Path query using comparative and boolean operators.'() { - given: 'a cps path query in the discount category' - def cpsPath = "/bookstore/categories[@code='5']/books" + leafCondition - when: 'a query is executed to 
get response by the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, - cpsPath, OMIT_DESCENDANTS) - then: 'the cps-path of queryDataNodes has the expectedLeaves' - def bookPrices = result.collect { it.getLeaves().get('price') } - assert bookPrices.sort() == expectedBookPrices.sort() - where: 'the following data is used' - leafCondition || expectedBookPrices - '[@price = 5]' || [5] - '[@price < 5]' || [1, 2, 3, 4] - '[@price > 5]' || [6, 7, 8, 9, 10] - '[@price <= 5]' || [1, 2, 3, 4, 5] - '[@price >= 5]' || [5, 6, 7, 8, 9, 10] - '[@price > 10]' || [] - '[@price = 3 or @price = 7]' || [3, 7] - '[@price = 3 and @price = 7]' || [] - '[@price > 3 and @price <= 6]' || [4, 5, 6] - '[@price < 3 or @price > 8]' || [1, 2, 9, 10] - '[@price = 1 or @price = 3 or @price = 5]' || [1, 3, 5] - '[@price = 1 or @price >= 8 and @price < 10]' || [1, 8, 9] - '[@price >= 3 and @price <= 5 or @price > 9]' || [3, 4, 5, 10] - } - - def 'Cps Path query for leaf value(s) with #scenario.'() { - when: 'a query is executed to get a data node by the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, fetchDescendantsOption) - then: 'the correct number of parent nodes are returned' - assert result.size() == expectedNumberOfParentNodes - and: 'the correct total number of data nodes are returned' - assert countDataNodesInTree(result) == expectedTotalNumberOfNodes - where: 'the following data is used' - scenario | cpsPath | fetchDescendantsOption || expectedNumberOfParentNodes | expectedTotalNumberOfNodes - 'string and no descendants' | '/bookstore/categories[@code="1"]/books[@title="Matilda"]' | OMIT_DESCENDANTS || 1 | 1 - 'integer and descendants' | '/bookstore/categories[@code="1"]/books[@price=15]' | INCLUDE_ALL_DESCENDANTS || 1 | 1 - 'no condition and no descendants' | '/bookstore/categories' | OMIT_DESCENDANTS || 5 | 5 - 'no condition and level 1 descendants' | 
'/bookstore' | new FetchDescendantsOption(1) || 1 | 7 - 'no condition and level 2 descendants' | '/bookstore' | new FetchDescendantsOption(2) || 1 | 28 - } - - def 'Query for attribute by cps path with cps paths that return no data because of #scenario.'() { - when: 'a query is executed to get data nodes for the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) - then: 'no data is returned' - assert result.isEmpty() - where: 'following cps queries are performed' - scenario | cpsPath - 'cps path is incomplete' | '/bookstore[@title="Matilda"]' - 'leaf value does not exist' | '/bookstore/categories[@code="1"]/books[@title=\'does not exist\']' - 'incomplete end of xpath prefix' | '/bookstore/categories/books[@price=15]' - } - - def 'Cps Path query using descendant anywhere and #type (further) descendants.'() { - when: 'a query is executed to get a data node by the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="1"]', fetchDescendantsOption) - then: 'the data node has the correct number of children' - assert result[0].childDataNodes.xpath.sort() == expectedChildNodes.sort() - where: 'the following data is used' - type | fetchDescendantsOption || expectedChildNodes - 'omit' | OMIT_DESCENDANTS || [] - 'include' | INCLUDE_ALL_DESCENDANTS || ["/bookstore/categories[@code='1']/books[@title='Matilda']", - "/bookstore/categories[@code='1']/books[@title='The Gruffalo']"] - } - - def 'Cps Path query for all books.'() { - when: 'a query is executed to get all books' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '//books', OMIT_DESCENDANTS) - then: 'the expected number of books are returned' - assert result.size() == 19 - } - - def 'Cps Path query using descendant anywhere with #scenario.'() { - when: 'a query is executed to get a data node by the given 
cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) - then: 'xpaths of the retrieved data nodes are as expected' - def bookTitles = result.collect { it.getLeaves().get('title') } - assert bookTitles.sort() == expectedBookTitles.sort() - where: 'the following data is used' - scenario | cpsPath || expectedBookTitles - 'string leaf condition' | '//books[@title="Matilda"]' || ["Matilda"] - 'text condition on leaf' | '//books/title[text()="Matilda"]' || ["Matilda"] - 'text condition case mismatch' | '//books/title[text()="matilda"]' || [] - 'text condition on int leaf' | '//books/price[text()="20"]' || ["A Book with No Language", "Matilda"] - 'text condition on leaf-list' | '//books/authors[text()="Terry Pratchett"]' || ["Good Omens", "The Colour of Magic", "The Light Fantastic"] - 'text condition partial match' | '//books/authors[text()="Terry"]' || [] - 'text condition (existing) empty string' | '//books/lang[text()=""]' || ["A Book with No Language"] - 'text condition on int leaf-list' | '//books/editions[text()="2000"]' || ["Matilda"] - 'match of leaf containing /' | '//books[@lang="N/A"]' || ["Logarithm tables"] - 'text condition on leaf containing /' | '//books/lang[text()="N/A"]' || ["Logarithm tables"] - 'match of key containing /' | '//books[@title="Debian GNU/Linux"]' || ["Debian GNU/Linux"] - 'text condition on key containing /' | '//books/title[text()="Debian GNU/Linux"]' || ["Debian GNU/Linux"] - } - - def 'Query for attribute by cps path using contains condition #scenario.'() { - when: 'a query is executed to get response by the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) - then: 'xpaths of the retrieved data nodes are as expected' - def bookTitles = result.collect { it.getLeaves().get('title') } - assert bookTitles.sort() == expectedBookTitles.sort() - where: 'the following 
data is used' - scenario | cpsPath || expectedBookTitles - 'contains condition with leaf' | '//books[contains(@title,"Mat")]' || ["Matilda"] - 'contains condition with case-sensitive' | '//books[contains(@title,"Ti")]' || [] - 'contains condition with Integer Value' | '//books[contains(@price,"15")]' || ["Annihilation", "The Gruffalo"] - } - - def 'Query for attribute by cps path using contains condition with no value.'() { - when: 'a query is executed to get response by the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '//books[contains(@title,"")]', OMIT_DESCENDANTS) - then: 'all books are returned' - assert result.size() == 19 - } - - def 'Cps Path query using descendant anywhere with #scenario condition for a container element.'() { - when: 'a query is executed to get a data node by the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) - then: 'book titles from the retrieved data nodes are as expected' - def bookTitles = result.collect { it.getLeaves().get('title') } - assert bookTitles.sort() == expectedBookTitles.sort() - where: 'the following data is used' - scenario | cpsPath || expectedBookTitles - 'one leaf' | '//books[@price=14]' || ['The Light Fantastic'] - 'one leaf with ">" condition' | '//books[@price>14]' || ['A Book with No Language', 'Annihilation', 'Debian GNU/Linux', 'Matilda', 'The Gruffalo'] - 'one text' | '//books/authors[text()="Terry Pratchett"]' || ['Good Omens', 'The Colour of Magic', 'The Light Fantastic'] - 'more than one leaf' | '//books[@price=12 and @lang="English"]' || ['The Colour of Magic'] - 'more than one leaf has "OR" condition' | '//books[@lang="English" or @price=15]' || ['Annihilation', 'Good Omens', 'Matilda', 'The Colour of Magic', 'The Gruffalo', 'The Light Fantastic'] - 'more than one leaf has "OR" condition with non-json data' | '//books[@title="xyz" or @price=13]' || 
['Good Omens'] - 'more than one leaf has multiple AND' | '//books[@lang="English" and @price=13 and @edition=1983]' || [] - 'more than one leaf has multiple OR' | '//books[ @title="Matilda" or @price=15 or @edition=2006]' || ['Annihilation', 'Matilda', 'The Gruffalo'] - 'leaves reversed in order' | '//books[@lang="English" and @price=12]' || ['The Colour of Magic'] - 'more than one leaf has combination of AND/OR' | '//books[@edition=1983 and @price=13 or @title="Good Omens"]' || ['Good Omens'] - 'more than one leaf has OR/AND' | '//books[@title="The Light Fantastic" or @price=11 and @edition=1983]' || ['The Light Fantastic'] - 'leaf and text' | '//books[@price=14]/authors[text()="Terry Pratchett"]' || ['The Light Fantastic'] - 'leaf and contains' | '//books[contains(@price,"13")]' || ['Good Omens'] - } - - def 'Cps Path query using descendant anywhere with #scenario condition(s) for a list element.'() { - when: 'a query is executed to get a data node by the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) - then: 'xpaths of the retrieved data nodes are as expected' - result.xpath.toList() == ["/bookstore/premises/addresses[@house-number='2' and @street='Main Street']"] - where: 'the following data is used' - scenario | cpsPath - 'full composite key' | '//addresses[@house-number=2 and @street="Main Street"]' - 'one partial key leaf' | '//addresses[@house-number=2]' - 'one non key leaf' | '//addresses[@county="Kildare"]' - 'mix of partial key and non key leaf' | '//addresses[@street="Main Street" and @county="Kildare"]' - } - - def 'Query for attribute by cps path of type ancestor with #scenario.'() { - when: 'the given cps path is parsed' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) - then: 'the xpaths of the retrieved data nodes are as expected' - assert result.xpath.sort() == 
expectedXPaths.sort() - where: 'the following data is used' - scenario | cpsPath || expectedXPaths - 'multiple list-ancestors' | '//books/ancestor::categories' || ["/bookstore/categories[@code='1']", "/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] - 'one ancestor with list value' | '//books/ancestor::categories[@code="1"]' || ["/bookstore/categories[@code='1']"] - 'top ancestor' | '//books/ancestor::bookstore' || ["/bookstore"] - 'list with index value in the xpath prefix' | '//categories[@code="1"]/books/ancestor::bookstore' || ["/bookstore"] - 'ancestor with parent list' | '//books/ancestor::bookstore/categories' || ["/bookstore/categories[@code='1']", "/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] - 'ancestor with parent' | '//books/ancestor::bookstore/categories[@code="2"]' || ["/bookstore/categories[@code='2']"] - 'ancestor combined with text condition' | '//books/title[text()="Matilda"]/ancestor::bookstore' || ["/bookstore"] - 'ancestor with parent that does not exist' | '//books/ancestor::parentDoesNoExist/categories' || [] - 'ancestor does not exist' | '//books/ancestor::ancestorDoesNotExist' || [] - 'ancestor combined with contains condition' | '//books[contains(@title,"Mat")]/ancestor::bookstore' || ["/bookstore"] - } - - def 'Query for attribute by cps path of type ancestor with #scenario descendants.'() { - when: 'the given cps path is parsed' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '//books/ancestor::bookstore', fetchDescendantsOption) - then: 'the xpaths of the retrieved data nodes are as expected' - assert countDataNodesInTree(result) == expectedNumberOfNodes - where: 'the following data is used' - scenario | fetchDescendantsOption || expectedNumberOfNodes - 'no' | OMIT_DESCENDANTS || 1 - 'direct' | 
DIRECT_CHILDREN_ONLY || 7 - 'all' | INCLUDE_ALL_DESCENDANTS || 28 - } - - def 'Cps Path query with #scenario throws a CPS Path Exception.'() { - when: 'trying to execute a query with a syntax (parsing) error' - objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) - then: 'a cps path exception is thrown' - thrown(CpsPathException) - where: 'the following data is used' - scenario | cpsPath - 'cpsPath that cannot be parsed' | 'cpsPath that cannot be parsed' - 'String with comparative operator' | '//books[@lang>"German" and @price>10]' - } - - def 'Cps Path query across anchors with #scenario.'() { - when: 'a query is executed to get a data nodes across anchors by the given CpsPath' - def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, cpsPath, OMIT_DESCENDANTS, NO_PAGINATION) - then: 'the correct dataspace is queried' - assert result.dataspace.toSet() == [FUNCTIONAL_TEST_DATASPACE_1].toSet() - and: 'correct anchors are queried' - assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2].toSet() - and: 'the correct number of nodes is returned' - assert result.size() == expectedXpathsPerAnchor.size() * NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA - and: 'the queried nodes have expected xpaths' - assert result.xpath.toSet() == expectedXpathsPerAnchor.toSet() - where: 'the following data is used' - scenario | cpsPath || expectedXpathsPerAnchor - 'container node' | '/bookstore' || ["/bookstore"] - 'list node' | '/bookstore/categories' || ["/bookstore/categories[@code='1']", "/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] - 'integer leaf-condition' | '/bookstore/categories[@code="1"]/books[@price=15]' || ["/bookstore/categories[@code='1']/books[@title='The Gruffalo']"] - 'multiple list-ancestors' | '//books/ancestor::categories' || ["/bookstore/categories[@code='1']", 
"/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] - 'one ancestor with list value' | '//books/ancestor::categories[@code="1"]' || ["/bookstore/categories[@code='1']"] - 'list with index value in the xpath prefix' | '//categories[@code="1"]/books/ancestor::bookstore' || ["/bookstore"] - 'ancestor with parent list' | '//books/ancestor::bookstore/categories' || ["/bookstore/categories[@code='1']", "/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] - 'ancestor with parent list element' | '//books/ancestor::bookstore/categories[@code="2"]' || ["/bookstore/categories[@code='2']"] - 'ancestor combined with text condition' | '//books/title[text()="Matilda"]/ancestor::bookstore' || ["/bookstore"] - 'ancestor same as target type' | '//books/title[text()="Matilda"]/ancestor::books' || ["/bookstore/categories[@code='1']/books[@title='Matilda']"] - } - - def 'Cps Path query across anchors with #scenario descendants.'() { - when: 'a query is executed to get a data node by the given cps path' - def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, '/bookstore', fetchDescendantsOption, NO_PAGINATION) - then: 'the correct dataspace was queried' - assert result.dataspace.toSet() == [FUNCTIONAL_TEST_DATASPACE_1].toSet() - and: 'correct number of datanodes are returned' - assert countDataNodesInTree(result) == expectedNumberOfNodesPerAnchor * NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA - where: 'the following data is used' - scenario | fetchDescendantsOption || expectedNumberOfNodesPerAnchor - 'no' | OMIT_DESCENDANTS || 1 - 'direct' | DIRECT_CHILDREN_ONLY || 7 - 'all' | INCLUDE_ALL_DESCENDANTS || 28 - } - - def 'Cps Path query across anchors with ancestors and #scenario descendants.'() { - when: 'a query is executed to get a data node by the given cps path' - 
def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, '//books/ancestor::bookstore', fetchDescendantsOption, NO_PAGINATION) - then: 'the correct dataspace was queried' - assert result.dataspace.toSet() == [FUNCTIONAL_TEST_DATASPACE_1].toSet() - and: 'correct number of datanodes are returned' - assert countDataNodesInTree(result) == expectedNumberOfNodesPerAnchor * NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA - where: 'the following data is used' - scenario | fetchDescendantsOption || expectedNumberOfNodesPerAnchor - 'no' | OMIT_DESCENDANTS || 1 - 'direct' | DIRECT_CHILDREN_ONLY || 7 - 'all' | INCLUDE_ALL_DESCENDANTS || 28 - } - - def 'Cps Path query across anchors with syntax error throws a CPS Path Exception.'() { - when: 'trying to execute a query with a syntax (parsing) error' - objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, 'cpsPath that cannot be parsed' , OMIT_DESCENDANTS, NO_PAGINATION) - then: 'a cps path exception is thrown' - thrown(CpsPathException) - } - - def 'Cps Path querys with all descendants including descendants that are list entries: #scenario.'() { - when: 'a query is executed to get a data node by the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) - then: 'correct number of datanodes are returned' - assert countDataNodesInTree(result) == expectedNumberOfDataNodes - where: - scenario | cpsPath || expectedNumberOfDataNodes - 'absolute path all list entries' | '/bookstore/categories' || 24 - 'absolute path 1 list entry by key' | '/bookstore/categories[@code="3"]' || 5 - 'absolute path 1 list entry by name' | '/bookstore/categories[@name="Comedy"]' || 5 - 'relative path all list entries' | '//categories' || 24 - 'relative path 1 list entry by key' | '//categories[@code="3"]' || 5 - 'relative path 1 list entry by leaf' | '//categories[@name="Comedy"]' || 5 - 'incomplete absolute path' | 
'/categories' || 0 - 'incomplete absolute 1 list entry' | '/categories[@code="3"]' || 0 - } - - def 'Cps Path query contains #wildcard.'() { - when: 'a query is executed with a wildcard in the given cps path' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) - then: 'no results are returned, as Cps Path query does not interpret wildcard characters' - assert result.isEmpty() - where: - wildcard | cpsPath - ' sql wildcard in parent path list index' | '/bookstore/categories[@code="%"]/books' - 'regex wildcard in parent path list index' | '/bookstore/categories[@code=".*"]/books' - ' sql wildcard in leaf-condition' | '/bookstore/categories[@code="1"]/books[@title="%"]' - 'regex wildcard in leaf-condition' | '/bookstore/categories[@code="1"]/books[@title=".*"]' - ' sql wildcard in text-condition' | '/bookstore/categories[@code="1"]/books/title[text()="%"]' - 'regex wildcard in text-condition' | '/bookstore/categories[@code="1"]/books/title[text()=".*"]' - ' sql wildcard in contains-condition' | '/bookstore/categories[@code="1"]/books[contains(@title, "%")]' - 'regex wildcard in contains-condition' | '/bookstore/categories[@code="1"]/books[contains(@title, ".*")]' - } - - def 'Cps Path query can return a data node containing [@ in xpath #scenario.'() { - given: 'a book with special characters [@ and ] in title' - cpsDataService.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/categories[@code='1']", '{"books": [ {"title":"[@hello=world]"} ] }', OffsetDateTime.now()) - when: 'a query is executed' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) - then: 'the node is returned' - assert result.size() == 1 - cleanup: 'the new datanode' - cpsDataService.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/categories[@code='1']/books[@title='[@hello=world]']", OffsetDateTime.now()) - 
where: - scenario || cpsPath - 'leaf-condition' || "/bookstore/categories[@code='1']/books[@title='[@hello=world]']" - 'text-condition' || "/bookstore/categories[@code='1']/books/title[text()='[@hello=world]']" - 'contains-condition' || "/bookstore/categories[@code='1']/books[contains(@title, '[@hello=world]')]" - } - - def 'Cps Path get and query can handle apostrophe inside #quotes.'() { - given: 'a book with special characters in title' - cpsDataService.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/categories[@code='1']", - '{"books": [ {"title":"I\'m escaping"} ] }', OffsetDateTime.now()) - when: 'a query is executed' - def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) - then: 'the node is returned' - assert result.size() == 1 - assert result[0].xpath == "/bookstore/categories[@code='1']/books[@title='I''m escaping']" - cleanup: 'the new datanode' - cpsDataService.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/categories[@code='1']/books[@title='I''m escaping']", OffsetDateTime.now()) - where: - quotes || cpsPath - 'single quotes' || "/bookstore/categories[@code='1']/books[@title='I''m escaping']" - 'double quotes' || '/bookstore/categories[@code="1"]/books[@title="I\'m escaping"]' - 'text-condition' || "/bookstore/categories[@code='1']/books/title[text()='I''m escaping']" - 'contains-condition' || "/bookstore/categories[@code='1']/books[contains(@title, 'I''m escaping')]" - } - - def 'Cps Path query across anchors using pagination option with #scenario.'() { - when: 'a query is executed to get a data nodes across anchors by the given CpsPath and pagination option' - def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, '/bookstore', OMIT_DESCENDANTS, new PaginationOption(pageIndex, pageSize)) - then: 'correct bookstore names are queried' - def bookstoreNames = result.collect { 
it.getLeaves().get('bookstore-name') } - assert bookstoreNames.toList() == expectedBookstoreNames - and: 'the correct number of page size is returned' - assert result.size() == expectedPageSize - and: 'the queried nodes have expected anchor names' - assert result.anchorName.toSet() == expectedAnchors.toSet() - where: 'the following data is used' - scenario | pageIndex | pageSize || expectedPageSize || expectedAnchors || expectedBookstoreNames - '1st page with one anchor' | 1 | 1 || 1 || [BOOKSTORE_ANCHOR_1] || ['Easons-1'] - '1st page with two anchor' | 1 | 2 || 2 || [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2] || ['Easons-1', 'Easons-2'] - '2nd page' | 2 | 1 || 1 || [BOOKSTORE_ANCHOR_2] || ['Easons-2'] - 'no 2nd page due to page size' | 2 | 2 || 0 || [] || [] - } - - def 'Cps Path query across anchors using pagination option for ancestor axis.'() { - when: 'a query is executed to get a data nodes across anchors by the given CpsPath and pagination option' - def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, '//books/ancestor::categories', INCLUDE_ALL_DESCENDANTS, new PaginationOption(1, 2)) - then: 'correct category codes are queried' - def categoryNames = result.collect { it.getLeaves().get('name') } - assert categoryNames.toSet() == ['Discount books', 'Computing', 'Comedy', 'Thriller', 'Children'].toSet() - and: 'the queried nodes have expected anchors' - assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2].toSet() - } - - def 'Count number of anchors for given dataspace name and cps path'() { - expect: '/bookstore is present in two anchors' - assert objectUnderTest.countAnchorsForDataspaceAndCpsPath(FUNCTIONAL_TEST_DATASPACE_1, '/bookstore') == 2 - } - - def 'Cps Path query across anchors using no pagination'() { - when: 'a query is executed to get a data nodes across anchors by the given CpsPath and pagination option' - def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, 
'/bookstore', OMIT_DESCENDANTS, NO_PAGINATION) - then: 'all bookstore names are queried' - def bookstoreNames = result.collect { it.getLeaves().get('bookstore-name') } - assert bookstoreNames.toSet() == ['Easons-1', 'Easons-2'].toSet() - and: 'the correct number of page size is returned' - assert result.size() == 2 - and: 'the queried nodes have expected bookstore names' - assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2].toSet() - } -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpBearerTokenPassthroughSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpBearerTokenPassthroughSpec.groovy deleted file mode 100644 index 664fca82e5..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpBearerTokenPassthroughSpec.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import org.onap.cps.integration.base.CpsIntegrationSpecBase -import org.springframework.http.HttpHeaders -import org.springframework.http.MediaType -import spock.util.concurrent.PollingConditions - -import static org.springframework.http.HttpMethod.DELETE -import static org.springframework.http.HttpMethod.GET -import static org.springframework.http.HttpMethod.PATCH -import static org.springframework.http.HttpMethod.POST -import static org.springframework.http.HttpMethod.PUT -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.request -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status - -class NcmpBearerTokenPassthroughSpec extends CpsIntegrationSpecBase { - - def setup() { - dmiDispatcher.moduleNamesPerCmHandleId['ch-1'] = ['M1', 'M2'] - registerCmHandle(DMI_URL, 'ch-1', NO_MODULE_SET_TAG) - } - - def cleanup() { - deregisterCmHandle(DMI_URL, 'ch-1') - } - - def 'Bearer token is passed from NCMP to DMI in pass-through data operations.'() { - when: 'a pass-through data request is sent to NCMP with a bearer token' - mvc.perform(request(httpMethod, '/ncmp/v1/ch/ch-1/data/ds/ncmp-datastore:passthrough-running') - .queryParam('resourceIdentifier', 'my-resource-id') - .contentType(MediaType.APPLICATION_JSON) - .content('{ "some-json": "data" }') - .header(HttpHeaders.AUTHORIZATION, 'Bearer some-bearer-token')) - .andExpect(status().is2xxSuccessful()) - - then: 'DMI has received request with bearer token' - assert dmiDispatcher.lastAuthHeaderReceived == 'Bearer some-bearer-token' - - where: 'all HTTP operations are applied' - httpMethod << [GET, POST, PUT, PATCH, DELETE] - } - - def 'Basic auth header is NOT passed from NCMP to DMI in pass-through data operations.'() { - when: 'a pass-through data request is sent to NCMP with 
basic authentication' - mvc.perform(request(httpMethod, '/ncmp/v1/ch/ch-1/data/ds/ncmp-datastore:passthrough-running') - .queryParam('resourceIdentifier', 'my-resource-id') - .contentType(MediaType.APPLICATION_JSON) - .content('{ "some-json": "data" }') - .header(HttpHeaders.AUTHORIZATION, 'Basic Y3BzdXNlcjpjcHNyMGNrcyE=')) - .andExpect(status().is2xxSuccessful()) - - then: 'DMI has received request with no authorization header' - assert dmiDispatcher.lastAuthHeaderReceived == null - - where: 'all HTTP operations are applied' - httpMethod << [GET, POST, PUT, PATCH, DELETE] - } - - def 'Bearer token is passed from NCMP to DMI in async batch pass-through data operation.'() { - when: 'a pass-through async data request is sent to NCMP with a bearer token' - def requestBody = """{"operations": [{ - "operation": "read", - "operationId": "operational-1", - "datastore": "ncmp-datastore:passthrough-running", - "resourceIdentifier": "my-resource-id", - "targetIds": ["ch-1"] - }]}""" - mvc.perform(request(POST, '/ncmp/v1/data') - .queryParam('topic', 'my-topic') - .contentType(MediaType.APPLICATION_JSON) - .content(requestBody) - .header(HttpHeaders.AUTHORIZATION, 'Bearer some-bearer-token')) - .andExpect(status().is2xxSuccessful()) - - then: 'DMI will receive the async request with bearer token' - new PollingConditions().within(3, () -> { - assert dmiDispatcher.lastAuthHeaderReceived == 'Bearer some-bearer-token' - }) - } - -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmHandleCreateSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmHandleCreateSpec.groovy deleted file mode 100644 index 26782708a5..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmHandleCreateSpec.groovy +++ /dev/null @@ -1,178 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation - * 
================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import org.apache.kafka.common.TopicPartition -import org.apache.kafka.common.serialization.StringDeserializer -import org.onap.cps.integration.KafkaTestContainer -import org.onap.cps.integration.base.CpsIntegrationSpecBase -import org.onap.cps.ncmp.api.inventory.NetworkCmProxyInventoryFacade -import org.onap.cps.ncmp.api.inventory.models.CmHandleRegistrationResponse -import org.onap.cps.ncmp.api.inventory.models.DmiPluginRegistration -import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle -import org.onap.cps.ncmp.events.lcm.v1.LcmEvent -import org.onap.cps.ncmp.impl.inventory.models.CmHandleState -import org.onap.cps.ncmp.impl.inventory.models.LockReasonCategory -import spock.util.concurrent.PollingConditions - -import java.time.Duration -import java.time.OffsetDateTime - -class NcmpCmHandleCreateSpec extends CpsIntegrationSpecBase { - - NetworkCmProxyInventoryFacade objectUnderTest - - def kafkaConsumer = KafkaTestContainer.getConsumer('ncmp-group', StringDeserializer.class) - - def setup() { - objectUnderTest = networkCmProxyInventoryFacade - } - - def 'CM Handle registration is successful.'() { - given: 'DMI will return modules when 
requested' - dmiDispatcher.moduleNamesPerCmHandleId['ch-1'] = ['M1', 'M2'] - - and: 'consumer subscribed to topic' - kafkaConsumer.subscribe(['ncmp-events']) - - when: 'a CM-handle is registered for creation' - def cmHandleToCreate = new NcmpServiceCmHandle(cmHandleId: 'ch-1') - def dmiPluginRegistration = new DmiPluginRegistration(dmiPlugin: DMI_URL, createdCmHandles: [cmHandleToCreate]) - def dmiPluginRegistrationResponse = objectUnderTest.updateDmiRegistrationAndSyncModule(dmiPluginRegistration) - - then: 'registration gives successful response' - assert dmiPluginRegistrationResponse.createdCmHandles == [CmHandleRegistrationResponse.createSuccessResponse('ch-1')] - - and: 'CM-handle is initially in ADVISED state' - assert CmHandleState.ADVISED == objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState - - and: 'CM-handle goes to READY state after module sync' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState - }) - - and: 'the messages is polled' - def message = kafkaConsumer.poll(Duration.ofMillis(10000)) - def records = message.records(new TopicPartition('ncmp-events', 0)) - - and: 'the newest lcm event notification is received with READY state' - def notificationMessage = jsonObjectMapper.convertJsonString(records.last().value().toString(), LcmEvent) - assert notificationMessage.event.newValues.cmHandleState.value() == 'READY' - - and: 'the CM-handle has expected modules' - assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences('ch-1').moduleName.sort() - - cleanup: 'deregister CM handle' - deregisterCmHandle(DMI_URL, 'ch-1') - } - - def 'CM Handle goes to LOCKED state when DMI gives error during module sync.'() { - given: 'DMI is not available to handle requests' - dmiDispatcher.isAvailable = false - - when: 'a CM-handle is registered for creation' - def cmHandleToCreate = new NcmpServiceCmHandle(cmHandleId: 
'ch-1') - def dmiPluginRegistration = new DmiPluginRegistration(dmiPlugin: DMI_URL, createdCmHandles: [cmHandleToCreate]) - objectUnderTest.updateDmiRegistrationAndSyncModule(dmiPluginRegistration) - - then: 'CM-handle goes to LOCKED state with reason MODULE_SYNC_FAILED' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState('ch-1') - assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED - assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_SYNC_FAILED - }) - - and: 'CM-handle has no modules' - assert objectUnderTest.getYangResourcesModuleReferences('ch-1').empty - - cleanup: 'deregister CM handle' - deregisterCmHandle(DMI_URL, 'ch-1') - } - - def 'Create a CM-handle with existing moduleSetTag.'() { - given: 'DMI will return modules when requested' - dmiDispatcher.moduleNamesPerCmHandleId = ['ch-1': ['M1', 'M2'], 'ch-2': ['M1', 'M3']] - and: 'existing CM-handles cm-1 with moduleSetTag "A", and cm-2 with moduleSetTag "B"' - registerCmHandle(DMI_URL, 'ch-1', 'A') - registerCmHandle(DMI_URL, 'ch-2', 'B') - - when: 'a CM-handle is registered for creation with moduleSetTag "B"' - def cmHandleToCreate = new NcmpServiceCmHandle(cmHandleId: 'ch-3', moduleSetTag: 'B') - objectUnderTest.updateDmiRegistrationAndSyncModule(new DmiPluginRegistration(dmiPlugin: DMI_URL, createdCmHandles: [cmHandleToCreate])) - - then: 'the CM-handle goes to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState('ch-3').cmHandleState - }) - - and: 'the CM-handle has expected moduleSetTag' - assert objectUnderTest.getNcmpServiceCmHandle('ch-3').moduleSetTag == 'B' - - and: 'the CM-handle has expected modules from module set "B": M1 and M3' - assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences('ch-3').moduleName.sort() - - cleanup: 
'deregister CM handles' - deregisterCmHandles(DMI_URL, ['ch-1', 'ch-2', 'ch-3']) - } - - def 'CM Handle retry after failed module sync.'() { - given: 'DMI is not initially available to handle requests' - dmiDispatcher.isAvailable = false - - when: 'CM-handles are registered for creation' - def cmHandlesToCreate = [new NcmpServiceCmHandle(cmHandleId: 'ch-1'), new NcmpServiceCmHandle(cmHandleId: 'ch-2')] - def dmiPluginRegistration = new DmiPluginRegistration(dmiPlugin: DMI_URL, createdCmHandles: cmHandlesToCreate) - objectUnderTest.updateDmiRegistrationAndSyncModule(dmiPluginRegistration) - then: 'CM-handles go to LOCKED state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState == CmHandleState.LOCKED - assert objectUnderTest.getCmHandleCompositeState('ch-2').cmHandleState == CmHandleState.LOCKED - }) - - when: 'we wait for LOCKED CM handle retry time (actually just subtract 3 minutes from handles lastUpdateTime)' - overrideCmHandleLastUpdateTime('ch-1', OffsetDateTime.now().minusMinutes(3)) - overrideCmHandleLastUpdateTime('ch-2', OffsetDateTime.now().minusMinutes(3)) - then: 'CM-handles go to ADVISED state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState == CmHandleState.ADVISED - assert objectUnderTest.getCmHandleCompositeState('ch-2').cmHandleState == CmHandleState.ADVISED - }) - - when: 'DMI will return expected modules' - dmiDispatcher.moduleNamesPerCmHandleId = ['ch-1': ['M1', 'M2'], 'ch-2': ['M1', 'M3']] - and: 'DMI is available for retry' - dmiDispatcher.isAvailable = true - then: 'CM-handles go to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState == CmHandleState.READY - assert objectUnderTest.getCmHandleCompositeState('ch-2').cmHandleState == 
CmHandleState.READY - }) - and: 'CM-handles have expected modules' - assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences('ch-1').moduleName.sort() - assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences('ch-2').moduleName.sort() - and: 'CM-handles have expected module set tags (blank)' - assert objectUnderTest.getNcmpServiceCmHandle('ch-1').moduleSetTag == '' - assert objectUnderTest.getNcmpServiceCmHandle('ch-2').moduleSetTag == '' - - cleanup: 'deregister CM handle' - deregisterCmHandles(DMI_URL, ['ch-1', 'ch-2']) - } -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmHandleUpgradeSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmHandleUpgradeSpec.groovy deleted file mode 100644 index 3a08cfd0c6..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmHandleUpgradeSpec.groovy +++ /dev/null @@ -1,177 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import org.onap.cps.integration.base.CpsIntegrationSpecBase -import org.onap.cps.ncmp.api.inventory.NetworkCmProxyInventoryFacade -import org.onap.cps.ncmp.api.inventory.models.CmHandleRegistrationResponse -import org.onap.cps.ncmp.api.inventory.models.DmiPluginRegistration -import org.onap.cps.ncmp.api.inventory.models.UpgradedCmHandles -import org.onap.cps.ncmp.impl.inventory.models.CmHandleState -import org.onap.cps.ncmp.impl.inventory.models.LockReasonCategory -import spock.util.concurrent.PollingConditions - -class NcmpCmHandleUpgradeSpec extends CpsIntegrationSpecBase { - - NetworkCmProxyInventoryFacade objectUnderTest - - static final CM_HANDLE_ID = 'ch-1' - static final CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG = 'ch-2' - - def setup() { - objectUnderTest = networkCmProxyInventoryFacade - } - - def 'Upgrade CM-handle with new moduleSetTag or no moduleSetTag.'() { - given: 'a CM-handle is created with expected initial modules: M1 and M2' - dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M2'] - registerCmHandle(DMI_URL, CM_HANDLE_ID, initialModuleSetTag) - assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() - - when: "the CM-handle is upgraded with given moduleSetTag '${updatedModuleSetTag}'" - def cmHandlesToUpgrade = new UpgradedCmHandles(cmHandles: [CM_HANDLE_ID], moduleSetTag: updatedModuleSetTag) - def dmiPluginRegistrationResponse = objectUnderTest.updateDmiRegistrationAndSyncModule( - new DmiPluginRegistration(dmiPlugin: DMI_URL, upgradedCmHandles: cmHandlesToUpgrade)) - - then: 'registration gives successful response' - assert dmiPluginRegistrationResponse.upgradedCmHandles == [CmHandleRegistrationResponse.createSuccessResponse(CM_HANDLE_ID)] - - and: 'CM-handle is in LOCKED state due to MODULE_UPGRADE' - 
def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID) - assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED - assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_UPGRADE - assert cmHandleCompositeState.lockReason.details == "Upgrade to ModuleSetTag: ${updatedModuleSetTag}" - - when: 'DMI will return different modules for upgrade: M1 and M3' - dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M3'] - - then: 'CM-handle goes to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID).cmHandleState - }) - - and: 'the CM-handle has expected moduleSetTag' - assert objectUnderTest.getNcmpServiceCmHandle(CM_HANDLE_ID).moduleSetTag == updatedModuleSetTag - - and: 'CM-handle has expected updated modules: M1 and M3' - assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() - - cleanup: 'deregister CM-handle' - deregisterCmHandle(DMI_URL, CM_HANDLE_ID) - - where: - initialModuleSetTag | updatedModuleSetTag - NO_MODULE_SET_TAG | NO_MODULE_SET_TAG - NO_MODULE_SET_TAG | 'new' - 'initial' | NO_MODULE_SET_TAG - 'initial' | 'new' - } - - def 'Upgrade CM-handle with existing moduleSetTag.'() { - given: 'DMI will return modules for registration' - dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M2'] - dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG] = ['M1', 'M3'] - and: "an existing CM-handle handle with moduleSetTag '${updatedModuleSetTag}'" - registerCmHandle(DMI_URL, CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG, updatedModuleSetTag) - assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG).moduleName.sort() - and: "a CM-handle with moduleSetTag '${initialModuleSetTag}' which will be upgraded" - registerCmHandle(DMI_URL, 
CM_HANDLE_ID, initialModuleSetTag) - assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() - - when: "CM-handle is upgraded to moduleSetTag '${updatedModuleSetTag}'" - def cmHandlesToUpgrade = new UpgradedCmHandles(cmHandles: [CM_HANDLE_ID], moduleSetTag: updatedModuleSetTag) - def dmiPluginRegistrationResponse = objectUnderTest.updateDmiRegistrationAndSyncModule( - new DmiPluginRegistration(dmiPlugin: DMI_URL, upgradedCmHandles: cmHandlesToUpgrade)) - - then: 'registration gives successful response' - assert dmiPluginRegistrationResponse.upgradedCmHandles == [CmHandleRegistrationResponse.createSuccessResponse(CM_HANDLE_ID)] - - and: 'CM-handle goes to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID).cmHandleState - }) - - and: 'the CM-handle has expected moduleSetTag' - assert objectUnderTest.getNcmpServiceCmHandle(CM_HANDLE_ID).moduleSetTag == updatedModuleSetTag - - and: 'CM-handle has expected updated modules: M1 and M3' - assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() - - cleanup: 'deregister CM-handle' - deregisterCmHandles(DMI_URL, [CM_HANDLE_ID, CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG]) - - where: - initialModuleSetTag | updatedModuleSetTag - NO_MODULE_SET_TAG | 'moduleSet2' - 'moduleSet1' | 'moduleSet2' - } - - def 'Skip upgrade of CM-handle with same moduleSetTag as before.'() { - given: 'an existing CM-handle with expected initial modules: M1 and M2' - dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M2'] - registerCmHandle(DMI_URL, CM_HANDLE_ID, 'same') - assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() - - when: 'CM-handle is upgraded with the same moduleSetTag' - def cmHandlesToUpgrade = new UpgradedCmHandles(cmHandles: [CM_HANDLE_ID], moduleSetTag: 
'same') - objectUnderTest.updateDmiRegistrationAndSyncModule( - new DmiPluginRegistration(dmiPlugin: DMI_URL, upgradedCmHandles: cmHandlesToUpgrade)) - - then: 'CM-handle remains in READY state' - assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID).cmHandleState - - and: 'the CM-handle has same moduleSetTag as before' - assert objectUnderTest.getNcmpServiceCmHandle(CM_HANDLE_ID).moduleSetTag == 'same' - - then: 'CM-handle has same modules as before: M1 and M2' - assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() - - cleanup: 'deregister CM-handle' - deregisterCmHandle(DMI_URL, CM_HANDLE_ID) - } - - def 'Upgrade of CM-handle fails due to DMI error.'() { - given: 'a CM-handle exists' - dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M2'] - registerCmHandle(DMI_URL, CM_HANDLE_ID, 'oldTag') - and: 'DMI is not available for upgrade' - dmiDispatcher.isAvailable = false - - when: 'the CM-handle is upgraded' - def cmHandlesToUpgrade = new UpgradedCmHandles(cmHandles: [CM_HANDLE_ID], moduleSetTag: 'newTag') - objectUnderTest.updateDmiRegistrationAndSyncModule( - new DmiPluginRegistration(dmiPlugin: DMI_URL, upgradedCmHandles: cmHandlesToUpgrade)) - - then: 'CM-handle goes to LOCKED state with reason MODULE_UPGRADE_FAILED' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID) - assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED - assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_UPGRADE_FAILED - }) - - and: 'the CM-handle has same moduleSetTag as before' - assert objectUnderTest.getNcmpServiceCmHandle(CM_HANDLE_ID).moduleSetTag == 'oldTag' - - cleanup: 'deregister CM-handle' - deregisterCmHandle(DMI_URL, CM_HANDLE_ID) - } - -} diff --git 
a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmNotificationSubscriptionSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmNotificationSubscriptionSpec.groovy deleted file mode 100644 index 49a4b4d60b..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpCmNotificationSubscriptionSpec.groovy +++ /dev/null @@ -1,112 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import org.onap.cps.integration.base.CpsIntegrationSpecBase -import org.onap.cps.ncmp.impl.cmnotificationsubscription.utils.CmSubscriptionPersistenceService -import org.springframework.beans.factory.annotation.Autowired - -import static org.onap.cps.ncmp.api.data.models.DatastoreType.PASSTHROUGH_RUNNING - -class NcmpCmNotificationSubscriptionSpec extends CpsIntegrationSpecBase { - - @Autowired - CmSubscriptionPersistenceService cmSubscriptionPersistenceService - - def 'Adding a new cm notification subscription'() { - given: 'there is no ongoing cm subscription for the following' - def datastoreType = PASSTHROUGH_RUNNING - def cmHandleId = 'ch-1' - def xpath = '/x/y' - assert cmSubscriptionPersistenceService. - getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size() == 0 - when: 'we add a new cm notification subscription' - cmSubscriptionPersistenceService.addCmSubscription(datastoreType, cmHandleId, xpath, - 'subId-1') - then: 'there is an ongoing cm subscription for that CM handle and xpath' - assert cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, xpath) - and: 'only one subscription id is related to now ongoing cm subscription' - assert cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size() == 1 - } - - def 'Adding a cm notification subscription to the already existing cm handle but non existing xpath'() { - given: 'an ongoing cm subscription with the following details' - def datastoreType = PASSTHROUGH_RUNNING - def cmHandleId = 'ch-1' - def existingXpath = '/x/y' - assert cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, existingXpath) - and: 'a non existing cm subscription with same datastore name and cm handle but different xpath' - def 
nonExistingXpath = '/x2/y2' - assert !cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, nonExistingXpath) - when: 'a new cm notification subscription is made for the existing cm handle and non existing xpath' - cmSubscriptionPersistenceService.addCmSubscription(datastoreType, cmHandleId, nonExistingXpath, - 'subId-2') - then: 'there is an ongoing cm subscription for that CM handle and xpath' - assert cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, nonExistingXpath) - and: 'only one subscription id is related to now ongoing cm subscription' - assert cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, nonExistingXpath).size() == 1 - } - - def 'Adding a cm notification subscription to the already existing cm handle and xpath'() { - given: 'an ongoing cm subscription with the following details' - def datastoreType = PASSTHROUGH_RUNNING - def cmHandleId = 'ch-1' - def xpath = '/x/y' - when: 'a new cm notification subscription is made for the SAME CM handle and xpath' - cmSubscriptionPersistenceService.addCmSubscription(datastoreType, cmHandleId, xpath, - 'subId-3') - then: 'it is added to the ongoing list of subscription ids' - def subscriptionIds = cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath) - assert subscriptionIds.size() == 2 - and: 'both subscription ids exists for the CM handle and xpath' - assert subscriptionIds.contains("subId-1") && subscriptionIds.contains("subId-3") - } - - def 'Removing cm notification subscriber among other subscribers'() { - given: 'an ongoing cm subscription with the following details' - def datastoreType = PASSTHROUGH_RUNNING - def cmHandleId = 'ch-1' - def xpath = '/x/y' - and: 'the number of subscribers is as follows' - def originalNumberOfSubscribers = - cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size() - when: 'a subscriber is 
removed' - cmSubscriptionPersistenceService.removeCmSubscription(datastoreType, cmHandleId, xpath, 'subId-3') - then: 'the number of subscribers is reduced by 1' - def updatedNumberOfSubscribers = cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size() - assert updatedNumberOfSubscribers == originalNumberOfSubscribers - 1 - } - - def 'Removing the LAST cm notification subscriber for a given cm handle, datastore and xpath'() { - given: 'an ongoing cm subscription with the following details' - def datastoreType = PASSTHROUGH_RUNNING - def cmHandleId = 'ch-1' - def xpath = '/x/y' - and: 'there is only one subscriber' - assert cmSubscriptionPersistenceService - .getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size() == 1 - when: 'only subscriber is removed' - cmSubscriptionPersistenceService.removeCmSubscription(datastoreType, cmHandleId, xpath, 'subId-1') - then: 'there are no longer any subscriptions for the cm handle, datastore and xpath' - assert !cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, xpath) - } - -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpRestApiSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpRestApiSpec.groovy deleted file mode 100644 index fbfebabd7f..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/NcmpRestApiSpec.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2024 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import static org.hamcrest.Matchers.containsInAnyOrder -import static org.hamcrest.Matchers.hasSize -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status - -import org.onap.cps.integration.base.CpsIntegrationSpecBase -import org.springframework.http.MediaType -import spock.util.concurrent.PollingConditions - -class NcmpRestApiSpec extends CpsIntegrationSpecBase { - - def 'Register CM Handles using REST API.'() { - given: 'DMI will return modules' - dmiDispatcher.moduleNamesPerCmHandleId = [ - 'ch-1': ['M1', 'M2'], - 'ch-2': ['M1', 'M2'], - 'ch-3': ['M1', 'M3'] - ] - when: 'a POST request is made to register the CM Handles' - def requestBody = '{"dmiPlugin":"'+DMI_URL+'","createdCmHandles":[{"cmHandle":"ch-1"},{"cmHandle":"ch-2"},{"cmHandle":"ch-3"}]}' - mvc.perform(post('/ncmpInventory/v1/ch').contentType(MediaType.APPLICATION_JSON).content(requestBody)) - .andExpect(status().is2xxSuccessful()) - then: 'CM-handles go to READY state' - new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { - (1..3).each { - mvc.perform(get('/ncmp/v1/ch/ch-'+it)) - 
.andExpect(status().isOk()) - .andExpect(jsonPath('$.state.cmHandleState').value('READY')) - } - }) - } - - def 'Search for CM Handles by module using REST API.'() { - given: 'a JSON request body containing search parameter' - def requestBodyWithModuleCondition = """{ - "cmHandleQueryParameters": [ - { - "conditionName": "hasAllModules", - "conditionParameters": [ {"moduleName": "%s"} ] - } - ] - }""".formatted(moduleName) - expect: "a search for module ${moduleName} returns expected CM handles" - mvc.perform(post('/ncmp/v1/ch/id-searches').contentType(MediaType.APPLICATION_JSON).content(requestBodyWithModuleCondition)) - .andExpect(status().is2xxSuccessful()) - .andExpect(jsonPath('$[*]', containsInAnyOrder(expectedCmHandles.toArray()))) - .andExpect(jsonPath('$', hasSize(expectedCmHandles.size()))); - where: - moduleName || expectedCmHandles - 'M1' || ['ch-1', 'ch-2', 'ch-3'] - 'M2' || ['ch-1', 'ch-2'] - 'M3' || ['ch-3'] - } - - def 'De-register CM handles using REST API.'() { - when: 'a POST request is made to deregister the CM Handle' - def requestBody = '{"dmiPlugin":"'+DMI_URL+'", "removedCmHandles": ["ch-1", "ch-2", "ch-3"]}' - mvc.perform(post('/ncmpInventory/v1/ch').contentType(MediaType.APPLICATION_JSON).content(requestBody)) - .andExpect(status().is2xxSuccessful()) - then: 'the CM handles are not found using GET' - (1..3).each { - mvc.perform(get('/ncmp/v1/ch/ch-'+it)).andExpect(status().is4xxClientError()) - } - } -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/SessionManagerIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/SessionManagerIntegrationSpec.groovy deleted file mode 100644 index e0a2602b23..0000000000 --- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/SessionManagerIntegrationSpec.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2022-2023 
Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.integration.functional - -import org.onap.cps.integration.base.FunctionalSpecBase -import org.onap.cps.spi.exceptions.SessionManagerException -import org.onap.cps.spi.utils.SessionManager - -class SessionManagerIntegrationSpec extends FunctionalSpecBase { - - SessionManager objectUnderTest - - def shortTimeoutForTesting = 300L - def sessionId - - def setup() { - objectUnderTest = sessionManager - sessionId = objectUnderTest.startSession() - } - - def cleanup(){ - objectUnderTest.closeSession(sessionId, objectUnderTest.WITH_COMMIT) - } - - def 'Lock anchor.'(){ - when: 'session tries to acquire anchor lock by passing anchor entity details' - objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting) - then: 'no exception is thrown' - noExceptionThrown() - } - - def 'Attempt to lock anchor when another session is holding the lock.'(){ - given: 'another session that holds an anchor lock' - def otherSessionId = objectUnderTest.startSession() - objectUnderTest.lockAnchor(otherSessionId,FUNCTIONAL_TEST_DATASPACE_1,BOOKSTORE_ANCHOR_1,shortTimeoutForTesting) - when: 'a session tries to acquire the same anchor lock' 
- objectUnderTest.lockAnchor(sessionId,FUNCTIONAL_TEST_DATASPACE_1,BOOKSTORE_ANCHOR_1,shortTimeoutForTesting) - then: 'a session manager exception is thrown specifying operation reached timeout' - def thrown = thrown(SessionManagerException) - thrown.message.contains('Timeout') - then: 'when the other session holding the lock is closed, lock can finally be acquired' - objectUnderTest.closeSession(otherSessionId, objectUnderTest.WITH_COMMIT) - objectUnderTest.lockAnchor(sessionId,FUNCTIONAL_TEST_DATASPACE_1,BOOKSTORE_ANCHOR_1,shortTimeoutForTesting) - } - - def 'Lock anchor twice using the same session.'(){ - given: 'session that already holds an anchor lock' - objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting) - when: 'same session tries to acquire same anchor lock' - objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting) - then: 'no exception is thrown' - noExceptionThrown() - } - -} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/AnchorServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/AnchorServiceIntegrationSpec.groovy new file mode 100644 index 0000000000..240ff5114b --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/AnchorServiceIntegrationSpec.groovy @@ -0,0 +1,121 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023-2024 Nordix Foundation + * Modifications Copyright (C) 2024 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.cps + +import java.time.OffsetDateTime + +import org.onap.cps.api.CpsAnchorService +import org.onap.cps.integration.base.FunctionalSpecBase +import org.onap.cps.spi.FetchDescendantsOption +import org.onap.cps.spi.exceptions.AlreadyDefinedException +import org.onap.cps.spi.exceptions.AnchorNotFoundException +import org.onap.cps.utils.ContentType + +class AnchorServiceIntegrationSpec extends FunctionalSpecBase { + + CpsAnchorService objectUnderTest + + def setup() { objectUnderTest = cpsAnchorService } + + def 'Anchor CRUD operations.'() { + when: 'an anchor is created' + objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'newAnchor') + then: 'the anchor be read' + assert objectUnderTest.getAnchor(GENERAL_TEST_DATASPACE, 'newAnchor').name == 'newAnchor' + and: 'it can be deleted' + objectUnderTest.deleteAnchor(GENERAL_TEST_DATASPACE,'newAnchor') + then: 'the anchor no longer exists i.e. 
an exception is thrown if an attempt is made to retrieve it' + def thrown = null + try { + objectUnderTest.getAnchor(GENERAL_TEST_DATASPACE, 'newAnchor') + } catch(Exception exception) { + thrown = exception + } + assert thrown instanceof AnchorNotFoundException + } + + def 'Filtering multiple anchors.'() { + when: '2 anchors with bookstore schema set are created' + objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'anchor1') + objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'anchor2') + and: '1 anchor with "other" schema set is created' + createStandardBookStoreSchemaSet(GENERAL_TEST_DATASPACE, 'otherSchemaSet') + objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, 'otherSchemaSet', 'anchor3') + then: 'there are 3 anchors in the general test database' + assert objectUnderTest.getAnchors(GENERAL_TEST_DATASPACE).size() == 3 + and: 'there are 2 anchors associated with bookstore schema set' + assert objectUnderTest.getAnchorsBySchemaSetName(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET).size() == 2 + and: 'there is 1 anchor associated with other schema set' + assert objectUnderTest.getAnchorsBySchemaSetName(GENERAL_TEST_DATASPACE, 'otherSchemaSet').size() == 1 + } + + def 'Querying anchor(name)s (depends on previous test!).'() { + expect: 'there are now 3 anchors using the "stores" module (both schema sets use the same modules) ' + assert objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['stores', 'bookstore-types']).size() == 3 + and: 'there are no anchors using both "stores" and a "unused-model"' + assert objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['stores', 'unused-model']).size() == 0 + } + + def 'Duplicate anchors.'() { + given: 'an anchor is created' + objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'newAnchor') + when: 'attempt to create another anchor with the same name' + objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'newAnchor') + 
then: 'an exception is thrown that the anchor already is defined' + thrown(AlreadyDefinedException) + cleanup: + objectUnderTest.deleteAnchor(GENERAL_TEST_DATASPACE, 'newAnchor') + } + + def 'Query anchors without any known modules'() { + when: 'querying for anchors with #scenario' + def result = objectUnderTest.queryAnchorNames(GENERAL_TEST_DATASPACE, ['unknownModule']) + then: 'an empty result is returned (no error)' + assert result == [] + } + + def 'Update anchor schema set.'() { + when: 'a new schema set with tree yang model is created' + def newTreeYangModelAsString = readResourceDataFile('tree/new-test-tree.yang') + cpsModuleService.createSchemaSet(GENERAL_TEST_DATASPACE, 'newTreeSchemaSet', [tree: newTreeYangModelAsString]) + then: 'an anchor with new schema set is created' + objectUnderTest.createAnchor(GENERAL_TEST_DATASPACE, 'newTreeSchemaSet', 'anchor4') + and: 'the new tree datanode is saved' + def treeJsonData = readResourceDataFile('tree/new-test-tree.json') + cpsDataService.saveData(GENERAL_TEST_DATASPACE, 'anchor4', treeJsonData, OffsetDateTime.now()) + and: 'saved tree data node can be retrieved by its normalized xpath' + def branchName = cpsDataService.getDataNodes(GENERAL_TEST_DATASPACE, 'anchor4', "/test-tree/branch", FetchDescendantsOption.DIRECT_CHILDREN_ONLY)[0].leaves['name'] + assert branchName == 'left' + and: 'a another schema set with updated tree yang model is created' + def updatedTreeYangModelAsString = readResourceDataFile('tree/updated-test-tree.yang') + cpsModuleService.createSchemaSet(GENERAL_TEST_DATASPACE, 'anotherTreeSchemaSet', [tree: updatedTreeYangModelAsString]) + and: 'anchor4 schema set is updated with another schema set successfully' + objectUnderTest.updateAnchorSchemaSet(GENERAL_TEST_DATASPACE, 'anchor4', 'anotherTreeSchemaSet') + when: 'updated tree data node with new leaves' + def updatedTreeJsonData = readResourceDataFile('tree/updated-test-tree.json') + cpsDataService.updateNodeLeaves(GENERAL_TEST_DATASPACE, 
"anchor4", "/test-tree/branch[@name='left']", updatedTreeJsonData, OffsetDateTime.now(), ContentType.JSON) + then: 'updated tree data node can be retrieved by its normalized xpath' + def birdsName = cpsDataService.getDataNodes(GENERAL_TEST_DATASPACE, 'anchor4',"/test-tree/branch[@name='left']/nest", FetchDescendantsOption.DIRECT_CHILDREN_ONLY)[0].leaves['birds'] as List + assert birdsName.size() == 3 + assert birdsName.containsAll('Night Owl', 'Raven', 'Crow') + } +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataServiceIntegrationSpec.groovy new file mode 100644 index 0000000000..a488b3b836 --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataServiceIntegrationSpec.groovy @@ -0,0 +1,649 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023-2024 Nordix Foundation + * Modifications Copyright (C) 2023-2024 TechMahindra Ltd. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.cps + +import org.onap.cps.api.CpsDataService +import org.onap.cps.integration.base.FunctionalSpecBase +import org.onap.cps.spi.FetchDescendantsOption +import org.onap.cps.spi.exceptions.AlreadyDefinedException +import org.onap.cps.spi.exceptions.AnchorNotFoundException +import org.onap.cps.spi.exceptions.CpsAdminException +import org.onap.cps.spi.exceptions.CpsPathException +import org.onap.cps.spi.exceptions.DataNodeNotFoundException +import org.onap.cps.spi.exceptions.DataNodeNotFoundExceptionBatch +import org.onap.cps.spi.exceptions.DataValidationException +import org.onap.cps.spi.exceptions.DataspaceNotFoundException +import org.onap.cps.spi.model.DeltaReport +import org.onap.cps.utils.ContentType + +import static org.onap.cps.spi.FetchDescendantsOption.DIRECT_CHILDREN_ONLY +import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS +import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS + +class DataServiceIntegrationSpec extends FunctionalSpecBase { + + CpsDataService objectUnderTest + def originalCountBookstoreChildNodes + def originalCountBookstoreTopLevelListNodes + + def setup() { + objectUnderTest = cpsDataService + originalCountBookstoreChildNodes = countDataNodesInBookstore() + originalCountBookstoreTopLevelListNodes = countTopLevelListDataNodesInBookstore() + } + + def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() { + when: 'get data nodes for bookstore container' + def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', fetchDescendantsOption) + then: 'the tree consist ouf of #expectNumberOfDataNodes data nodes' + assert countDataNodesInTree(result) == expectNumberOfDataNodes + and: 'the top level data node has the expected attribute and value' + assert 
result.leaves['bookstore-name'] == ['Easons-1'] + and: 'they are from the correct dataspace' + assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1] + and: 'they are from the correct anchor' + assert result.anchorName == [BOOKSTORE_ANCHOR_1] + where: 'the following option is used' + fetchDescendantsOption || expectNumberOfDataNodes + OMIT_DESCENDANTS || 1 + DIRECT_CHILDREN_ONLY || 7 + INCLUDE_ALL_DESCENDANTS || 28 + new FetchDescendantsOption(2) || 28 + } + + def 'Read bookstore top-level container(s) using "root" path variations.'() { + when: 'get data nodes for bookstore container' + def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, root, OMIT_DESCENDANTS) + then: 'the tree consist correct number of data nodes' + assert countDataNodesInTree(result) == 2 + and: 'the top level data node has the expected number of leaves' + assert result.leaves.size() == 2 + where: 'the following variations of "root" are used' + root << [ '/', '' ] + } + + def 'Read data nodes with error: #cpsPath'() { + when: 'attempt to get data nodes using invalid path' + objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, DIRECT_CHILDREN_ONLY) + then: 'a #expectedException is thrown' + thrown(expectedException) + where: + cpsPath || expectedException + 'invalid path' || CpsPathException + '/non-existing-path' || DataNodeNotFoundException + } + + def 'Read (multiple) data nodes (batch) with #cpsPath'() { + when: 'attempt to get data nodes using invalid path' + objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ cpsPath ], DIRECT_CHILDREN_ONLY) + then: 'no exception is thrown' + noExceptionThrown() + where: + cpsPath << [ 'invalid path', '/non-existing-path' ] + } + + def 'Get data nodes error scenario #scenario'() { + when: 'attempt to retrieve data nodes' + objectUnderTest.getDataNodes(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS) + then: 'expected exception is 
thrown' + thrown(expectedException) + where: 'following data is used' + scenario | dataspaceName | anchorName | xpath || expectedException + 'non existent dataspace' | 'non-existent' | 'not-relevant' | '/not-relevant' || DataspaceNotFoundException + 'non existent anchor' | FUNCTIONAL_TEST_DATASPACE_1 | 'non-existent' | '/not-relevant' || AnchorNotFoundException + 'non-existent xpath' | FUNCTIONAL_TEST_DATASPACE_1 | BOOKSTORE_ANCHOR_1| '/non-existing' || DataNodeNotFoundException + 'invalid-dataspace' | 'Invalid dataspace' | 'not-relevant' | '/not-relevant' || DataValidationException + 'invalid-dataspace' | FUNCTIONAL_TEST_DATASPACE_1 | 'Invalid Anchor' | '/not-relevant' || DataValidationException + } + + def 'Delete root data node.'() { + when: 'the "root" is deleted' + objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ '/' ], now) + and: 'attempt to get the top level data node' + objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY) + then: 'an datanode not found exception is thrown' + thrown(DataNodeNotFoundException) + cleanup: + restoreBookstoreDataAnchor(1) + } + + def 'Get whole list data' () { + def xpathForWholeList = "/bookstore/categories" + when: 'get data nodes for bookstore container' + def dataNodes = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, xpathForWholeList, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + then: 'the tree consist ouf of #expectNumberOfDataNodes data nodes' + assert dataNodes.size() == 5 + and: 'each datanode contains the list node xpath partially in its xpath' + dataNodes.each {dataNode -> + assert dataNode.xpath.contains(xpathForWholeList) + } + } + + def 'Read (multiple) data nodes with #scenario' () { + when: 'attempt to get data nodes using multiple valid xpaths' + def dataNodes = objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, xpath, OMIT_DESCENDANTS) + 
then: 'expected numer of data nodes are returned' + dataNodes.size() == expectedNumberOfDataNodes + where: 'the following data was used' + scenario | xpath | expectedNumberOfDataNodes + 'container-node xpath' | ['/bookstore'] | 1 + 'list-item' | ['/bookstore/categories[@code=1]'] | 1 + 'parent-list xpath' | ['/bookstore/categories'] | 5 + 'child-list xpath' | ['/bookstore/categories[@code=1]/books'] | 2 + 'both parent and child list xpath' | ['/bookstore/categories', '/bookstore/categories[@code=1]/books'] | 7 + } + + def 'Add and Delete a (container) data node using #scenario.'() { + when: 'the new datanode is saved' + objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , parentXpath, json, now) + then: 'it can be retrieved by its normalized xpath' + def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, normalizedXpathToNode, DIRECT_CHILDREN_ONLY) + assert result.size() == 1 + assert result[0].xpath == normalizedXpathToNode + and: 'there is now one extra datanode' + assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore() + when: 'the new datanode is deleted' + objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, normalizedXpathToNode, now) + then: 'the original number of data nodes is restored' + assert originalCountBookstoreChildNodes == countDataNodesInBookstore() + where: + scenario | parentXpath | json || normalizedXpathToNode + 'normalized parent xpath' | '/bookstore' | '{"webinfo": {"domain-name":"ourbookstore.com", "contact-email":"info@ourbookstore.com" }}' || "/bookstore/webinfo" + 'non-normalized parent xpath' | '/bookstore/categories[ @code="1"]' | '{"books": {"title":"new" }}' || "/bookstore/categories[@code='1']/books[@title='new']" + } + + def 'Attempt to create a top level data node using root.'() { + given: 'a new anchor' + cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET, 'newAnchor1'); + when: 'attempt to save new 
top level datanode' + def json = '{"bookstore": {"bookstore-name": "New Store"} }' + objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, 'newAnchor1' , '/', json, now) + then: 'since there is no data a data node not found exception is thrown' + thrown(DataNodeNotFoundException) + } + + def 'Attempt to save top level data node that already exist'() { + when: 'attempt to save already existing top level node' + def json = '{"bookstore": {} }' + objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, json, now) + then: 'an exception that (one cps paths is) already defined is thrown ' + def exceptionThrown = thrown(AlreadyDefinedException) + exceptionThrown.alreadyDefinedObjectNames == ['/bookstore' ] as Set + cleanup: + restoreBookstoreDataAnchor(1) + } + + def 'Delete a single datanode with invalid path.'() { + when: 'attempt to delete a single datanode with invalid path' + objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/invalid path', now) + then: 'a cps path parser exception is thrown' + thrown(CpsPathException) + } + + def 'Delete multiple data nodes with invalid path.'() { + when: 'attempt to delete datanode collection with invalid path' + objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, ['/invalid path'], now) + then: 'the error is silently ignored' + noExceptionThrown() + } + + def 'Delete single data node with non-existing path.'() { + when: 'attempt to delete a single datanode non-existing invalid path' + objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/does/not/exist', now) + then: 'a datanode not found exception is thrown' + thrown(DataNodeNotFoundException) + } + + def 'Delete multiple data nodes with non-existing path(s).'() { + when: 'attempt to delete a single datanode non-existing invalid path' + objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, ['/does/not/exist'], now) + then: 'a datanode not found 
(batch) exception is thrown' + thrown(DataNodeNotFoundExceptionBatch) + } + + def 'Add and Delete top-level list (element) data nodes with root node.'() { + given: 'a new (multiple-data-tree:invoice) datanodes' + def json = '{"bookstore-address":[{"bookstore-name":"Easons","address":"Bangalore,India","postal-code":"560043"}]}' + when: 'the new list elements are saved' + objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/', json, now) + then: 'they can be retrieved by their xpaths' + objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore-address[@bookstore-name="Easons"]', INCLUDE_ALL_DESCENDANTS) + and: 'there is one extra datanode' + assert originalCountBookstoreTopLevelListNodes + 1 == countTopLevelListDataNodesInBookstore() + when: 'the new elements are deleted' + objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore-address[@bookstore-name="Easons"]', now) + then: 'the original number of datanodes is restored' + assert originalCountBookstoreTopLevelListNodes == countTopLevelListDataNodesInBookstore() + } + + def 'Add and Delete list (element) data nodes.'() { + given: 'two new (categories) data nodes' + def json = '{"categories": [ {"code":"new1"}, {"code":"new2" } ] }' + when: 'the new list elements are saved' + objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) + then: 'they can be retrieved by their xpaths' + objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1 + objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1 + and: 'there are now two extra data nodes' + assert originalCountBookstoreChildNodes + 2 == countDataNodesInBookstore() + when: 'the new elements are deleted' + 
objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now) + objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now) + then: 'the original number of data nodes is restored' + assert originalCountBookstoreChildNodes == countDataNodesInBookstore() + } + + def 'Add list (element) data nodes that already exist.'() { + given: 'two (categories) data nodes, one new and one existing' + def json = '{"categories": [ {"code":"1"}, {"code":"new1"} ] }' + when: 'attempt to save the list element' + objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) + then: 'an exception that (one cps paths is) already defined is thrown ' + def exceptionThrown = thrown(AlreadyDefinedException) + exceptionThrown.alreadyDefinedObjectNames == ['/bookstore/categories[@code=\'1\']' ] as Set + and: 'there is now one extra data nodes' + assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore() + cleanup: + restoreBookstoreDataAnchor(1) + } + + def 'Add and Delete list (element) data nodes using lists specific method.'() { + given: 'a new (categories) data nodes' + def json = '{"categories": [ {"code":"new1"} ] }' + and: 'the new list element is saved' + objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) + when: 'the new element is deleted' + objectUnderTest.deleteListOrListElement(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now) + then: 'the original number of data nodes is restored' + assert originalCountBookstoreChildNodes == countDataNodesInBookstore() + } + + def 'Add and Delete a batch of list element data nodes.'() { + given: 'two new (categories) data nodes in a single batch' + def json = '{"categories": [ {"code":"new1"}, {"code":"new2"} ] }' + when: 'the batches of new list element(s) are 
saved' + objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) + then: 'they can be retrieved by their xpaths' + assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1 + assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1 + and: 'there are now two extra data nodes' + assert originalCountBookstoreChildNodes + 2 == countDataNodesInBookstore() + when: 'the new elements are deleted' + objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now) + objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now) + then: 'the original number of data nodes is restored' + assert originalCountBookstoreChildNodes == countDataNodesInBookstore() + } + + def 'Add and Delete a batch of list element data nodes with partial success.'() { + given: 'one existing and one new (categories) data nodes in a single batch' + def json = '{"categories": [ {"code":"new1"}, {"code":"1"} ] }' + when: 'the batches of new list element(s) are saved' + objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) + then: 'an already defined (batch) exception is thrown for the existing path' + def exceptionThrown = thrown(AlreadyDefinedException) + assert exceptionThrown.alreadyDefinedObjectNames == ['/bookstore/categories[@code=\'1\']' ] as Set + and: 'there is now one extra data node' + assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore() + cleanup: + restoreBookstoreDataAnchor(1) + } + + def 'Attempt to add empty lists.'() { + when: 'the batches of new list element(s) are saved' + objectUnderTest.replaceListContent(FUNCTIONAL_TEST_DATASPACE_1, 
BOOKSTORE_ANCHOR_1 , '/bookstore', [ ], now) + then: 'an admin exception is thrown' + thrown(CpsAdminException) + } + + def 'Add child error scenario: #scenario.'() { + when: 'attempt to add a child data node with #scenario' + objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, parentXpath, json, now) + then: 'a #expectedException is thrown' + thrown(expectedException) + where: 'the following data is used' + scenario | parentXpath | json || expectedException + 'parent does not exist' | '/bookstore/categories[@code="unknown"]' | '{"books": [ {"title":"new"} ] } ' || DataNodeNotFoundException + 'already existing child' | '/bookstore' | '{"categories": [ {"code":"1"} ] }' || AlreadyDefinedException + } + + def 'Add multiple child data nodes with partial success.'() { + given: 'one existing and one new list element' + def json = '{"categories": [ {"code":"1"}, {"code":"new"} ] }' + when: 'attempt to add the elements' + objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now) + then: 'an already defined (batch) exception is thrown for the existing path' + def thrown = thrown(AlreadyDefinedException) + assert thrown.alreadyDefinedObjectNames == [ "/bookstore/categories[@code='1']" ] as Set + and: 'the new data node has been added i.e. 
can be retrieved' + assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new"]', DIRECT_CHILDREN_ONLY).size() == 1 + } + + def 'Replace list content #scenario.'() { + given: 'the bookstore categories 1 and 2 exist and have at least 1 child each ' + assert countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="1"]', DIRECT_CHILDREN_ONLY)) > 1 + assert countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="2"]', DIRECT_CHILDREN_ONLY)) > 1 + when: 'the categories list is replaced with just category "1" and without child nodes (books)' + def json = '{"categories": [ {"code":"' +categoryCode + '"' + childJson + '} ] }' + objectUnderTest.replaceListContent(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now) + then: 'the new replaced category can be retrieved but has no children anymore' + assert expectedNumberOfDataNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="' +categoryCode + '"]', DIRECT_CHILDREN_ONLY)) + when: 'attempt to retrieve a category (code) not in the new list' + objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="2"]', DIRECT_CHILDREN_ONLY) + then: 'a datanode not found exception occurs' + thrown(DataNodeNotFoundException) + cleanup: + restoreBookstoreDataAnchor(1) + where: 'the following data is used' + scenario | categoryCode | childJson || expectedNumberOfDataNodes + 'existing code, no children' | '1' | '' || 1 + 'existing code, new child' | '1' | ', "books" : [ { "title": "New Book" } ]' || 2 + 'existing code, existing child' | '1' | ', "books" : [ { "title": "Matilda" } ]' || 2 + 'new code, new child' | 'new' | ', "books" : [ { "title": "New Book" } ]' || 2 + } + + def 'Update 
data node leaves for node that has no leaves (yet).'() { + given: 'new (webinfo) datanode without leaves' + def json = '{"webinfo": {} }' + objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) + when: 'update is performed to add a leaf' + def updatedJson = '{"webinfo": {"domain-name":"new leaf data"}}' + objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore", updatedJson, now, ContentType.JSON) + then: 'the updated data nodes are retrieved' + def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/webinfo", INCLUDE_ALL_DESCENDANTS) + and: 'the leaf value is updated as expected' + assert result.leaves['domain-name'] == ['new leaf data'] + cleanup: + restoreBookstoreDataAnchor(1) + } + + def 'Update multiple data leaves error scenario: #scenario.'() { + when: 'attempt to update data node for #scenario' + objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, xpath, 'irrelevant json data', now, ContentType.JSON) + then: 'a #expectedException is thrown' + thrown(expectedException) + where: 'the following data is used' + scenario | dataspaceName | anchorName | xpath || expectedException + 'invalid dataspace name' | 'Invalid Dataspace' | 'not-relevant' | '/not relevant' || DataValidationException + 'invalid anchor name' | FUNCTIONAL_TEST_DATASPACE_1 | 'INVALID ANCHOR' | '/not relevant' || DataValidationException + 'non-existing dataspace' | 'non-existing-dataspace' | 'not-relevant' | '/not relevant' || DataspaceNotFoundException + 'non-existing anchor' | FUNCTIONAL_TEST_DATASPACE_1 | 'non-existing-anchor' | '/not relevant' || AnchorNotFoundException + 'non-existing-xpath' | FUNCTIONAL_TEST_DATASPACE_1 | BOOKSTORE_ANCHOR_1 | '/non-existing' || DataValidationException + } + + def 'Update data nodes and descendants.'() { + given: 'some web info for the bookstore' + def json = '{"webinfo": {"domain-name":"ourbookstore.com" 
,"contact-email":"info@ourbookstore.com" }}' + objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now) + when: 'the webinfo (container) is updated' + json = '{"webinfo": {"domain-name":"newdomain.com" ,"contact-email":"info@newdomain.com" }}' + objectUnderTest.updateDataNodeAndDescendants(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now) + then: 'webinfo has been updated with teh new details' + def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', DIRECT_CHILDREN_ONLY) + result.leaves.'domain-name'[0] == 'newdomain.com' + result.leaves.'contact-email'[0] == 'info@newdomain.com' + cleanup: + restoreBookstoreDataAnchor(1) + } + + def 'Update bookstore top-level container data node.'() { + when: 'the bookstore top-level container is updated' + def json = '{ "bookstore": { "bookstore-name": "new bookstore" }}' + objectUnderTest.updateDataNodeAndDescendants(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/', json, now) + then: 'bookstore name has been updated' + def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY) + result.leaves.'bookstore-name'[0] == 'new bookstore' + cleanup: + restoreBookstoreDataAnchor(1) + } + + def 'Update multiple data node leaves.'() { + given: 'Updated json for bookstore data' + def jsonData = "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda'}}" + when: 'update is performed for leaves' + objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code='1']", jsonData, now, ContentType.JSON) + then: 'the updated data nodes are retrieved' + def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code=1]/books[@title='Matilda']", INCLUDE_ALL_DESCENDANTS) + and: 'the leaf values are updated as expected' + assert 
result[0].leaves['lang'] == 'English/French' + assert result[0].leaves['price'] == 100 + cleanup: + restoreBookstoreDataAnchor(2) + } + + def 'Order of leaf-list elements is preserved when "ordered-by user" is set in the YANG model.'() { + given: 'Updated json for bookstore data' + def jsonData = "{'book-store:books':{'title':'Matilda', 'authors': ['beta', 'alpha', 'gamma', 'delta']}}" + when: 'update is performed for leaves' + objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code='1']", jsonData, now, ContentType.JSON) + and: 'the updated data nodes are retrieved' + def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code=1]/books[@title='Matilda']", INCLUDE_ALL_DESCENDANTS) + then: 'the leaf-list values have expected order' + assert result[0].leaves['authors'] == ['beta', 'alpha', 'gamma', 'delta'] + cleanup: + restoreBookstoreDataAnchor(2) + } + + def 'Leaf-list elements are sorted when "ordered-by user" is not set in the YANG model.'() { + given: 'Updated json for bookstore data' + def jsonData = "{'book-store:books':{'title':'Matilda', 'editions': [2011, 1988, 2001, 2022, 2025]}}" + when: 'update is performed for leaves' + objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code='1']", jsonData, now, ContentType.JSON) + and: 'the updated data nodes are retrieved' + def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code=1]/books[@title='Matilda']", INCLUDE_ALL_DESCENDANTS) + then: 'the leaf-list values have natural order' + assert result[0].leaves['editions'] == [1988, 2001, 2011, 2022, 2025] + cleanup: + restoreBookstoreDataAnchor(2) + } + + def 'Get delta between 2 anchors'() { + when: 'attempt to get delta report between anchors' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, 
BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, '/', OMIT_DESCENDANTS) + then: 'delta report contains expected number of changes' + result.size() == 3 + and: 'delta report contains UPDATE action with expected xpath' + assert result[0].getAction() == 'update' + assert result[0].getXpath() == '/bookstore' + and: 'delta report contains REMOVE action with expected xpath' + assert result[1].getAction() == 'remove' + assert result[1].getXpath() == "/bookstore-address[@bookstore-name='Easons-1']" + and: 'delta report contains ADD action with expected xpath' + assert result[2].getAction() == 'add' + assert result[2].getXpath() == "/bookstore-address[@bookstore-name='Crossword Bookstores']" + } + + def 'Get delta between 2 anchors returns empty response when #scenario'() { + when: 'attempt to get delta report between anchors' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, targetAnchor, xpath, INCLUDE_ALL_DESCENDANTS) + then: 'delta report is empty' + assert result.isEmpty() + where: 'following data was used' + scenario | targetAnchor | xpath + 'anchors with identical data are queried' | BOOKSTORE_ANCHOR_4 | '/' + 'same anchor name is passed as parameter' | BOOKSTORE_ANCHOR_3 | '/' + 'non existing xpath' | BOOKSTORE_ANCHOR_5 | '/non-existing-xpath' + } + + def 'Get delta between anchors error scenario: #scenario'() { + when: 'attempt to get delta between anchors' + objectUnderTest.getDeltaByDataspaceAndAnchors(dataspaceName, sourceAnchor, targetAnchor, '/some-xpath', INCLUDE_ALL_DESCENDANTS) + then: 'expected exception is thrown' + thrown(expectedException) + where: 'following data was used' + scenario | dataspaceName | sourceAnchor | targetAnchor || expectedException + 'invalid dataspace name' | 'Invalid dataspace' | 'not-relevant' | 'not-relevant' || DataValidationException + 'invalid anchor 1 name' | FUNCTIONAL_TEST_DATASPACE_3 | 'invalid anchor' | 'not-relevant' || DataValidationException + 'invalid anchor 2 name' | 
FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | 'invalid anchor' || DataValidationException + 'non-existing dataspace' | 'non-existing' | 'not-relevant1' | 'not-relevant2' || DataspaceNotFoundException + 'non-existing dataspace with same anchor name' | 'non-existing' | 'not-relevant' | 'not-relevant' || DataspaceNotFoundException + 'non-existing anchor 1' | FUNCTIONAL_TEST_DATASPACE_3 | 'non-existing-anchor' | 'not-relevant' || AnchorNotFoundException + 'non-existing anchor 2' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | 'non-existing-anchor' || AnchorNotFoundException + } + + def 'Get delta between anchors for remove action, where source data node #scenario'() { + when: 'attempt to get delta between leaves of data nodes present in 2 anchors' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_5, BOOKSTORE_ANCHOR_3, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) + then: 'expected action is present in delta report' + assert result.get(0).getAction() == 'remove' + where: 'following data was used' + scenario | parentNodeXpath + 'has leaves and child nodes' | "/bookstore/categories[@code='6']" + 'has leaves only' | "/bookstore/categories[@code='5']/books[@title='Book 11']" + 'has child data node only' | "/bookstore/support-info/contact-emails" + 'is empty' | "/bookstore/container-without-leaves" + } + + def 'Get delta between anchors for add action, where target data node #scenario'() { + when: 'attempt to get delta between leaves of data nodes present in 2 anchors' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) + then: 'the expected action is present in delta report' + result.get(0).getAction() == 'add' + and: 'the expected xapth is present in delta report' + result.get(0).getXpath() == parentNodeXpath + where: 'following data was used' + scenario | parentNodeXpath + 'has leaves and 
child nodes' | "/bookstore/categories[@code='6']" + 'has leaves only' | "/bookstore/categories[@code='5']/books[@title='Book 11']" + 'has child data node only' | "/bookstore/support-info/contact-emails" + 'is empty' | "/bookstore/container-without-leaves" + } + + def 'Get delta between anchors when leaves of existing data nodes are updated,: #scenario'() { + when: 'attempt to get delta between leaves of existing data nodes' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, sourceAnchor, targetAnchor, xpath, OMIT_DESCENDANTS) + then: 'expected action is update' + assert result[0].getAction() == 'update' + and: 'the payload has expected leaf values' + def sourceData = result[0].getSourceData() + def targetData = result[0].getTargetData() + assert sourceData == expectedSourceValue + assert targetData == expectedTargetValue + where: 'following data was used' + scenario | sourceAnchor | targetAnchor | xpath || expectedSourceValue | expectedTargetValue + 'leaf is updated in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore' || ['bookstore-name': 'Easons-1'] | ['bookstore-name': 'Crossword Bookstores'] + 'leaf is removed in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | "/bookstore/categories[@code='5']/books[@title='Book 1']" || [price:1] | null + 'leaf is added in target anchor' | BOOKSTORE_ANCHOR_5 | BOOKSTORE_ANCHOR_3 | "/bookstore/categories[@code='5']/books[@title='Book 1']" || null | [price:1] + } + + def 'Get delta between anchors when child data nodes under existing parent data nodes are updated: #scenario'() { + when: 'attempt to get delta between leaves of existing data nodes' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, sourceAnchor, targetAnchor, xpath, DIRECT_CHILDREN_ONLY) + then: 'expected action is update' + assert result[0].getAction() == 'update' + and: 'the delta report has expected child node xpaths' + def deltaReportEntities = 
getDeltaReportEntities(result) + def childNodeXpathsInDeltaReport = deltaReportEntities.get('xpaths') + assert childNodeXpathsInDeltaReport.contains(expectedChildNodeXpath) + where: 'following data was used' + scenario | sourceAnchor | targetAnchor | xpath || expectedChildNodeXpath + 'source and target anchors have child data nodes' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore/premises' || '/bookstore/premises/addresses[@house-number=\'2\' and @street=\'Main Street\']' + 'removed child data nodes in target anchor' | BOOKSTORE_ANCHOR_5 | BOOKSTORE_ANCHOR_3 | '/bookstore' || '/bookstore/support-info' + 'added child data nodes in target anchor' | BOOKSTORE_ANCHOR_3 | BOOKSTORE_ANCHOR_5 | '/bookstore' || '/bookstore/support-info' + } + + def 'Get delta between anchors where source and target data nodes have leaves and child data nodes'() { + given: 'parent node xpath and expected data in delta report' + def parentNodeXpath = "/bookstore/categories[@code='1']" + def expectedSourceDataInParentNode = ['name':'Children'] + def expectedTargetDataInParentNode = ['name':'Kids'] + def expectedSourceDataInChildNode = [['lang' : 'English'],['price':20, 'editions':[1988, 2000]]] + def expectedTargetDataInChildNode = [['lang':'English/German'], ['price':200, 'editions':[1988, 2000, 2023]]] + when: 'attempt to get delta between leaves of existing data nodes' + def result = objectUnderTest.getDeltaByDataspaceAndAnchors(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, BOOKSTORE_ANCHOR_5, parentNodeXpath, INCLUDE_ALL_DESCENDANTS) + def deltaReportEntities = getDeltaReportEntities(result) + then: 'expected action is update' + assert result[0].getAction() == 'update' + and: 'the payload has expected parent node xpath' + assert deltaReportEntities.get('xpaths').contains(parentNodeXpath) + and: 'delta report has expected source and target data' + assert deltaReportEntities.get('sourcePayload').contains(expectedSourceDataInParentNode) + assert 
deltaReportEntities.get('targetPayload').contains(expectedTargetDataInParentNode) + and: 'the delta report also has expected child node xpaths' + assert deltaReportEntities.get('xpaths').containsAll(["/bookstore/categories[@code='1']/books[@title='The Gruffalo']", "/bookstore/categories[@code='1']/books[@title='Matilda']"]) + and: 'the delta report also has expected source and target data of child nodes' + assert deltaReportEntities.get('sourcePayload').containsAll(expectedSourceDataInChildNode) + assert deltaReportEntities.get('targetPayload').containsAll(expectedTargetDataInChildNode) + } + + def 'Get delta between anchor and JSON payload'() { + when: 'attempt to get delta report between anchor and JSON payload' + def jsonPayload = "{\"book-store:bookstore\":{\"bookstore-name\":\"Crossword Bookstores\"},\"book-store:bookstore-address\":{\"address\":\"Bangalore, India\",\"postal-code\":\"560062\",\"bookstore-name\":\"Crossword Bookstores\"}}" + def result = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, '/', [:], jsonPayload, OMIT_DESCENDANTS) + then: 'delta report contains expected number of changes' + result.size() == 3 + and: 'delta report contains UPDATE action with expected xpath' + assert result[0].getAction() == 'update' + assert result[0].getXpath() == '/bookstore' + and: 'delta report contains REMOVE action with expected xpath' + assert result[1].getAction() == 'remove' + assert result[1].getXpath() == "/bookstore-address[@bookstore-name='Easons-1']" + and: 'delta report contains ADD action with expected xpath' + assert result[2].getAction() == 'add' + assert result[2].getXpath() == "/bookstore-address[@bookstore-name='Crossword Bookstores']" + } + + def 'Get delta between anchor and payload returns empty response when JSON payload is identical to anchor data'() { + when: 'attempt to get delta report between anchor and JSON payload (replacing the string Easons with Easons-1 because the data in JSON 
file is modified, to append anchor number, during the setup process of the integration tests)' + def jsonPayload = readResourceDataFile('bookstore/bookstoreData.json').replace('Easons', 'Easons-1') + def result = objectUnderTest.getDeltaByDataspaceAnchorAndPayload(FUNCTIONAL_TEST_DATASPACE_3, BOOKSTORE_ANCHOR_3, '/', [:], jsonPayload, INCLUDE_ALL_DESCENDANTS) + then: 'delta report is empty' + assert result.isEmpty() + } + + def 'Get delta between anchor and payload error scenario: #scenario'() { + when: 'attempt to get delta between anchor and json payload' + objectUnderTest.getDeltaByDataspaceAnchorAndPayload(dataspaceName, sourceAnchor, xpath, [:], jsonPayload, INCLUDE_ALL_DESCENDANTS) + then: 'expected exception is thrown' + thrown(expectedException) + where: 'following data was used' + scenario | dataspaceName | sourceAnchor | xpath | jsonPayload || expectedException + 'invalid dataspace name' | 'Invalid dataspace' | 'not-relevant' | '/' | '{some-json}' || DataValidationException + 'invalid anchor name' | FUNCTIONAL_TEST_DATASPACE_3 | 'invalid anchor' | '/' | '{some-json}' || DataValidationException + 'non-existing dataspace' | 'non-existing' | 'not-relevant' | '/' | '{some-json}' || DataspaceNotFoundException + 'non-existing anchor' | FUNCTIONAL_TEST_DATASPACE_3 | 'non-existing-anchor' | '/' | '{some-json}' || AnchorNotFoundException + 'empty json payload with root node xpath' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | '/' | '' || DataValidationException + 'empty json payload with non-root node xpath' | FUNCTIONAL_TEST_DATASPACE_3 | BOOKSTORE_ANCHOR_3 | '/bookstore' | '' || DataValidationException + } + + def getDeltaReportEntities(List deltaReport) { + def xpaths = [] + def action = [] + def sourcePayload = [] + def targetPayload = [] + deltaReport.each { + delta -> xpaths.add(delta.getXpath()) + action.add(delta.getAction()) + sourcePayload.add(delta.getSourceData()) + targetPayload.add(delta.getTargetData()) + } + return ['xpaths':xpaths, 
'action':action, 'sourcePayload':sourcePayload, 'targetPayload':targetPayload] + } + + def countDataNodesInBookstore() { + return countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', INCLUDE_ALL_DESCENDANTS)) + } + + def countTopLevelListDataNodesInBookstore() { + return countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/', INCLUDE_ALL_DESCENDANTS)) + } +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataspaceServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataspaceServiceIntegrationSpec.groovy new file mode 100644 index 0000000000..d69f6cca0c --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/DataspaceServiceIntegrationSpec.groovy @@ -0,0 +1,107 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.cps + +import org.onap.cps.api.CpsDataspaceService +import org.onap.cps.integration.base.FunctionalSpecBase +import org.onap.cps.spi.exceptions.AlreadyDefinedException +import org.onap.cps.spi.exceptions.DataspaceInUseException +import org.onap.cps.spi.exceptions.DataspaceNotFoundException + +class DataspaceServiceIntegrationSpec extends FunctionalSpecBase { + + CpsDataspaceService objectUnderTest + + def setup() { objectUnderTest = cpsDataspaceService } + + def 'Dataspace CRUD operations.'() { + when: 'a dataspace is created' + objectUnderTest.createDataspace('newDataspace') + then: 'the dataspace can be read' + assert objectUnderTest.getDataspace('newDataspace').name == 'newDataspace' + and: 'it can be deleted' + objectUnderTest.deleteDataspace('newDataspace') + then: 'the dataspace no longer exists i.e. an exception is thrown if an attempt is made to retrieve it' + def thrown = null + try { + objectUnderTest.getDataspace('newDataspace') + } catch(Exception exception) { + thrown = exception + } + assert thrown instanceof DataspaceNotFoundException + } + + def 'Attempt to delete a non-existing dataspace'() { + when: 'attempt to delete a non-existing dataspace' + objectUnderTest.deleteDataspace('non-existing-name') + then: 'a not found exception is thrown with the relevant dataspace name' + def thrownException = thrown(DataspaceNotFoundException) + assert thrownException.details.contains('non-existing-name does not exist') + } + + def 'Attempt Delete dataspace with a schema set and anchor'() { + setup: 'a dataspace with a schema set and anchor' + objectUnderTest.createDataspace('targetDataspace') + cpsModuleService.createSchemaSet('targetDataspace','someSchemaSet',[:]) + cpsAnchorService.createAnchor('targetDataspace', 'someSchemaSet', 'some_anchor') + when: 'attempt to delete dataspace' 
+ objectUnderTest.deleteDataspace('targetDataspace') + then: 'an in-use exception is thrown mentioning anchors' + def thrownException = thrown(DataspaceInUseException) + assert thrownException.details.contains('contains 1 anchor(s)') + cleanup: + cpsModuleService.deleteSchemaSetsWithCascade('targetDataspace',['someSchemaSet']) + objectUnderTest.deleteDataspace('targetDataspace') + } + + def 'Attempt to delete dataspace with just a schema set'() { + setup: 'a dataspace with a schema set' + objectUnderTest.createDataspace('targetDataspace') + cpsModuleService.createSchemaSet('targetDataspace','someSchemaSet',[:]) + when: 'attempt to delete dataspace' + objectUnderTest.deleteDataspace('targetDataspace') + then: 'an in-use exception is thrown mentioning schemasets' + def thrownException = thrown(DataspaceInUseException) + assert thrownException.details.contains('contains 1 schemaset(s)') + cleanup: + cpsModuleService.deleteSchemaSetsWithCascade('targetDataspace',['someSchemaSet']) + objectUnderTest.deleteDataspace('targetDataspace') + } + + def 'Retrieve all dataspaces (depends on total test suite).'() { + given: 'two addtional dataspaces are created' + objectUnderTest.createDataspace('dataspace1') + objectUnderTest.createDataspace('dataspace2') + when: 'all datespaces are retreived' + def result = objectUnderTest.getAllDataspaces() + then: 'there are at least 3 dataspaces (2 new ones plus the general test dataspace)' + result.size() >= 3 + assert result.name.containsAll([GENERAL_TEST_DATASPACE, 'dataspace1', 'dataspace2']) + } + + def 'Duplicate dataspaces.'() { + when: 'attempting to create a dataspace with the same name as an existing one' + objectUnderTest.createDataspace(GENERAL_TEST_DATASPACE) + then: 'an exception is thrown indicating the dataspace already exists' + thrown(AlreadyDefinedException) + } + +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/ModuleServiceIntegrationSpec.groovy 
b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/ModuleServiceIntegrationSpec.groovy new file mode 100644 index 0000000000..0e465d84a0 --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/ModuleServiceIntegrationSpec.groovy @@ -0,0 +1,364 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023-2024 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.cps + +import org.onap.cps.api.CpsModuleService +import org.onap.cps.integration.base.FunctionalSpecBase +import org.onap.cps.spi.CascadeDeleteAllowed +import org.onap.cps.spi.exceptions.AlreadyDefinedException +import org.onap.cps.spi.exceptions.DataspaceNotFoundException +import org.onap.cps.spi.exceptions.ModelValidationException +import org.onap.cps.spi.exceptions.SchemaSetInUseException +import org.onap.cps.spi.exceptions.SchemaSetNotFoundException +import org.onap.cps.spi.model.ModuleDefinition +import org.onap.cps.spi.model.ModuleReference + +class ModuleServiceIntegrationSpec extends FunctionalSpecBase { + + CpsModuleService objectUnderTest + + private static def originalNumberOfModuleReferences = 2 // bookstore has two modules + private static def bookStoreModuleReference = new ModuleReference('stores','2024-02-08') + private static def bookStoreModuleReferenceWithNamespace = new ModuleReference('stores','2024-02-08', 'org:onap:cps:sample') + private static def bookStoreTypesModuleReference = new ModuleReference('bookstore-types','2024-01-30') + private static def bookStoreTypesModuleReferenceWithNamespace = new ModuleReference('bookstore-types','2024-01-30', 'org:onap:cps:types:sample') + static def NEW_RESOURCE_REVISION = '2023-05-10' + static def NEW_RESOURCE_CONTENT = 'module test_module {\n' + + ' yang-version 1.1;\n' + + ' namespace "org:onap:ccsdk:sample";\n' + + '\n' + + ' prefix book-store;\n' + + '\n' + + ' revision "2023-05-10" {\n' + + ' description\n' + + ' "Sample Model";\n' + + ' }' + + '}' + + def newYangResourcesNameToContentMap = [:] + def moduleReferences = [] + def noNewModules = [:] + def bookstoreModelFileContent = readResourceDataFile('bookstore/bookstore.yang') + def bookstoreTypesFileContent = readResourceDataFile('bookstore/bookstore-types.yang') + + 
def setup() { + objectUnderTest = cpsModuleService + } + + /* + C R E A T E S C H E M A S E T U S E - C A S E S + */ + + def 'Create new schema set from yang resources with #scenario'() { + given: 'a new schema set with #numberOfModules modules' + populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfNewModules) + when: 'the new schema set is created' + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', newYangResourcesNameToContentMap) + then: 'the number of module references has increased by #numberOfNewModules' + def yangResourceModuleReferences = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1) + originalNumberOfModuleReferences + numberOfNewModules == yangResourceModuleReferences.size() + cleanup: + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, [ 'newSchemaSet' ]) + where: 'the following parameters are use' + scenario | numberOfNewModules + 'two valid new modules' | 2 + 'empty schema set' | 0 + 'over max batch size #modules' | 101 + } + + def 'Create new schema set with recommended filename format but invalid yang'() { + given: 'a filename using RFC6020 recommended format (for coverage only)' + def fileName = 'test@2023-05-11.yang' + when: 'attempt to create a schema set with invalid Yang' + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', [(fileName) :'invalid yang']) + then: 'a model validation exception' + thrown(ModelValidationException) + } + + def 'Create new schema set from modules with #scenario'() { + given: 'a new schema set with #numberOfNewModules modules' + populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfNewModules) + and: 'add existing module references (optional)' + moduleReferences.addAll(existingModuleReferences) + when: 'the new schema set is created' + def schemaSetName = "NewSchemaWith${numberOfNewModules}Modules" + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 
schemaSetName, newYangResourcesNameToContentMap, moduleReferences) + and: 'associated with a new anchor' + cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, schemaSetName, 'newAnchor') + then: 'the new anchor has the correct number of modules' + def yangResourceModuleReferences = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'newAnchor') + assert expectedNumberOfModulesForAnchor == yangResourceModuleReferences.size() + cleanup: + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, [ schemaSetName.toString() ]) + where: 'the following module references are provided' + scenario | numberOfNewModules | existingModuleReferences || expectedNumberOfModulesForAnchor + 'empty schema set' | 0 | [ ] || 0 + 'one existing module' | 0 | [bookStoreModuleReference ] || 1 + 'two new modules' | 2 | [ ] || 2 + 'two new modules, one existing' | 2 | [bookStoreModuleReference ] || 3 + 'over max batch size #modules' | 101 | [ ] || 101 + 'two valid, one invalid module' | 2 | [ new ModuleReference('NOT EXIST','IRRELEVANT') ] || 2 + } + + def 'Duplicate schema content.'() { + given: 'a map of yang resources' + populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) + when: 'a new schema set is created' + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema1', newYangResourcesNameToContentMap) + then: 'the dataspace has one new module (reference)' + def numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).size() + assert numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded == originalNumberOfModuleReferences + 1 + when: 'a second new schema set is created' + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema2', newYangResourcesNameToContentMap) + then: 'the dataspace has no additional module (reference)' + assert numberOfModuleReferencesAfterFirstSchemaSetHasBeenAdded == 
objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).size() + cleanup: + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, [ 'newSchema1', 'newSchema2']) + } + + def 'Create schema set error scenario: #scenario.'() { + when: 'attempt to store schema set #schemaSetName in dataspace #dataspaceName' + populateNewYangResourcesNameToContentMapAndAllModuleReferences(0) + objectUnderTest.createSchemaSet(dataspaceName, schemaSetName, newYangResourcesNameToContentMap) + then: 'an #expectedException is thrown' + thrown(expectedException) + where: 'the following data is used' + scenario | dataspaceName | schemaSetName || expectedException + 'dataspace does not exist' | 'unknown' | 'not-relevant' || DataspaceNotFoundException + 'schema set already exists' | FUNCTIONAL_TEST_DATASPACE_1 | BOOKSTORE_SCHEMA_SET || AlreadyDefinedException + } + + /* + R E A D S C H E M A S E T I N F O U S E - C A S E S + */ + + def 'Retrieving module definitions by anchor.'() { + when: 'the module definitions for an anchor are retrieved' + def result = objectUnderTest.getModuleDefinitionsByAnchorName(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1) + then: 'the correct module definitions are returned' + assert result.size() == 2 + assert result.contains(new ModuleDefinition('stores','2024-02-08',bookstoreModelFileContent)) + assert result.contains(new ModuleDefinition('bookstore-types','2024-01-30', bookstoreTypesFileContent)) + } + + def 'Retrieving module definitions: #scenarios'() { + when: 'module definitions for module name are retrieved' + def result = objectUnderTest.getModuleDefinitionsByAnchorAndModule(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, moduleName, moduleRevision) + then: 'the correct module definitions are returned' + if (expectedNumberOfDefinitions > 0) { + assert result.size() == expectedNumberOfDefinitions + def expectedModuleDefinition = new ModuleDefinition('stores', '2024-02-08', bookstoreModelFileContent) + assert 
result[0] == expectedModuleDefinition + } + where: 'following parameters are used' + scenarios | moduleName | moduleRevision || expectedNumberOfDefinitions + 'correct module name and revision' | 'stores' | '2024-02-08' || 1 + 'correct module name' | 'stores' | null || 1 + 'incorrect module name' | 'other' | null || 0 + 'incorrect revision' | 'stores' | '2025-11-22' || 0 + } + + def 'Retrieving yang resource module references by anchor.'() { + when: 'the yang resource module references for an anchor are retrieved' + def result = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1) + then: 'the correct module references are returned' + assert result.size() == 2 + assert result.containsAll(bookStoreModuleReference, bookStoreTypesModuleReference) + } + + def 'Identifying new module references with #scenario'() { + when: 'identifyNewModuleReferences is called' + def result = objectUnderTest.identifyNewModuleReferences(moduleReferences) + then: 'the correct module references are returned' + assert result.size() == expectedResult.size() + assert result.containsAll(expectedResult) + where: 'the following data is used' + scenario | moduleReferences || expectedResult + 'just new module references' | [new ModuleReference('new1', 'r1'), new ModuleReference('new2', 'r1')] || [new ModuleReference('new1', 'r1'), new ModuleReference('new2', 'r1')] + 'one new module,one existing reference' | [new ModuleReference('new1', 'r1'), bookStoreModuleReference] || [new ModuleReference('new1', 'r1')] + 'no new module references' | [bookStoreModuleReference] || [] + 'no module references' | [] || [] + 'module references collection is null' | null || [] + } + + def 'Retrieve schema set.'() { + when: 'a specific schema set is retrieved' + def result = objectUnderTest.getSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET) + then: 'the result has the correct name and module(s)' + assert result.name == 'bookstoreSchemaSet' + assert 
result.moduleReferences.size() == 2 + assert result.moduleReferences.containsAll(bookStoreModuleReferenceWithNamespace, bookStoreTypesModuleReferenceWithNamespace) + } + + def 'Retrieve all schema sets.'() { + given: 'an extra schema set is stored' + populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchema1', newYangResourcesNameToContentMap) + when: 'all schema sets are retrieved' + def result = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1) + then: 'the result contains all expected schema sets' + assert result.name == [ 'bookstoreSchemaSet', 'newSchema1' ] + cleanup: + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchema1']) + } + + /* + D E L E T E S C H E M A S E T U S E - C A S E S + */ + + def 'Delete schema sets with(out) cascade.'() { + given: 'a schema set' + populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', newYangResourcesNameToContentMap) + and: 'optionally create anchor for the schema set' + if (associateWithAnchor) { + cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', 'newAnchor') + } + when: 'attempt to delete the schema set' + try { + objectUnderTest.deleteSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet', cascadeDeleteAllowedOption) + } + catch (Exception e) { // only accept correct exception when schema set cannot be deleted + assert e instanceof SchemaSetInUseException && expectSchemaSetStillPresent + } + then: 'check if the dataspace still contains the new schema set or not' + def remainingSchemaSetNames = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1).name + assert remainingSchemaSetNames.contains('newSchemaSet') == expectSchemaSetStillPresent + cleanup: + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchemaSet']) + where: 'the following options are used' 
+ associateWithAnchor | cascadeDeleteAllowedOption || expectSchemaSetStillPresent + false | CascadeDeleteAllowed.CASCADE_DELETE_ALLOWED || false + false | CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED || false + true | CascadeDeleteAllowed.CASCADE_DELETE_ALLOWED || false + true | CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED || true + } + + def 'Delete schema sets with shared resources.'() { + given: 'a new schema set' + populateNewYangResourcesNameToContentMapAndAllModuleReferences(1) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet1', newYangResourcesNameToContentMap) + and: 'another schema set which shares one yang resource (module)' + populateNewYangResourcesNameToContentMapAndAllModuleReferences(2) + objectUnderTest.createSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'newSchemaSet2', newYangResourcesNameToContentMap) + when: 'all schema sets are retrieved' + def moduleRevisions = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).revision + then: 'both modules (revisions) are present' + assert moduleRevisions.containsAll(['2000-01-01', '2000-01-01']) + when: 'delete the second schema set that has two resources one of which is a shared resource' + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchemaSet2']) + then: 'only the second schema set is deleted' + def remainingSchemaSetNames = objectUnderTest.getSchemaSets(FUNCTIONAL_TEST_DATASPACE_1).name + assert remainingSchemaSetNames.contains('newSchemaSet1') + assert !remainingSchemaSetNames.contains('newSchemaSet2') + and: 'only the shared module (revision) remains' + def remainingModuleRevisions = objectUnderTest.getYangResourceModuleReferences(FUNCTIONAL_TEST_DATASPACE_1).revision + assert remainingModuleRevisions.contains('2000-01-01') + assert !remainingModuleRevisions.contains('2001-01-01') + cleanup: + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['newSchemaSet1']) + } + + def 'Delete schema set 
error scenario: #scenario.'() { + when: 'attempt to delete a schema set where #scenario' + objectUnderTest.deleteSchemaSet(dataspaceName, schemaSetName, CascadeDeleteAllowed.CASCADE_DELETE_PROHIBITED) + then: 'an #expectedException is thrown' + thrown(expectedException) + where: 'the following data is used' + scenario | dataspaceName | schemaSetName || expectedException + 'dataspace does not exist' | 'unknown' | 'not-relevant' || DataspaceNotFoundException + 'schema set does not exists' | FUNCTIONAL_TEST_DATASPACE_1 | 'unknown' || SchemaSetNotFoundException + } + + /* + U P G R A D E + */ + + def 'Upgrade schema set (with existing and new modules, no matching module set tag in NCMP)'() { + given: 'an anchor and schema set with 2 modules (to be upgraded)' + populateNewYangResourcesNameToContentMapAndAllModuleReferences('original', 2) + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', newYangResourcesNameToContentMap, []) + cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', 'targetAnchor') + def yangResourceModuleReferencesBeforeUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') + assert yangResourceModuleReferencesBeforeUpgrade.size() == 2 + assert yangResourceModuleReferencesBeforeUpgrade.containsAll([new ModuleReference('original_0','2000-01-01'),new ModuleReference('original_1','2001-01-01')]) + and: 'two new 2 modules (from node)' + populateNewYangResourcesNameToContentMapAndAllModuleReferences('new', 2) + def newModuleReferences = [new ModuleReference('new_0','2000-01-01'),new ModuleReference('new_1','2001-01-01')] + and: 'a list of all module references (normally retrieved from node)' + def allModuleReferences = [] + allModuleReferences.add(bookStoreModuleReference) + allModuleReferences.addAll(newModuleReferences) + when: 'the schema set is upgraded' + objectUnderTest.upgradeSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', 
newYangResourcesNameToContentMap, allModuleReferences) + then: 'the new anchor has the correct new and existing modules' + def yangResourceModuleReferencesAfterUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') + assert yangResourceModuleReferencesAfterUpgrade.size() == 3 + assert yangResourceModuleReferencesAfterUpgrade.contains(bookStoreModuleReference) + assert yangResourceModuleReferencesAfterUpgrade.containsAll(newModuleReferences); + cleanup: + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['targetSchema']) + } + + def 'Upgrade existing schema set from another anchor (used in NCMP for matching module set tag)'() { + given: 'an anchor and schema set with 1 module (target)' + populateNewYangResourcesNameToContentMapAndAllModuleReferences('target', 1) + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', newYangResourcesNameToContentMap, []) + cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', 'targetAnchor') + def moduleReferencesBeforeUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') + assert moduleReferencesBeforeUpgrade.size() == 1 + and: 'another anchor and schema set with 2 other modules (source for upgrade)' + populateNewYangResourcesNameToContentMapAndAllModuleReferences('source', 2) + objectUnderTest.createSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'sourceSchema', newYangResourcesNameToContentMap, []) + cpsAnchorService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, 'sourceSchema', 'sourceAnchor') + assert objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'sourceAnchor').size() == 2 + when: 'the target schema is upgraded using the module references from the source anchor' + def moduleReferencesFromSourceAnchor = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'sourceAnchor') + 
objectUnderTest.upgradeSchemaSetFromModules(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema', noNewModules, moduleReferencesFromSourceAnchor) + then: 'the target schema now refers to the source modules (with namespace) modules' + def schemaSetModuleReferencesAfterUpgrade = getObjectUnderTest().getSchemaSet(FUNCTIONAL_TEST_DATASPACE_1, 'targetSchema').moduleReferences + assert schemaSetModuleReferencesAfterUpgrade.containsAll([new ModuleReference('source_0','2000-01-01','org:onap:ccsdk:sample'),new ModuleReference('source_1','2001-01-01','org:onap:ccsdk:sample')]); + and: 'the associated target anchor has the same module references (without namespace but that is a legacy issue)' + def anchorModuleReferencesAfterUpgrade = objectUnderTest.getYangResourcesModuleReferences(FUNCTIONAL_TEST_DATASPACE_1, 'targetAnchor') + assert anchorModuleReferencesAfterUpgrade.containsAll([new ModuleReference('source_0','2000-01-01'),new ModuleReference('source_1','2001-01-01')]); + cleanup: + objectUnderTest.deleteSchemaSetsWithCascade(FUNCTIONAL_TEST_DATASPACE_1, ['sourceSchema', 'targetSchema']) + } + + /* + H E L P E R M E T H O D S + */ + + def populateNewYangResourcesNameToContentMapAndAllModuleReferences(numberOfModules) { + populateNewYangResourcesNameToContentMapAndAllModuleReferences('name', numberOfModules) + } + + def populateNewYangResourcesNameToContentMapAndAllModuleReferences(namePrefix, numberOfModules) { + numberOfModules.times { + def uniqueName = namePrefix + '_' + it + def uniqueRevision = String.valueOf(2000 + it) + '-01-01' + moduleReferences.add(new ModuleReference(uniqueName, uniqueRevision)) + def uniqueContent = NEW_RESOURCE_CONTENT.replace(NEW_RESOURCE_REVISION, uniqueRevision).replace('module test_module', 'module '+uniqueName) + newYangResourcesNameToContentMap.put(uniqueRevision, uniqueContent) + } + } + +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/QueryServiceIntegrationSpec.groovy 
b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/QueryServiceIntegrationSpec.groovy new file mode 100644 index 0000000000..fd9aa54051 --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/QueryServiceIntegrationSpec.groovy @@ -0,0 +1,425 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023-2024 Nordix Foundation + * Modifications Copyright (C) 2023 TechMahindra Ltd + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.cps + +import java.time.OffsetDateTime +import org.onap.cps.api.CpsQueryService +import org.onap.cps.integration.base.FunctionalSpecBase +import org.onap.cps.spi.FetchDescendantsOption +import org.onap.cps.spi.PaginationOption +import org.onap.cps.spi.exceptions.CpsPathException + +import static org.onap.cps.spi.FetchDescendantsOption.DIRECT_CHILDREN_ONLY +import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS +import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS +import static org.onap.cps.spi.PaginationOption.NO_PAGINATION + +class QueryServiceIntegrationSpec extends FunctionalSpecBase { + + CpsQueryService objectUnderTest + + def setup() { objectUnderTest = cpsQueryService } + + def 'Query bookstore using CPS path where #scenario.'() { + when: 'query data nodes for bookstore container' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) + then: 'the result contains expected number of nodes' + assert result.size() == expectedResultSize + and: 'the result contains the expected leaf values' + result.leaves.forEach( dataNodeLeaves -> { + expectedLeaves.forEach( (expectedLeafKey,expectedLeafValue) -> { + assert dataNodeLeaves[expectedLeafKey] == expectedLeafValue + }) + }) + where: + scenario | cpsPath || expectedResultSize | expectedLeaves + 'the AND condition is used' | '//books[@lang="English" and @price=15]' || 2 | [lang:"English", price:15] + 'the AND is used where result does not exist' | '//books[@lang="English" and @price=1000]' || 0 | [] + } + + def 'Cps Path query using comparative and boolean operators.'() { + given: 'a cps path query in the discount category' + def cpsPath = "/bookstore/categories[@code='5']/books" + leafCondition + when: 'a query is executed to 
get response by the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, + cpsPath, OMIT_DESCENDANTS) + then: 'the cps-path of queryDataNodes has the expectedLeaves' + def bookPrices = result.collect { it.getLeaves().get('price') } + assert bookPrices.sort() == expectedBookPrices.sort() + where: 'the following data is used' + leafCondition || expectedBookPrices + '[@price = 5]' || [5] + '[@price < 5]' || [1, 2, 3, 4] + '[@price > 5]' || [6, 7, 8, 9, 10] + '[@price <= 5]' || [1, 2, 3, 4, 5] + '[@price >= 5]' || [5, 6, 7, 8, 9, 10] + '[@price > 10]' || [] + '[@price = 3 or @price = 7]' || [3, 7] + '[@price = 3 and @price = 7]' || [] + '[@price > 3 and @price <= 6]' || [4, 5, 6] + '[@price < 3 or @price > 8]' || [1, 2, 9, 10] + '[@price = 1 or @price = 3 or @price = 5]' || [1, 3, 5] + '[@price = 1 or @price >= 8 and @price < 10]' || [1, 8, 9] + '[@price >= 3 and @price <= 5 or @price > 9]' || [3, 4, 5, 10] + } + + def 'Cps Path query for leaf value(s) with #scenario.'() { + when: 'a query is executed to get a data node by the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, fetchDescendantsOption) + then: 'the correct number of parent nodes are returned' + assert result.size() == expectedNumberOfParentNodes + and: 'the correct total number of data nodes are returned' + assert countDataNodesInTree(result) == expectedTotalNumberOfNodes + where: 'the following data is used' + scenario | cpsPath | fetchDescendantsOption || expectedNumberOfParentNodes | expectedTotalNumberOfNodes + 'string and no descendants' | '/bookstore/categories[@code="1"]/books[@title="Matilda"]' | OMIT_DESCENDANTS || 1 | 1 + 'integer and descendants' | '/bookstore/categories[@code="1"]/books[@price=15]' | INCLUDE_ALL_DESCENDANTS || 1 | 1 + 'no condition and no descendants' | '/bookstore/categories' | OMIT_DESCENDANTS || 5 | 5 + 'no condition and level 1 descendants' | 
'/bookstore' | new FetchDescendantsOption(1) || 1 | 7 + 'no condition and level 2 descendants' | '/bookstore' | new FetchDescendantsOption(2) || 1 | 28 + } + + def 'Query for attribute by cps path with cps paths that return no data because of #scenario.'() { + when: 'a query is executed to get data nodes for the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) + then: 'no data is returned' + assert result.isEmpty() + where: 'following cps queries are performed' + scenario | cpsPath + 'cps path is incomplete' | '/bookstore[@title="Matilda"]' + 'leaf value does not exist' | '/bookstore/categories[@code="1"]/books[@title=\'does not exist\']' + 'incomplete end of xpath prefix' | '/bookstore/categories/books[@price=15]' + } + + def 'Cps Path query using descendant anywhere and #type (further) descendants.'() { + when: 'a query is executed to get a data node by the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="1"]', fetchDescendantsOption) + then: 'the data node has the correct number of children' + assert result[0].childDataNodes.xpath.sort() == expectedChildNodes.sort() + where: 'the following data is used' + type | fetchDescendantsOption || expectedChildNodes + 'omit' | OMIT_DESCENDANTS || [] + 'include' | INCLUDE_ALL_DESCENDANTS || ["/bookstore/categories[@code='1']/books[@title='Matilda']", + "/bookstore/categories[@code='1']/books[@title='The Gruffalo']"] + } + + def 'Cps Path query for all books.'() { + when: 'a query is executed to get all books' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '//books', OMIT_DESCENDANTS) + then: 'the expected number of books are returned' + assert result.size() == 19 + } + + def 'Cps Path query using descendant anywhere with #scenario.'() { + when: 'a query is executed to get a data node by the given 
cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) + then: 'xpaths of the retrieved data nodes are as expected' + def bookTitles = result.collect { it.getLeaves().get('title') } + assert bookTitles.sort() == expectedBookTitles.sort() + where: 'the following data is used' + scenario | cpsPath || expectedBookTitles + 'string leaf condition' | '//books[@title="Matilda"]' || ["Matilda"] + 'text condition on leaf' | '//books/title[text()="Matilda"]' || ["Matilda"] + 'text condition case mismatch' | '//books/title[text()="matilda"]' || [] + 'text condition on int leaf' | '//books/price[text()="20"]' || ["A Book with No Language", "Matilda"] + 'text condition on leaf-list' | '//books/authors[text()="Terry Pratchett"]' || ["Good Omens", "The Colour of Magic", "The Light Fantastic"] + 'text condition partial match' | '//books/authors[text()="Terry"]' || [] + 'text condition (existing) empty string' | '//books/lang[text()=""]' || ["A Book with No Language"] + 'text condition on int leaf-list' | '//books/editions[text()="2000"]' || ["Matilda"] + 'match of leaf containing /' | '//books[@lang="N/A"]' || ["Logarithm tables"] + 'text condition on leaf containing /' | '//books/lang[text()="N/A"]' || ["Logarithm tables"] + 'match of key containing /' | '//books[@title="Debian GNU/Linux"]' || ["Debian GNU/Linux"] + 'text condition on key containing /' | '//books/title[text()="Debian GNU/Linux"]' || ["Debian GNU/Linux"] + } + + def 'Query for attribute by cps path using contains condition #scenario.'() { + when: 'a query is executed to get response by the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) + then: 'xpaths of the retrieved data nodes are as expected' + def bookTitles = result.collect { it.getLeaves().get('title') } + assert bookTitles.sort() == expectedBookTitles.sort() + where: 'the following 
data is used' + scenario | cpsPath || expectedBookTitles + 'contains condition with leaf' | '//books[contains(@title,"Mat")]' || ["Matilda"] + 'contains condition with case-sensitive' | '//books[contains(@title,"Ti")]' || [] + 'contains condition with Integer Value' | '//books[contains(@price,"15")]' || ["Annihilation", "The Gruffalo"] + } + + def 'Query for attribute by cps path using contains condition with no value.'() { + when: 'a query is executed to get response by the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '//books[contains(@title,"")]', OMIT_DESCENDANTS) + then: 'all books are returned' + assert result.size() == 19 + } + + def 'Cps Path query using descendant anywhere with #scenario condition for a container element.'() { + when: 'a query is executed to get a data node by the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) + then: 'book titles from the retrieved data nodes are as expected' + def bookTitles = result.collect { it.getLeaves().get('title') } + assert bookTitles.sort() == expectedBookTitles.sort() + where: 'the following data is used' + scenario | cpsPath || expectedBookTitles + 'one leaf' | '//books[@price=14]' || ['The Light Fantastic'] + 'one leaf with ">" condition' | '//books[@price>14]' || ['A Book with No Language', 'Annihilation', 'Debian GNU/Linux', 'Matilda', 'The Gruffalo'] + 'one text' | '//books/authors[text()="Terry Pratchett"]' || ['Good Omens', 'The Colour of Magic', 'The Light Fantastic'] + 'more than one leaf' | '//books[@price=12 and @lang="English"]' || ['The Colour of Magic'] + 'more than one leaf has "OR" condition' | '//books[@lang="English" or @price=15]' || ['Annihilation', 'Good Omens', 'Matilda', 'The Colour of Magic', 'The Gruffalo', 'The Light Fantastic'] + 'more than one leaf has "OR" condition with non-json data' | '//books[@title="xyz" or @price=13]' || 
['Good Omens'] + 'more than one leaf has multiple AND' | '//books[@lang="English" and @price=13 and @edition=1983]' || [] + 'more than one leaf has multiple OR' | '//books[ @title="Matilda" or @price=15 or @edition=2006]' || ['Annihilation', 'Matilda', 'The Gruffalo'] + 'leaves reversed in order' | '//books[@lang="English" and @price=12]' || ['The Colour of Magic'] + 'more than one leaf has combination of AND/OR' | '//books[@edition=1983 and @price=13 or @title="Good Omens"]' || ['Good Omens'] + 'more than one leaf has OR/AND' | '//books[@title="The Light Fantastic" or @price=11 and @edition=1983]' || ['The Light Fantastic'] + 'leaf and text' | '//books[@price=14]/authors[text()="Terry Pratchett"]' || ['The Light Fantastic'] + 'leaf and contains' | '//books[contains(@price,"13")]' || ['Good Omens'] + } + + def 'Cps Path query using descendant anywhere with #scenario condition(s) for a list element.'() { + when: 'a query is executed to get a data node by the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) + then: 'xpaths of the retrieved data nodes are as expected' + result.xpath.toList() == ["/bookstore/premises/addresses[@house-number='2' and @street='Main Street']"] + where: 'the following data is used' + scenario | cpsPath + 'full composite key' | '//addresses[@house-number=2 and @street="Main Street"]' + 'one partial key leaf' | '//addresses[@house-number=2]' + 'one non key leaf' | '//addresses[@county="Kildare"]' + 'mix of partial key and non key leaf' | '//addresses[@street="Main Street" and @county="Kildare"]' + } + + def 'Query for attribute by cps path of type ancestor with #scenario.'() { + when: 'the given cps path is parsed' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) + then: 'the xpaths of the retrieved data nodes are as expected' + assert result.xpath.sort() == 
expectedXPaths.sort() + where: 'the following data is used' + scenario | cpsPath || expectedXPaths + 'multiple list-ancestors' | '//books/ancestor::categories' || ["/bookstore/categories[@code='1']", "/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] + 'one ancestor with list value' | '//books/ancestor::categories[@code="1"]' || ["/bookstore/categories[@code='1']"] + 'top ancestor' | '//books/ancestor::bookstore' || ["/bookstore"] + 'list with index value in the xpath prefix' | '//categories[@code="1"]/books/ancestor::bookstore' || ["/bookstore"] + 'ancestor with parent list' | '//books/ancestor::bookstore/categories' || ["/bookstore/categories[@code='1']", "/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] + 'ancestor with parent' | '//books/ancestor::bookstore/categories[@code="2"]' || ["/bookstore/categories[@code='2']"] + 'ancestor combined with text condition' | '//books/title[text()="Matilda"]/ancestor::bookstore' || ["/bookstore"] + 'ancestor with parent that does not exist' | '//books/ancestor::parentDoesNoExist/categories' || [] + 'ancestor does not exist' | '//books/ancestor::ancestorDoesNotExist' || [] + 'ancestor combined with contains condition' | '//books[contains(@title,"Mat")]/ancestor::bookstore' || ["/bookstore"] + } + + def 'Query for attribute by cps path of type ancestor with #scenario descendants.'() { + when: 'the given cps path is parsed' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '//books/ancestor::bookstore', fetchDescendantsOption) + then: 'the xpaths of the retrieved data nodes are as expected' + assert countDataNodesInTree(result) == expectedNumberOfNodes + where: 'the following data is used' + scenario | fetchDescendantsOption || expectedNumberOfNodes + 'no' | OMIT_DESCENDANTS || 1 + 'direct' | 
DIRECT_CHILDREN_ONLY || 7 + 'all' | INCLUDE_ALL_DESCENDANTS || 28 + } + + def 'Cps Path query with #scenario throws a CPS Path Exception.'() { + when: 'trying to execute a query with a syntax (parsing) error' + objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) + then: 'a cps path exception is thrown' + thrown(CpsPathException) + where: 'the following data is used' + scenario | cpsPath + 'cpsPath that cannot be parsed' | 'cpsPath that cannot be parsed' + 'String with comparative operator' | '//books[@lang>"German" and @price>10]' + } + + def 'Cps Path query across anchors with #scenario.'() { + when: 'a query is executed to get a data nodes across anchors by the given CpsPath' + def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, cpsPath, OMIT_DESCENDANTS, NO_PAGINATION) + then: 'the correct dataspace is queried' + assert result.dataspace.toSet() == [FUNCTIONAL_TEST_DATASPACE_1].toSet() + and: 'correct anchors are queried' + assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2].toSet() + and: 'the correct number of nodes is returned' + assert result.size() == expectedXpathsPerAnchor.size() * NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA + and: 'the queried nodes have expected xpaths' + assert result.xpath.toSet() == expectedXpathsPerAnchor.toSet() + where: 'the following data is used' + scenario | cpsPath || expectedXpathsPerAnchor + 'container node' | '/bookstore' || ["/bookstore"] + 'list node' | '/bookstore/categories' || ["/bookstore/categories[@code='1']", "/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] + 'integer leaf-condition' | '/bookstore/categories[@code="1"]/books[@price=15]' || ["/bookstore/categories[@code='1']/books[@title='The Gruffalo']"] + 'multiple list-ancestors' | '//books/ancestor::categories' || ["/bookstore/categories[@code='1']", 
"/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] + 'one ancestor with list value' | '//books/ancestor::categories[@code="1"]' || ["/bookstore/categories[@code='1']"] + 'list with index value in the xpath prefix' | '//categories[@code="1"]/books/ancestor::bookstore' || ["/bookstore"] + 'ancestor with parent list' | '//books/ancestor::bookstore/categories' || ["/bookstore/categories[@code='1']", "/bookstore/categories[@code='2']", "/bookstore/categories[@code='3']", "/bookstore/categories[@code='4']", "/bookstore/categories[@code='5']"] + 'ancestor with parent list element' | '//books/ancestor::bookstore/categories[@code="2"]' || ["/bookstore/categories[@code='2']"] + 'ancestor combined with text condition' | '//books/title[text()="Matilda"]/ancestor::bookstore' || ["/bookstore"] + 'ancestor same as target type' | '//books/title[text()="Matilda"]/ancestor::books' || ["/bookstore/categories[@code='1']/books[@title='Matilda']"] + } + + def 'Cps Path query across anchors with #scenario descendants.'() { + when: 'a query is executed to get a data node by the given cps path' + def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, '/bookstore', fetchDescendantsOption, NO_PAGINATION) + then: 'the correct dataspace was queried' + assert result.dataspace.toSet() == [FUNCTIONAL_TEST_DATASPACE_1].toSet() + and: 'correct number of datanodes are returned' + assert countDataNodesInTree(result) == expectedNumberOfNodesPerAnchor * NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA + where: 'the following data is used' + scenario | fetchDescendantsOption || expectedNumberOfNodesPerAnchor + 'no' | OMIT_DESCENDANTS || 1 + 'direct' | DIRECT_CHILDREN_ONLY || 7 + 'all' | INCLUDE_ALL_DESCENDANTS || 28 + } + + def 'Cps Path query across anchors with ancestors and #scenario descendants.'() { + when: 'a query is executed to get a data node by the given cps path' + 
def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, '//books/ancestor::bookstore', fetchDescendantsOption, NO_PAGINATION) + then: 'the correct dataspace was queried' + assert result.dataspace.toSet() == [FUNCTIONAL_TEST_DATASPACE_1].toSet() + and: 'correct number of datanodes are returned' + assert countDataNodesInTree(result) == expectedNumberOfNodesPerAnchor * NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA + where: 'the following data is used' + scenario | fetchDescendantsOption || expectedNumberOfNodesPerAnchor + 'no' | OMIT_DESCENDANTS || 1 + 'direct' | DIRECT_CHILDREN_ONLY || 7 + 'all' | INCLUDE_ALL_DESCENDANTS || 28 + } + + def 'Cps Path query across anchors with syntax error throws a CPS Path Exception.'() { + when: 'trying to execute a query with a syntax (parsing) error' + objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, 'cpsPath that cannot be parsed' , OMIT_DESCENDANTS, NO_PAGINATION) + then: 'a cps path exception is thrown' + thrown(CpsPathException) + } + + def 'Cps Path querys with all descendants including descendants that are list entries: #scenario.'() { + when: 'a query is executed to get a data node by the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) + then: 'correct number of datanodes are returned' + assert countDataNodesInTree(result) == expectedNumberOfDataNodes + where: + scenario | cpsPath || expectedNumberOfDataNodes + 'absolute path all list entries' | '/bookstore/categories' || 24 + 'absolute path 1 list entry by key' | '/bookstore/categories[@code="3"]' || 5 + 'absolute path 1 list entry by name' | '/bookstore/categories[@name="Comedy"]' || 5 + 'relative path all list entries' | '//categories' || 24 + 'relative path 1 list entry by key' | '//categories[@code="3"]' || 5 + 'relative path 1 list entry by leaf' | '//categories[@name="Comedy"]' || 5 + 'incomplete absolute path' | 
'/categories' || 0 + 'incomplete absolute 1 list entry' | '/categories[@code="3"]' || 0 + } + + def 'Cps Path query contains #wildcard.'() { + when: 'a query is executed with a wildcard in the given cps path' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, INCLUDE_ALL_DESCENDANTS) + then: 'no results are returned, as Cps Path query does not interpret wildcard characters' + assert result.isEmpty() + where: + wildcard | cpsPath + ' sql wildcard in parent path list index' | '/bookstore/categories[@code="%"]/books' + 'regex wildcard in parent path list index' | '/bookstore/categories[@code=".*"]/books' + ' sql wildcard in leaf-condition' | '/bookstore/categories[@code="1"]/books[@title="%"]' + 'regex wildcard in leaf-condition' | '/bookstore/categories[@code="1"]/books[@title=".*"]' + ' sql wildcard in text-condition' | '/bookstore/categories[@code="1"]/books/title[text()="%"]' + 'regex wildcard in text-condition' | '/bookstore/categories[@code="1"]/books/title[text()=".*"]' + ' sql wildcard in contains-condition' | '/bookstore/categories[@code="1"]/books[contains(@title, "%")]' + 'regex wildcard in contains-condition' | '/bookstore/categories[@code="1"]/books[contains(@title, ".*")]' + } + + def 'Cps Path query can return a data node containing [@ in xpath #scenario.'() { + given: 'a book with special characters [@ and ] in title' + cpsDataService.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/categories[@code='1']", '{"books": [ {"title":"[@hello=world]"} ] }', OffsetDateTime.now()) + when: 'a query is executed' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) + then: 'the node is returned' + assert result.size() == 1 + cleanup: 'the new datanode' + cpsDataService.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/categories[@code='1']/books[@title='[@hello=world]']", OffsetDateTime.now()) + 
where: + scenario || cpsPath + 'leaf-condition' || "/bookstore/categories[@code='1']/books[@title='[@hello=world]']" + 'text-condition' || "/bookstore/categories[@code='1']/books/title[text()='[@hello=world]']" + 'contains-condition' || "/bookstore/categories[@code='1']/books[contains(@title, '[@hello=world]')]" + } + + def 'Cps Path get and query can handle apostrophe inside #quotes.'() { + given: 'a book with special characters in title' + cpsDataService.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/categories[@code='1']", + '{"books": [ {"title":"I\'m escaping"} ] }', OffsetDateTime.now()) + when: 'a query is executed' + def result = objectUnderTest.queryDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, OMIT_DESCENDANTS) + then: 'the node is returned' + assert result.size() == 1 + assert result[0].xpath == "/bookstore/categories[@code='1']/books[@title='I''m escaping']" + cleanup: 'the new datanode' + cpsDataService.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/categories[@code='1']/books[@title='I''m escaping']", OffsetDateTime.now()) + where: + quotes || cpsPath + 'single quotes' || "/bookstore/categories[@code='1']/books[@title='I''m escaping']" + 'double quotes' || '/bookstore/categories[@code="1"]/books[@title="I\'m escaping"]' + 'text-condition' || "/bookstore/categories[@code='1']/books/title[text()='I''m escaping']" + 'contains-condition' || "/bookstore/categories[@code='1']/books[contains(@title, 'I''m escaping')]" + } + + def 'Cps Path query across anchors using pagination option with #scenario.'() { + when: 'a query is executed to get a data nodes across anchors by the given CpsPath and pagination option' + def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, '/bookstore', OMIT_DESCENDANTS, new PaginationOption(pageIndex, pageSize)) + then: 'correct bookstore names are queried' + def bookstoreNames = result.collect { 
it.getLeaves().get('bookstore-name') } + assert bookstoreNames.toList() == expectedBookstoreNames + and: 'the correct number of page size is returned' + assert result.size() == expectedPageSize + and: 'the queried nodes have expected anchor names' + assert result.anchorName.toSet() == expectedAnchors.toSet() + where: 'the following data is used' + scenario | pageIndex | pageSize || expectedPageSize || expectedAnchors || expectedBookstoreNames + '1st page with one anchor' | 1 | 1 || 1 || [BOOKSTORE_ANCHOR_1] || ['Easons-1'] + '1st page with two anchor' | 1 | 2 || 2 || [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2] || ['Easons-1', 'Easons-2'] + '2nd page' | 2 | 1 || 1 || [BOOKSTORE_ANCHOR_2] || ['Easons-2'] + 'no 2nd page due to page size' | 2 | 2 || 0 || [] || [] + } + + def 'Cps Path query across anchors using pagination option for ancestor axis.'() { + when: 'a query is executed to get a data nodes across anchors by the given CpsPath and pagination option' + def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, '//books/ancestor::categories', INCLUDE_ALL_DESCENDANTS, new PaginationOption(1, 2)) + then: 'correct category codes are queried' + def categoryNames = result.collect { it.getLeaves().get('name') } + assert categoryNames.toSet() == ['Discount books', 'Computing', 'Comedy', 'Thriller', 'Children'].toSet() + and: 'the queried nodes have expected anchors' + assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2].toSet() + } + + def 'Count number of anchors for given dataspace name and cps path'() { + expect: '/bookstore is present in two anchors' + assert objectUnderTest.countAnchorsForDataspaceAndCpsPath(FUNCTIONAL_TEST_DATASPACE_1, '/bookstore') == 2 + } + + def 'Cps Path query across anchors using no pagination'() { + when: 'a query is executed to get a data nodes across anchors by the given CpsPath and pagination option' + def result = objectUnderTest.queryDataNodesAcrossAnchors(FUNCTIONAL_TEST_DATASPACE_1, 
'/bookstore', OMIT_DESCENDANTS, NO_PAGINATION) + then: 'all bookstore names are queried' + def bookstoreNames = result.collect { it.getLeaves().get('bookstore-name') } + assert bookstoreNames.toSet() == ['Easons-1', 'Easons-2'].toSet() + and: 'the correct number of page size is returned' + assert result.size() == 2 + and: 'the queried nodes have expected bookstore names' + assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1, BOOKSTORE_ANCHOR_2].toSet() + } +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/SessionManagerIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/SessionManagerIntegrationSpec.groovy new file mode 100644 index 0000000000..428d5f9014 --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/cps/SessionManagerIntegrationSpec.groovy @@ -0,0 +1,73 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.cps + +import org.onap.cps.integration.base.FunctionalSpecBase +import org.onap.cps.spi.exceptions.SessionManagerException +import org.onap.cps.spi.utils.SessionManager + +class SessionManagerIntegrationSpec extends FunctionalSpecBase { + + SessionManager objectUnderTest + + def shortTimeoutForTesting = 300L + def sessionId + + def setup() { + objectUnderTest = sessionManager + sessionId = objectUnderTest.startSession() + } + + def cleanup(){ + objectUnderTest.closeSession(sessionId, objectUnderTest.WITH_COMMIT) + } + + def 'Lock anchor.'(){ + when: 'session tries to acquire anchor lock by passing anchor entity details' + objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting) + then: 'no exception is thrown' + noExceptionThrown() + } + + def 'Attempt to lock anchor when another session is holding the lock.'(){ + given: 'another session that holds an anchor lock' + def otherSessionId = objectUnderTest.startSession() + objectUnderTest.lockAnchor(otherSessionId,FUNCTIONAL_TEST_DATASPACE_1,BOOKSTORE_ANCHOR_1,shortTimeoutForTesting) + when: 'a session tries to acquire the same anchor lock' + objectUnderTest.lockAnchor(sessionId,FUNCTIONAL_TEST_DATASPACE_1,BOOKSTORE_ANCHOR_1,shortTimeoutForTesting) + then: 'a session manager exception is thrown specifying operation reached timeout' + def thrown = thrown(SessionManagerException) + thrown.message.contains('Timeout') + then: 'when the other session holding the lock is closed, lock can finally be acquired' + objectUnderTest.closeSession(otherSessionId, objectUnderTest.WITH_COMMIT) + objectUnderTest.lockAnchor(sessionId,FUNCTIONAL_TEST_DATASPACE_1,BOOKSTORE_ANCHOR_1,shortTimeoutForTesting) + } + + def 'Lock anchor twice using the same session.'(){ + given: 'session that already holds an anchor 
lock' + objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting) + when: 'same session tries to acquire same anchor lock' + objectUnderTest.lockAnchor(sessionId, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, shortTimeoutForTesting) + then: 'no exception is thrown' + noExceptionThrown() + } + +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/BearerTokenPassthroughSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/BearerTokenPassthroughSpec.groovy new file mode 100644 index 0000000000..c91e750d6e --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/BearerTokenPassthroughSpec.groovy @@ -0,0 +1,101 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2024 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.ncmp + +import org.onap.cps.integration.base.CpsIntegrationSpecBase +import org.springframework.http.HttpHeaders +import org.springframework.http.MediaType +import spock.util.concurrent.PollingConditions + +import static org.springframework.http.HttpMethod.DELETE +import static org.springframework.http.HttpMethod.GET +import static org.springframework.http.HttpMethod.PATCH +import static org.springframework.http.HttpMethod.POST +import static org.springframework.http.HttpMethod.PUT +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.request +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status + +class BearerTokenPassthroughSpec extends CpsIntegrationSpecBase { + + def setup() { + dmiDispatcher.moduleNamesPerCmHandleId['ch-1'] = ['M1', 'M2'] + registerCmHandle(DMI_URL, 'ch-1', NO_MODULE_SET_TAG) + } + + def cleanup() { + deregisterCmHandle(DMI_URL, 'ch-1') + } + + def 'Bearer token is passed from NCMP to DMI in pass-through data operations.'() { + when: 'a pass-through data request is sent to NCMP with a bearer token' + mvc.perform(request(httpMethod, '/ncmp/v1/ch/ch-1/data/ds/ncmp-datastore:passthrough-running') + .queryParam('resourceIdentifier', 'my-resource-id') + .contentType(MediaType.APPLICATION_JSON) + .content('{ "some-json": "data" }') + .header(HttpHeaders.AUTHORIZATION, 'Bearer some-bearer-token')) + .andExpect(status().is2xxSuccessful()) + + then: 'DMI has received request with bearer token' + assert dmiDispatcher.lastAuthHeaderReceived == 'Bearer some-bearer-token' + + where: 'all HTTP operations are applied' + httpMethod << [GET, POST, PUT, PATCH, DELETE] + } + + def 'Basic auth header is NOT passed from NCMP to DMI in pass-through data operations.'() { + when: 'a pass-through data request is sent to NCMP 
with basic authentication' + mvc.perform(request(httpMethod, '/ncmp/v1/ch/ch-1/data/ds/ncmp-datastore:passthrough-running') + .queryParam('resourceIdentifier', 'my-resource-id') + .contentType(MediaType.APPLICATION_JSON) + .content('{ "some-json": "data" }') + .header(HttpHeaders.AUTHORIZATION, 'Basic Y3BzdXNlcjpjcHNyMGNrcyE=')) + .andExpect(status().is2xxSuccessful()) + + then: 'DMI has received request with no authorization header' + assert dmiDispatcher.lastAuthHeaderReceived == null + + where: 'all HTTP operations are applied' + httpMethod << [GET, POST, PUT, PATCH, DELETE] + } + + def 'Bearer token is passed from NCMP to DMI in async batch pass-through data operation.'() { + when: 'a pass-through async data request is sent to NCMP with a bearer token' + def requestBody = """{"operations": [{ + "operation": "read", + "operationId": "operational-1", + "datastore": "ncmp-datastore:passthrough-running", + "resourceIdentifier": "my-resource-id", + "targetIds": ["ch-1"] + }]}""" + mvc.perform(request(POST, '/ncmp/v1/data') + .queryParam('topic', 'my-topic') + .contentType(MediaType.APPLICATION_JSON) + .content(requestBody) + .header(HttpHeaders.AUTHORIZATION, 'Bearer some-bearer-token')) + .andExpect(status().is2xxSuccessful()) + + then: 'DMI will receive the async request with bearer token' + new PollingConditions().within(3, () -> { + assert dmiDispatcher.lastAuthHeaderReceived == 'Bearer some-bearer-token' + }) + } + +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleCreateSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleCreateSpec.groovy new file mode 100644 index 0000000000..914f562a2f --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleCreateSpec.groovy @@ -0,0 +1,178 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2024 Nordix Foundation + * 
================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.ncmp + +import org.apache.kafka.common.TopicPartition +import org.apache.kafka.common.serialization.StringDeserializer +import org.onap.cps.integration.KafkaTestContainer +import org.onap.cps.integration.base.CpsIntegrationSpecBase +import org.onap.cps.ncmp.api.inventory.NetworkCmProxyInventoryFacade +import org.onap.cps.ncmp.api.inventory.models.CmHandleRegistrationResponse +import org.onap.cps.ncmp.api.inventory.models.DmiPluginRegistration +import org.onap.cps.ncmp.api.inventory.models.NcmpServiceCmHandle +import org.onap.cps.ncmp.events.lcm.v1.LcmEvent +import org.onap.cps.ncmp.impl.inventory.models.CmHandleState +import org.onap.cps.ncmp.impl.inventory.models.LockReasonCategory +import spock.util.concurrent.PollingConditions + +import java.time.Duration +import java.time.OffsetDateTime + +class CmHandleCreateSpec extends CpsIntegrationSpecBase { + + NetworkCmProxyInventoryFacade objectUnderTest + + def kafkaConsumer = KafkaTestContainer.getConsumer('ncmp-group', StringDeserializer.class) + + def setup() { + objectUnderTest = networkCmProxyInventoryFacade + } + + def 'CM Handle registration is successful.'() { + given: 'DMI will return modules when 
requested' + dmiDispatcher.moduleNamesPerCmHandleId['ch-1'] = ['M1', 'M2'] + + and: 'consumer subscribed to topic' + kafkaConsumer.subscribe(['ncmp-events']) + + when: 'a CM-handle is registered for creation' + def cmHandleToCreate = new NcmpServiceCmHandle(cmHandleId: 'ch-1') + def dmiPluginRegistration = new DmiPluginRegistration(dmiPlugin: DMI_URL, createdCmHandles: [cmHandleToCreate]) + def dmiPluginRegistrationResponse = objectUnderTest.updateDmiRegistrationAndSyncModule(dmiPluginRegistration) + + then: 'registration gives successful response' + assert dmiPluginRegistrationResponse.createdCmHandles == [CmHandleRegistrationResponse.createSuccessResponse('ch-1')] + + and: 'CM-handle is initially in ADVISED state' + assert CmHandleState.ADVISED == objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState + + and: 'CM-handle goes to READY state after module sync' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState + }) + + and: 'the messages is polled' + def message = kafkaConsumer.poll(Duration.ofMillis(10000)) + def records = message.records(new TopicPartition('ncmp-events', 0)) + + and: 'the newest lcm event notification is received with READY state' + def notificationMessage = jsonObjectMapper.convertJsonString(records.last().value().toString(), LcmEvent) + assert notificationMessage.event.newValues.cmHandleState.value() == 'READY' + + and: 'the CM-handle has expected modules' + assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences('ch-1').moduleName.sort() + + cleanup: 'deregister CM handle' + deregisterCmHandle(DMI_URL, 'ch-1') + } + + def 'CM Handle goes to LOCKED state when DMI gives error during module sync.'() { + given: 'DMI is not available to handle requests' + dmiDispatcher.isAvailable = false + + when: 'a CM-handle is registered for creation' + def cmHandleToCreate = new NcmpServiceCmHandle(cmHandleId: 
'ch-1') + def dmiPluginRegistration = new DmiPluginRegistration(dmiPlugin: DMI_URL, createdCmHandles: [cmHandleToCreate]) + objectUnderTest.updateDmiRegistrationAndSyncModule(dmiPluginRegistration) + + then: 'CM-handle goes to LOCKED state with reason MODULE_SYNC_FAILED' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState('ch-1') + assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED + assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_SYNC_FAILED + }) + + and: 'CM-handle has no modules' + assert objectUnderTest.getYangResourcesModuleReferences('ch-1').empty + + cleanup: 'deregister CM handle' + deregisterCmHandle(DMI_URL, 'ch-1') + } + + def 'Create a CM-handle with existing moduleSetTag.'() { + given: 'DMI will return modules when requested' + dmiDispatcher.moduleNamesPerCmHandleId = ['ch-1': ['M1', 'M2'], 'ch-2': ['M1', 'M3']] + and: 'existing CM-handles cm-1 with moduleSetTag "A", and cm-2 with moduleSetTag "B"' + registerCmHandle(DMI_URL, 'ch-1', 'A') + registerCmHandle(DMI_URL, 'ch-2', 'B') + + when: 'a CM-handle is registered for creation with moduleSetTag "B"' + def cmHandleToCreate = new NcmpServiceCmHandle(cmHandleId: 'ch-3', moduleSetTag: 'B') + objectUnderTest.updateDmiRegistrationAndSyncModule(new DmiPluginRegistration(dmiPlugin: DMI_URL, createdCmHandles: [cmHandleToCreate])) + + then: 'the CM-handle goes to READY state' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState('ch-3').cmHandleState + }) + + and: 'the CM-handle has expected moduleSetTag' + assert objectUnderTest.getNcmpServiceCmHandle('ch-3').moduleSetTag == 'B' + + and: 'the CM-handle has expected modules from module set "B": M1 and M3' + assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences('ch-3').moduleName.sort() + + cleanup: 
'deregister CM handles' + deregisterCmHandles(DMI_URL, ['ch-1', 'ch-2', 'ch-3']) + } + + def 'CM Handle retry after failed module sync.'() { + given: 'DMI is not initially available to handle requests' + dmiDispatcher.isAvailable = false + + when: 'CM-handles are registered for creation' + def cmHandlesToCreate = [new NcmpServiceCmHandle(cmHandleId: 'ch-1'), new NcmpServiceCmHandle(cmHandleId: 'ch-2')] + def dmiPluginRegistration = new DmiPluginRegistration(dmiPlugin: DMI_URL, createdCmHandles: cmHandlesToCreate) + objectUnderTest.updateDmiRegistrationAndSyncModule(dmiPluginRegistration) + then: 'CM-handles go to LOCKED state' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + assert objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState == CmHandleState.LOCKED + assert objectUnderTest.getCmHandleCompositeState('ch-2').cmHandleState == CmHandleState.LOCKED + }) + + when: 'we wait for LOCKED CM handle retry time (actually just subtract 3 minutes from handles lastUpdateTime)' + overrideCmHandleLastUpdateTime('ch-1', OffsetDateTime.now().minusMinutes(3)) + overrideCmHandleLastUpdateTime('ch-2', OffsetDateTime.now().minusMinutes(3)) + then: 'CM-handles go to ADVISED state' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + assert objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState == CmHandleState.ADVISED + assert objectUnderTest.getCmHandleCompositeState('ch-2').cmHandleState == CmHandleState.ADVISED + }) + + when: 'DMI will return expected modules' + dmiDispatcher.moduleNamesPerCmHandleId = ['ch-1': ['M1', 'M2'], 'ch-2': ['M1', 'M3']] + and: 'DMI is available for retry' + dmiDispatcher.isAvailable = true + then: 'CM-handles go to READY state' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + assert objectUnderTest.getCmHandleCompositeState('ch-1').cmHandleState == CmHandleState.READY + assert objectUnderTest.getCmHandleCompositeState('ch-2').cmHandleState == 
CmHandleState.READY + }) + and: 'CM-handles have expected modules' + assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences('ch-1').moduleName.sort() + assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences('ch-2').moduleName.sort() + and: 'CM-handles have expected module set tags (blank)' + assert objectUnderTest.getNcmpServiceCmHandle('ch-1').moduleSetTag == '' + assert objectUnderTest.getNcmpServiceCmHandle('ch-2').moduleSetTag == '' + + cleanup: 'deregister CM handle' + deregisterCmHandles(DMI_URL, ['ch-1', 'ch-2']) + } +} diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpgradeSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpgradeSpec.groovy new file mode 100644 index 0000000000..35ea0793ce --- /dev/null +++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/ncmp/CmHandleUpgradeSpec.groovy @@ -0,0 +1,177 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2024 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.ncmp + +import org.onap.cps.integration.base.CpsIntegrationSpecBase +import org.onap.cps.ncmp.api.inventory.NetworkCmProxyInventoryFacade +import org.onap.cps.ncmp.api.inventory.models.CmHandleRegistrationResponse +import org.onap.cps.ncmp.api.inventory.models.DmiPluginRegistration +import org.onap.cps.ncmp.api.inventory.models.UpgradedCmHandles +import org.onap.cps.ncmp.impl.inventory.models.CmHandleState +import org.onap.cps.ncmp.impl.inventory.models.LockReasonCategory +import spock.util.concurrent.PollingConditions + +class CmHandleUpgradeSpec extends CpsIntegrationSpecBase { + + NetworkCmProxyInventoryFacade objectUnderTest + + static final CM_HANDLE_ID = 'ch-1' + static final CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG = 'ch-2' + + def setup() { + objectUnderTest = networkCmProxyInventoryFacade + } + + def 'Upgrade CM-handle with new moduleSetTag or no moduleSetTag.'() { + given: 'a CM-handle is created with expected initial modules: M1 and M2' + dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M2'] + registerCmHandle(DMI_URL, CM_HANDLE_ID, initialModuleSetTag) + assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() + + when: "the CM-handle is upgraded with given moduleSetTag '${updatedModuleSetTag}'" + def cmHandlesToUpgrade = new UpgradedCmHandles(cmHandles: [CM_HANDLE_ID], moduleSetTag: updatedModuleSetTag) + def dmiPluginRegistrationResponse = objectUnderTest.updateDmiRegistrationAndSyncModule( + new DmiPluginRegistration(dmiPlugin: DMI_URL, upgradedCmHandles: cmHandlesToUpgrade)) + + then: 'registration gives successful response' + assert dmiPluginRegistrationResponse.upgradedCmHandles == [CmHandleRegistrationResponse.createSuccessResponse(CM_HANDLE_ID)] + + and: 'CM-handle is in LOCKED state due to MODULE_UPGRADE' 
+ def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID) + assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED + assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_UPGRADE + assert cmHandleCompositeState.lockReason.details == "Upgrade to ModuleSetTag: ${updatedModuleSetTag}" + + when: 'DMI will return different modules for upgrade: M1 and M3' + dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M3'] + + then: 'CM-handle goes to READY state' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID).cmHandleState + }) + + and: 'the CM-handle has expected moduleSetTag' + assert objectUnderTest.getNcmpServiceCmHandle(CM_HANDLE_ID).moduleSetTag == updatedModuleSetTag + + and: 'CM-handle has expected updated modules: M1 and M3' + assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() + + cleanup: 'deregister CM-handle' + deregisterCmHandle(DMI_URL, CM_HANDLE_ID) + + where: + initialModuleSetTag | updatedModuleSetTag + NO_MODULE_SET_TAG | NO_MODULE_SET_TAG + NO_MODULE_SET_TAG | 'new' + 'initial' | NO_MODULE_SET_TAG + 'initial' | 'new' + } + + def 'Upgrade CM-handle with existing moduleSetTag.'() { + given: 'DMI will return modules for registration' + dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M2'] + dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG] = ['M1', 'M3'] + and: "an existing CM-handle handle with moduleSetTag '${updatedModuleSetTag}'" + registerCmHandle(DMI_URL, CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG, updatedModuleSetTag) + assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG).moduleName.sort() + and: "a CM-handle with moduleSetTag '${initialModuleSetTag}' which will be upgraded" + 
registerCmHandle(DMI_URL, CM_HANDLE_ID, initialModuleSetTag) + assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() + + when: "CM-handle is upgraded to moduleSetTag '${updatedModuleSetTag}'" + def cmHandlesToUpgrade = new UpgradedCmHandles(cmHandles: [CM_HANDLE_ID], moduleSetTag: updatedModuleSetTag) + def dmiPluginRegistrationResponse = objectUnderTest.updateDmiRegistrationAndSyncModule( + new DmiPluginRegistration(dmiPlugin: DMI_URL, upgradedCmHandles: cmHandlesToUpgrade)) + + then: 'registration gives successful response' + assert dmiPluginRegistrationResponse.upgradedCmHandles == [CmHandleRegistrationResponse.createSuccessResponse(CM_HANDLE_ID)] + + and: 'CM-handle goes to READY state' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID).cmHandleState + }) + + and: 'the CM-handle has expected moduleSetTag' + assert objectUnderTest.getNcmpServiceCmHandle(CM_HANDLE_ID).moduleSetTag == updatedModuleSetTag + + and: 'CM-handle has expected updated modules: M1 and M3' + assert ['M1', 'M3'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() + + cleanup: 'deregister CM-handle' + deregisterCmHandles(DMI_URL, [CM_HANDLE_ID, CM_HANDLE_ID_WITH_EXISTING_MODULE_SET_TAG]) + + where: + initialModuleSetTag | updatedModuleSetTag + NO_MODULE_SET_TAG | 'moduleSet2' + 'moduleSet1' | 'moduleSet2' + } + + def 'Skip upgrade of CM-handle with same moduleSetTag as before.'() { + given: 'an existing CM-handle with expected initial modules: M1 and M2' + dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M2'] + registerCmHandle(DMI_URL, CM_HANDLE_ID, 'same') + assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() + + when: 'CM-handle is upgraded with the same moduleSetTag' + def cmHandlesToUpgrade = new UpgradedCmHandles(cmHandles: 
[CM_HANDLE_ID], moduleSetTag: 'same') + objectUnderTest.updateDmiRegistrationAndSyncModule( + new DmiPluginRegistration(dmiPlugin: DMI_URL, upgradedCmHandles: cmHandlesToUpgrade)) + + then: 'CM-handle remains in READY state' + assert CmHandleState.READY == objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID).cmHandleState + + and: 'the CM-handle has same moduleSetTag as before' + assert objectUnderTest.getNcmpServiceCmHandle(CM_HANDLE_ID).moduleSetTag == 'same' + + then: 'CM-handle has same modules as before: M1 and M2' + assert ['M1', 'M2'] == objectUnderTest.getYangResourcesModuleReferences(CM_HANDLE_ID).moduleName.sort() + + cleanup: 'deregister CM-handle' + deregisterCmHandle(DMI_URL, CM_HANDLE_ID) + } + + def 'Upgrade of CM-handle fails due to DMI error.'() { + given: 'a CM-handle exists' + dmiDispatcher.moduleNamesPerCmHandleId[CM_HANDLE_ID] = ['M1', 'M2'] + registerCmHandle(DMI_URL, CM_HANDLE_ID, 'oldTag') + and: 'DMI is not available for upgrade' + dmiDispatcher.isAvailable = false + + when: 'the CM-handle is upgraded' + def cmHandlesToUpgrade = new UpgradedCmHandles(cmHandles: [CM_HANDLE_ID], moduleSetTag: 'newTag') + objectUnderTest.updateDmiRegistrationAndSyncModule( + new DmiPluginRegistration(dmiPlugin: DMI_URL, upgradedCmHandles: cmHandlesToUpgrade)) + + then: 'CM-handle goes to LOCKED state with reason MODULE_UPGRADE_FAILED' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + def cmHandleCompositeState = objectUnderTest.getCmHandleCompositeState(CM_HANDLE_ID) + assert cmHandleCompositeState.cmHandleState == CmHandleState.LOCKED + assert cmHandleCompositeState.lockReason.lockReasonCategory == LockReasonCategory.MODULE_UPGRADE_FAILED + }) + + and: 'the CM-handle has same moduleSetTag as before' + assert objectUnderTest.getNcmpServiceCmHandle(CM_HANDLE_ID).moduleSetTag == 'oldTag' + + cleanup: 'deregister CM-handle' + deregisterCmHandle(DMI_URL, CM_HANDLE_ID) + } + +} diff --git 
/*
 * ============LICENSE_START=======================================================
 * Copyright (C) 2024 Nordix Foundation
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the 'License');
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an 'AS IS' BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 * ============LICENSE_END=========================================================
 */

package org.onap.cps.integration.functional.ncmp

import org.onap.cps.integration.base.CpsIntegrationSpecBase
import org.onap.cps.ncmp.impl.cmnotificationsubscription.utils.CmSubscriptionPersistenceService
import org.springframework.beans.factory.annotation.Autowired
import spock.lang.Stepwise

import static org.onap.cps.ncmp.api.data.models.DatastoreType.PASSTHROUGH_RUNNING

/**
 * Functional tests for adding and removing cm notification subscriptions
 * using the subscription persistence service.
 *
 * NOTE: the feature methods build on subscription state ('subId-1', 'subId-3')
 * created by earlier methods, so they must run in declaration order; @Stepwise
 * makes that ordering explicit instead of relying on the default run order.
 */
@Stepwise
class CmNotificationSubscriptionSpec extends CpsIntegrationSpecBase {

    @Autowired
    CmSubscriptionPersistenceService cmSubscriptionPersistenceService

    def 'Adding a new cm notification subscription'() {
        given: 'there is no ongoing cm subscription for the following'
            def datastoreType = PASSTHROUGH_RUNNING
            def cmHandleId = 'ch-1'
            def xpath = '/x/y'
            assert cmSubscriptionPersistenceService.
                getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size() == 0
        when: 'we add a new cm notification subscription'
            cmSubscriptionPersistenceService.addCmSubscription(datastoreType, cmHandleId, xpath,
                'subId-1')
        then: 'there is an ongoing cm subscription for that CM handle and xpath'
            assert cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, xpath)
        and: 'only one subscription id is related to now ongoing cm subscription'
            assert cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size() == 1
    }

    def 'Adding a cm notification subscription to the already existing cm handle but non existing xpath'() {
        given: 'an ongoing cm subscription with the following details'
            def datastoreType = PASSTHROUGH_RUNNING
            def cmHandleId = 'ch-1'
            def existingXpath = '/x/y'
            assert cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, existingXpath)
        and: 'a non existing cm subscription with same datastore name and cm handle but different xpath'
            def nonExistingXpath = '/x2/y2'
            assert !cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, nonExistingXpath)
        when: 'a new cm notification subscription is made for the existing cm handle and non existing xpath'
            cmSubscriptionPersistenceService.addCmSubscription(datastoreType, cmHandleId, nonExistingXpath,
                'subId-2')
        then: 'there is an ongoing cm subscription for that CM handle and xpath'
            assert cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, nonExistingXpath)
        and: 'only one subscription id is related to now ongoing cm subscription'
            assert cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, nonExistingXpath).size() == 1
    }

    def 'Adding a cm notification subscription to the already existing cm handle and xpath'() {
        given: 'an ongoing cm subscription with the following details'
            def datastoreType = PASSTHROUGH_RUNNING
            def cmHandleId = 'ch-1'
            def xpath = '/x/y'
        when: 'a new cm notification subscription is made for the SAME CM handle and xpath'
            cmSubscriptionPersistenceService.addCmSubscription(datastoreType, cmHandleId, xpath,
                'subId-3')
        then: 'it is added to the ongoing list of subscription ids'
            def subscriptionIds = cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath)
            assert subscriptionIds.size() == 2
        and: 'both subscription ids exists for the CM handle and xpath'
            assert subscriptionIds.contains('subId-1') && subscriptionIds.contains('subId-3')
    }

    def 'Removing cm notification subscriber among other subscribers'() {
        given: 'an ongoing cm subscription with the following details'
            def datastoreType = PASSTHROUGH_RUNNING
            def cmHandleId = 'ch-1'
            def xpath = '/x/y'
        and: 'the number of subscribers is as follows'
            def originalNumberOfSubscribers =
                cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size()
        when: 'a subscriber is removed'
            cmSubscriptionPersistenceService.removeCmSubscription(datastoreType, cmHandleId, xpath, 'subId-3')
        then: 'the number of subscribers is reduced by 1'
            def updatedNumberOfSubscribers = cmSubscriptionPersistenceService.getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size()
            assert updatedNumberOfSubscribers == originalNumberOfSubscribers - 1
    }

    def 'Removing the LAST cm notification subscriber for a given cm handle, datastore and xpath'() {
        given: 'an ongoing cm subscription with the following details'
            def datastoreType = PASSTHROUGH_RUNNING
            def cmHandleId = 'ch-1'
            def xpath = '/x/y'
        and: 'there is only one subscriber'
            assert cmSubscriptionPersistenceService
                .getOngoingCmSubscriptionIds(datastoreType, cmHandleId, xpath).size() == 1
        when: 'only subscriber is removed'
            cmSubscriptionPersistenceService.removeCmSubscription(datastoreType, cmHandleId, xpath, 'subId-1')
        then: 'there are no longer any subscriptions for the cm handle, datastore and xpath'
            assert !cmSubscriptionPersistenceService.isOngoingCmSubscription(datastoreType, cmHandleId, xpath)
    }

}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.integration.functional.ncmp + +import static org.hamcrest.Matchers.containsInAnyOrder +import static org.hamcrest.Matchers.hasSize +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status + +import org.onap.cps.integration.base.CpsIntegrationSpecBase +import org.springframework.http.MediaType +import spock.util.concurrent.PollingConditions + +class RestApiSpec extends CpsIntegrationSpecBase { + + def 'Register CM Handles using REST API.'() { + given: 'DMI will return modules' + dmiDispatcher.moduleNamesPerCmHandleId = [ + 'ch-1': ['M1', 'M2'], + 'ch-2': ['M1', 'M2'], + 'ch-3': ['M1', 'M3'] + ] + when: 'a POST request is made to register the CM Handles' + def requestBody = '{"dmiPlugin":"'+DMI_URL+'","createdCmHandles":[{"cmHandle":"ch-1"},{"cmHandle":"ch-2"},{"cmHandle":"ch-3"}]}' + mvc.perform(post('/ncmpInventory/v1/ch').contentType(MediaType.APPLICATION_JSON).content(requestBody)) + .andExpect(status().is2xxSuccessful()) + then: 'CM-handles go to READY state' + new PollingConditions().within(MODULE_SYNC_WAIT_TIME_IN_SECONDS, () -> { + (1..3).each { + mvc.perform(get('/ncmp/v1/ch/ch-'+it)) + 
.andExpect(status().isOk()) + .andExpect(jsonPath('$.state.cmHandleState').value('READY')) + } + }) + } + + def 'Search for CM Handles by module using REST API.'() { + given: 'a JSON request body containing search parameter' + def requestBodyWithModuleCondition = """{ + "cmHandleQueryParameters": [ + { + "conditionName": "hasAllModules", + "conditionParameters": [ {"moduleName": "%s"} ] + } + ] + }""".formatted(moduleName) + expect: "a search for module ${moduleName} returns expected CM handles" + mvc.perform(post('/ncmp/v1/ch/id-searches').contentType(MediaType.APPLICATION_JSON).content(requestBodyWithModuleCondition)) + .andExpect(status().is2xxSuccessful()) + .andExpect(jsonPath('$[*]', containsInAnyOrder(expectedCmHandles.toArray()))) + .andExpect(jsonPath('$', hasSize(expectedCmHandles.size()))); + where: + moduleName || expectedCmHandles + 'M1' || ['ch-1', 'ch-2', 'ch-3'] + 'M2' || ['ch-1', 'ch-2'] + 'M3' || ['ch-3'] + } + + def 'De-register CM handles using REST API.'() { + when: 'a POST request is made to deregister the CM Handle' + def requestBody = '{"dmiPlugin":"'+DMI_URL+'", "removedCmHandles": ["ch-1", "ch-2", "ch-3"]}' + mvc.perform(post('/ncmpInventory/v1/ch').contentType(MediaType.APPLICATION_JSON).content(requestBody)) + .andExpect(status().is2xxSuccessful()) + then: 'the CM handles are not found using GET' + (1..3).each { + mvc.perform(get('/ncmp/v1/ch/ch-'+it)).andExpect(status().is4xxClientError()) + } + } +} -- cgit 1.2.3-korg