Diffstat (limited to 'cps-ri/src/test')
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy | 31
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy | 42
-rwxr-xr-x  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy | 61
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy | 116
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy | 69
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy | 99
-rw-r--r--  cps-ri/src/test/resources/data/cps-path-query.sql | 38
-rwxr-xr-x  cps-ri/src/test/resources/data/fragment.sql | 43
-rw-r--r--  cps-ri/src/test/resources/hibernate.cfg.xml | 16
9 files changed, 385 insertions, 130 deletions
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy
index 063bd5b5ae..2de087fc28 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsAdminPersistenceServiceSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2021 Nordix Foundation
+ * Copyright (C) 2021-2022 Nordix Foundation
* Modifications Copyright (C) 2021 Pantheon.tech
* Modifications Copyright (C) 2022 Bell Canada
* ================================================================================
@@ -22,6 +22,7 @@
package org.onap.cps.spi.impl
+import org.mockito.Mock
import org.onap.cps.spi.CpsAdminPersistenceService
import org.onap.cps.spi.exceptions.AlreadyDefinedException
import org.onap.cps.spi.exceptions.AnchorNotFoundException
@@ -30,15 +31,21 @@ import org.onap.cps.spi.exceptions.DataspaceNotFoundException
import org.onap.cps.spi.exceptions.SchemaSetNotFoundException
import org.onap.cps.spi.exceptions.ModuleNamesNotFoundException
import org.onap.cps.spi.model.Anchor
+import org.onap.cps.spi.model.CmHandleQueryParameters
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.test.context.jdbc.Sql
+import org.testcontainers.shaded.com.fasterxml.jackson.databind.ObjectMapper
class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
@Autowired
CpsAdminPersistenceService objectUnderTest
+ @Mock
+ ObjectMapper objectMapper
+
static final String SET_DATA = '/data/anchor.sql'
+ static final String SET_FRAGMENT_DATA = '/data/fragment.sql'
static final String SAMPLE_DATA_FOR_ANCHORS_WITH_MODULES = '/data/anchors-schemaset-modules.sql'
static final String DATASPACE_WITH_NO_DATA = 'DATASPACE-002-NO-DATA'
static final Integer DELETED_ANCHOR_ID = 3002
@@ -46,7 +53,7 @@ class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
@Sql(CLEAR_DATA)
def 'Create and retrieve a new dataspace.'() {
when: 'a new dataspace is created'
- def dataspaceName = 'some new dataspace'
+ def dataspaceName = 'some-new-dataspace'
objectUnderTest.createDataspace(dataspaceName)
then: 'that dataspace can be retrieved from the dataspace repository'
def dataspaceEntity = dataspaceRepository.findByName(dataspaceName).orElseThrow()
@@ -66,7 +73,7 @@ class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
@Sql([CLEAR_DATA, SET_DATA])
def 'Create and retrieve a new anchor.'() {
when: 'a new anchor is created'
- def newAnchorName = 'my new anchor'
+ def newAnchorName = 'my-new-anchor'
objectUnderTest.createAnchor(DATASPACE_NAME, SCHEMA_SET_NAME1, newAnchorName)
then: 'that anchor can be retrieved'
def anchor = objectUnderTest.getAnchor(DATASPACE_NAME, newAnchorName)
@@ -141,7 +148,7 @@ class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
@Sql(CLEAR_DATA)
def 'Get all anchors in unknown dataspace.'() {
when: 'attempt to get all anchors in an unknown dataspace'
- objectUnderTest.getAnchors('unknown dataspace')
+ objectUnderTest.getAnchors('unknown-dataspace')
then: 'an DataspaceNotFoundException is thrown'
thrown(DataspaceNotFoundException)
}
@@ -219,4 +226,20 @@ class CpsAdminPersistenceServiceSpec extends CpsPersistenceSpecBase {
'dataspace contains schemasets' | 'DATASPACE-003' || DataspaceInUseException | 'contains 1 schemaset(s)'
}
+ @Sql([CLEAR_DATA, SET_FRAGMENT_DATA])
+ def 'Retrieve cm handle ids when #scenario.'() {
+ when: 'the service is invoked'
+ def cmHandleQueryParameters = new CmHandleQueryParameters()
+ cmHandleQueryParameters.setPublicProperties(publicProperties)
+ def returnedCmHandles = objectUnderTest.queryCmHandles(cmHandleQueryParameters)
+ then: 'the correct expected cm handles are returned'
+ returnedCmHandles == expectedCmHandleIds
+ where: 'the following data is used'
+ scenario | publicProperties || expectedCmHandleIds
+ 'single matching property' | ['Contact' : 'newemailforstore@bookstore.com'] || ['PNFDemo2', 'PNFDemo', 'PNFDemo4'] as Set
+ 'public property dont match' | ['wont_match' : 'wont_match'] || [] as Set
+ '2 properties, only one match (and)' | ['Contact' : 'newemailforstore@bookstore.com', 'Contact2': 'newemailforstore2@bookstore.com'] || ['PNFDemo4'] as Set
+ '2 properties, no match (and)' | ['Contact' : 'newemailforstore@bookstore.com', 'Contact2': ''] || [] as Set
+ 'No public properties - return all cm handles' | [ : ] || ['PNFDemo3', 'PNFDemo', 'PNFDemo2', 'PNFDemo4'] as Set
+ }
}
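
A minimal usage sketch of the query introduced above (Groovy; it uses only the CmHandleQueryParameters and queryCmHandles calls visible in this hunk and assumes the same Spring test context with the /data/fragment.sql fixture loaded):

    // Query cm handle ids whose public properties contain the given name/value pair.
    def cmHandleQueryParameters = new CmHandleQueryParameters()
    cmHandleQueryParameters.setPublicProperties(['Contact': 'newemailforstore@bookstore.com'])
    def matchingCmHandleIds = objectUnderTest.queryCmHandles(cmHandleQueryParameters)
    // Three cm handles in the fixture share this 'Contact' value; the result is a Set, so order is irrelevant.
    assert matchingCmHandleIds == ['PNFDemo', 'PNFDemo2', 'PNFDemo4'] as Set
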
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
index ae88d302bb..36b378a775 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2021 Nordix Foundation
+ * Copyright (C) 2021-2022 Nordix Foundation
* Modifications Copyright (C) 2021 Pantheon.tech
* Modifications Copyright (C) 2021 Bell Canada.
* ================================================================================
@@ -92,15 +92,15 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
}
where: 'the following data is used'
scenario | cpsPath || expectedXPaths
- 'fully unique descendant name' | '//categories[@code=2]' || ['/shops/shop[@id=1]/categories[@code=2]', '/shops/shop[@id=2]/categories[@code=1]', '/shops/shop[@id=2]/categories[@code=2]']
- 'descendant name match end of other node' | '//book' || ['/shops/shop[@id=1]/categories[@code=1]/book', '/shops/shop[@id=1]/categories[@code=2]/book']
- 'descendant with text condition on leaf' | '//book/title[text()="Chapters"]' || ['/shops/shop[@id=1]/categories[@code=2]/book']
+ 'fully unique descendant name' | '//categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']", "/shops/shop[@id='2']/categories[@code='1']", "/shops/shop[@id='2']/categories[@code='2']"]
+ 'descendant name match end of other node' | '//book' || ["/shops/shop[@id='1']/categories[@code='1']/book", "/shops/shop[@id='1']/categories[@code='2']/book"]
+ 'descendant with text condition on leaf' | '//book/title[text()="Chapters"]' || ["/shops/shop[@id='1']/categories[@code='2']/book"]
'descendant with text condition case mismatch' | '//book/title[text()="chapters"]' || []
- 'descendant with text condition on int leaf' | '//book/price[text()="5"]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
- 'descendant with text condition on leaf-list' | '//book/labels[text()="special offer"]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
+ 'descendant with text condition on int leaf' | '//book/price[text()="5"]' || ["/shops/shop[@id='1']/categories[@code='1']/book"]
+ 'descendant with text condition on leaf-list' | '//book/labels[text()="special offer"]' || ["/shops/shop[@id='1']/categories[@code='1']/book"]
'descendant with text condition partial match' | '//book/labels[text()="special"]' || []
- 'descendant with text condition (existing) empty string' | '//book/labels[text()=""]' || ['/shops/shop[@id=1]/categories[@code=1]/book']
- 'descendant with text condition on int leaf-list' | '//book/editions[text()="2000"]' || ['/shops/shop[@id=1]/categories[@code=2]/book']
+ 'descendant with text condition (existing) empty string' | '//book/labels[text()=""]' || ["/shops/shop[@id='1']/categories[@code='1']/book"]
+ 'descendant with text condition on int leaf-list' | '//book/editions[text()="2000"]' || ["/shops/shop[@id='1']/categories[@code='2']/book"]
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -115,10 +115,10 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
}
where: 'the following data is used'
scenario | cpsPath || expectedXPaths
- 'one leaf' | '//author[@FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]']
- 'more than one leaf' | '//author[@FirstName="Joe" and @Surname="Bloggs"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
- 'leaves reversed in order' | '//author[@Surname="Bloggs" and @FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
- 'leaf and text condition' | '//author[@FirstName="Joe"]/Surname[text()="Bloggs"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
+ 'one leaf' | '//author[@FirstName="Joe"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']", "/shops/shop[@id='1']/categories[@code='2']/book/author[@FirstName='Joe' and @Surname='Smith']"]
+ 'more than one leaf' | '//author[@FirstName="Joe" and @Surname="Bloggs"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
+ 'leaves reversed in order' | '//author[@Surname="Bloggs" and @FirstName="Joe"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
+ 'leaf and text condition' | '//author[@FirstName="Joe"]/Surname[text()="Bloggs"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -133,9 +133,9 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
}
where: 'the following data is used'
scenario | cpsPath || expectedXPaths
- 'one partial key leaf' | '//author[@FirstName="Joe"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]']
- 'one non key leaf' | '//author[@title="Dune"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
- 'mix of partial key and non key leaf' | '//author[@FirstName="Joe" and @title="Dune"]' || ['/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]']
+ 'one partial key leaf' | '//author[@FirstName="Joe"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']", "/shops/shop[@id='1']/categories[@code='2']/book/author[@FirstName='Joe' and @Surname='Smith']"]
+ 'one non key leaf' | '//author[@title="Dune"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
+ 'mix of partial key and non key leaf' | '//author[@FirstName="Joe" and @title="Dune"]' || ["/shops/shop[@id='1']/categories[@code='1']/book/author[@FirstName='Joe' and @Surname='Bloggs']"]
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -149,13 +149,13 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
}
where: 'the following data is used'
scenario | cpsPath || expectedXPaths
- 'multiple list-ancestors' | '//book/ancestor::categories' || ['/shops/shop[@id=1]/categories[@code=1]', '/shops/shop[@id=1]/categories[@code=2]']
- 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ['/shops/shop[@id=1]/categories[@code=1]']
+ 'multiple list-ancestors' | '//book/ancestor::categories' || ["/shops/shop[@id='1']/categories[@code='1']", "/shops/shop[@id='1']/categories[@code='2']"]
+ 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ["/shops/shop[@id='1']/categories[@code='1']"]
'top ancestor' | '//shop[@id=1]/ancestor::shops' || ['/shops']
- 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ['/shops/shop[@id=1]']
- 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ['/shops/shop[@id=1]/categories[@code=2]']
- 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ['/shops/shop[@id=3]/info/contact']
- 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ['/shops/shop[@id=1]']
+ 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ["/shops/shop[@id='1']"]
+ 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']"]
+ 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ["/shops/shop[@id='3']/info/contact"]
+ 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ["/shops/shop[@id='1']"]
'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || []
'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || []
}
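
The expectation changes above reflect that stored xpaths now carry single-quoted list-key values (e.g. [@id='1'] rather than [@id=1]); the cps-path queries themselves are unchanged. A short sketch of the pattern these tests follow; the queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption) call and the anchor name are assumptions, as the hunks above only show the data tables:

    // Issue a cps-path query; matched data nodes report their xpaths in the normalised,
    // single-quoted form regardless of how the query quoted the key value.
    def result = objectUnderTest.queryDataNodes(DATASPACE_NAME, 'ANCHOR-004', // anchor name is illustrative
            '//categories[@code=2]', OMIT_DESCENDANTS)
    assert result.xpath.every { it.contains("[@code='") }
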
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
index ab290051a2..6f780fc508 100755
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
@@ -23,11 +23,13 @@ package org.onap.cps.spi.impl
import com.fasterxml.jackson.databind.ObjectMapper
import com.google.common.collect.ImmutableSet
+import org.onap.cps.cpspath.parser.PathParsingException
import org.onap.cps.spi.CpsDataPersistenceService
import org.onap.cps.spi.entities.FragmentEntity
import org.onap.cps.spi.exceptions.AlreadyDefinedException
import org.onap.cps.spi.exceptions.AnchorNotFoundException
import org.onap.cps.spi.exceptions.CpsAdminException
+import org.onap.cps.spi.exceptions.CpsPathException
import org.onap.cps.spi.exceptions.DataNodeNotFoundException
import org.onap.cps.spi.exceptions.DataspaceNotFoundException
import org.onap.cps.spi.model.DataNode
@@ -150,7 +152,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
thrown(expectedException)
where: 'the following data is used'
scenario | parentXpath | dataNode || expectedException
- 'parent does not exist' | 'unknown' | newDataNode || DataNodeNotFoundException
+ 'parent does not exist' | '/unknown' | newDataNode || DataNodeNotFoundException
'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNode || AlreadyDefinedException
}
@@ -185,9 +187,9 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'following parameters were used'
- scenario | parentNodeXpath | listElementXpaths || expectedException
- 'parent node does not exist' | '/unknown' | ['irrelevant'] || DataNodeNotFoundException
- 'already existing fragment' | '/parent-201' | ['/parent-201/child-204[@key="A"]'] || AlreadyDefinedException
+ scenario | parentNodeXpath | listElementXpaths || expectedException
+ 'parent node does not exist' | '/unknown' | ['irrelevant'] || DataNodeNotFoundException
+ 'data fragment already exists' | '/parent-201' | ["/parent-201/child-204[@key='A']"] || AlreadyDefinedException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -208,6 +210,15 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
+ def 'Cps Path query with syntax error throws a CPS Path Exception.'() {
+ when: 'trying to execute a query with a syntax (parsing) error'
+ objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, 'invalid-cps-path/child' , OMIT_DESCENDANTS)
+ then: 'exception is thrown'
+ def exceptionThrown = thrown(CpsPathException)
+ assert exceptionThrown.getDetails().contains('failed to parse at line 1 due to extraneous input \'invalid-cps-path\' expecting \'/\'')
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
def 'Get data node by xpath with all descendants.'() {
when: 'data node is requested with all descendants'
def result = objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
@@ -235,10 +246,10 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NO XPATH' || DataNodeNotFoundException
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO XPATH' || DataNodeNotFoundException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -265,10 +276,10 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NON-EXISTING XPATH' || DataNodeNotFoundException
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -359,10 +370,10 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
then: 'a #expectedException is thrown'
thrown(expectedException)
where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | 'not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | 'not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'NON-EXISTING XPATH' || DataNodeNotFoundException
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
+ 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING XPATH' || DataNodeNotFoundException
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -468,10 +479,10 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
assert remainingChildXpaths.containsAll(expectedRemainingChildXpaths)
where: 'following parameters were used'
scenario | targetXpaths | parentFragmentId || expectedRemainingChildXpaths
- 'list element with key' | '/parent-203/child-204[@key="A"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ['/parent-203/child-203', '/parent-203/child-204[@key="B"]']
- 'list element with combined keys' | '/parent-202/child-205[@key="A" and @key2="B"]' | LIST_DATA_NODE_PARENT202_FRAGMENT_ID || ['/parent-202/child-206[@key="A"]']
+ 'list element with key' | '/parent-203/child-204[@key="A"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ["/parent-203/child-203", "/parent-203/child-204[@key='B']"]
+ 'list element with combined keys' | '/parent-202/child-205[@key="A" and @key2="B"]' | LIST_DATA_NODE_PARENT202_FRAGMENT_ID || ["/parent-202/child-206[@key='A']"]
'whole list' | '/parent-203/child-204' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ['/parent-203/child-203']
- 'list element under list element' | '/parent-203/child-204[@key="B"]/grand-child-204[@key2="Y"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ['/parent-203/child-203', '/parent-203/child-204[@key="A"]', '/parent-203/child-204[@key="B"]']
+ 'list element under list element' | '/parent-203/child-204[@key="B"]/grand-child-204[@key2="Y"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ["/parent-203/child-203", "/parent-203/child-204[@key='A']", "/parent-203/child-204[@key='B']"]
}
@Sql([CLEAR_DATA, SET_DATA])
@@ -510,9 +521,9 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
'child of target' | '/parent-206/child-206' | '/parent-206/child-206' || null
'child data node, parent still exists' | '/parent-206/child-206' | '/parent-206' || '/parent-206'
'list element' | '/parent-206/child-206/grand-child-206[@key="A"]' | '/parent-206/child-206/grand-child-206[@key="A"]' || null
- 'list element, sibling still exists' | '/parent-206/child-206/grand-child-206[@key="A"]' | '/parent-206/child-206/grand-child-206[@key="X"]' || '/parent-206/child-206/grand-child-206[@key="X"]'
+ 'list element, sibling still exists' | '/parent-206/child-206/grand-child-206[@key="A"]' | '/parent-206/child-206/grand-child-206[@key="X"]' || "/parent-206/child-206/grand-child-206[@key='X']"
'container node' | '/parent-206' | '/parent-206' || null
- 'container list node' | '/parent-206[@key="A"]' | '/parent-206[@key="B"]' || '/parent-206[@key="B"]'
+ 'container list node' | '/parent-206[@key="A"]' | '/parent-206[@key="B"]' || "/parent-206[@key='B']"
'root node with xpath /' | '/' | '/' || null
'root node with xpath passed as blank' | '' | '' || null
@@ -523,11 +534,11 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
when: 'data node is deleted'
objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, datanodeXpath)
then: 'a #expectedException is thrown'
- thrown(DataNodeNotFoundException)
+ thrown(expectedException)
where: 'the following parameters were used'
- scenario | datanodeXpath
- 'valid data node, non existent child node' | '/parent-203/child-non-existent'
- 'invalid list element' | '/parent-206/child-206/grand-child-206@key="A"]'
+ scenario | datanodeXpath | expectedException
+ 'valid data node, non existent child node' | '/parent-203/child-non-existent' | DataNodeNotFoundException
+ 'invalid list element' | '/parent-206/child-206/grand-child-206@key="A"]' | PathParsingException
}
@Sql([CLEAR_DATA, SET_DATA])
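
Taken together, the changes to this spec tighten xpath handling: absolute xpaths must start with '/', well-formed but absent xpaths still raise DataNodeNotFoundException, and input that cannot be parsed at all now surfaces as a CpsPathException (or a PathParsingException from the cps-path parser on delete). A compact sketch of the parse-error case, using only calls visible in this spec:

    // '/NO XPATH' is well-formed but absent and still yields DataNodeNotFoundException;
    // an xpath without a leading '/' cannot be parsed and yields CpsPathException instead.
    try {
        objectUnderTest.getDataNode(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
                'invalid-cps-path/child', OMIT_DESCENDANTS)
    } catch (CpsPathException cpsPathException) {
        assert cpsPathException.details.contains('failed to parse at line 1')
    }
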
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
index 7166008ad3..b37f471e76 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
@@ -1,6 +1,7 @@
/*
* ============LICENSE_START=======================================================
* Copyright (c) 2021 Bell Canada.
+ * Modifications Copyright (C) 2021-2022 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -28,88 +29,111 @@ import org.onap.cps.spi.model.DataNodeBuilder
import org.onap.cps.spi.repository.AnchorRepository
import org.onap.cps.spi.repository.DataspaceRepository
import org.onap.cps.spi.repository.FragmentRepository
+import org.onap.cps.spi.utils.SessionManager
import org.onap.cps.utils.JsonObjectMapper
import spock.lang.Specification
-
class CpsDataPersistenceServiceSpec extends Specification {
def mockDataspaceRepository = Mock(DataspaceRepository)
def mockAnchorRepository = Mock(AnchorRepository)
def mockFragmentRepository = Mock(FragmentRepository)
def jsonObjectMapper = new JsonObjectMapper(new ObjectMapper())
+ def mockSessionManager = Mock(SessionManager)
def objectUnderTest = new CpsDataPersistenceServiceImpl(
- mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper)
+ mockDataspaceRepository, mockAnchorRepository, mockFragmentRepository, jsonObjectMapper,mockSessionManager)
def 'Handling of StaleStateException (caused by concurrent updates) during data node tree update.'() {
- def parentXpath = 'parent-01'
+ def parentXpath = '/parent-01'
def myDataspaceName = 'my-dataspace'
def myAnchorName = 'my-anchor'
given: 'data node object'
- def submittedDataNode = new DataNodeBuilder()
- .withXpath(parentXpath)
- .withLeaves(['leaf-name': 'leaf-value'])
- .build()
+ def submittedDataNode = new DataNodeBuilder()
+ .withXpath(parentXpath)
+ .withLeaves(['leaf-name': 'leaf-value'])
+ .build()
and: 'fragment to be updated'
- mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
- def fragmentEntity = new FragmentEntity()
- fragmentEntity.setXpath(parentXpath)
- fragmentEntity.setChildFragments(Collections.emptySet())
- return fragmentEntity
- }
+ mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
+ def fragmentEntity = new FragmentEntity()
+ fragmentEntity.setXpath(parentXpath)
+ fragmentEntity.setChildFragments(Collections.emptySet())
+ return fragmentEntity
+ }
and: 'data node is concurrently updated by another transaction'
- mockFragmentRepository.save(_) >> { throw new StaleStateException("concurrent updates") }
+ mockFragmentRepository.save(_) >> { throw new StaleStateException("concurrent updates") }
when: 'attempt to update data node'
- objectUnderTest.replaceDataNodeTree(myDataspaceName, myAnchorName, submittedDataNode)
+ objectUnderTest.replaceDataNodeTree(myDataspaceName, myAnchorName, submittedDataNode)
then: 'concurrency exception is thrown'
- def concurrencyException = thrown(ConcurrencyException)
- assert concurrencyException.getDetails().contains(myDataspaceName)
- assert concurrencyException.getDetails().contains(myAnchorName)
- assert concurrencyException.getDetails().contains(parentXpath)
+ def concurrencyException = thrown(ConcurrencyException)
+ assert concurrencyException.getDetails().contains(myDataspaceName)
+ assert concurrencyException.getDetails().contains(myAnchorName)
+ assert concurrencyException.getDetails().contains(parentXpath)
}
def 'Retrieving a data node with a property JSON value of #scenario'() {
given: 'a fragment with a property JSON value of #scenario'
- mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
- new FragmentEntity(childFragments: Collections.emptySet(),
- attributes: "{\"some attribute\": ${dataString}}")
- }
+ mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
+ new FragmentEntity(childFragments: Collections.emptySet(),
+ attributes: "{\"some attribute\": ${dataString}}")
+ }
when: 'getting the data node represented by this fragment'
- def dataNode = objectUnderTest.getDataNode('my-dataspace', 'my-anchor',
- 'parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
+ def dataNode = objectUnderTest.getDataNode('my-dataspace', 'my-anchor',
+ '/parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
then: 'the leaf is of the correct value and data type'
- def attributeValue = dataNode.leaves.get('some attribute')
- assert attributeValue == expectedValue
- assert attributeValue.class == expectedDataClass
+ def attributeValue = dataNode.leaves.get('some attribute')
+ assert attributeValue == expectedValue
+ assert attributeValue.class == expectedDataClass
where: 'the following Data Type is passed'
- scenario | dataString || expectedValue | expectedDataClass
- 'just numbers' | '15174' || 15174 | Integer
- 'number with dot' | '15174.32' || 15174.32 | Double
- 'number with 0 value after dot' | '15174.0' || 15174.0 | Double
- 'number with 0 value before dot' | '0.32' || 0.32 | Double
- 'number higher than max int' | '2147483648' || 2147483648 | Long
- 'just text' | '"Test"' || 'Test' | String
- 'number with exponent' | '1.2345e5' || 1.2345e5 | Double
- 'number higher than max int with dot' | '123456789101112.0' || 123456789101112.0 | Double
- 'text and numbers' | '"String = \'1234\'"' || "String = '1234'" | String
- 'number as String' | '"12345"' || '12345' | String
+ scenario | dataString || expectedValue | expectedDataClass
+ 'just numbers' | '15174' || 15174 | Integer
+ 'number with dot' | '15174.32' || 15174.32 | Double
+ 'number with 0 value after dot' | '15174.0' || 15174.0 | Double
+ 'number with 0 value before dot' | '0.32' || 0.32 | Double
+ 'number higher than max int' | '2147483648' || 2147483648 | Long
+ 'just text' | '"Test"' || 'Test' | String
+ 'number with exponent' | '1.2345e5' || 1.2345e5 | Double
+ 'number higher than max int with dot' | '123456789101112.0' || 123456789101112.0 | Double
+ 'text and numbers' | '"String = \'1234\'"' || "String = '1234'" | String
+ 'number as String' | '"12345"' || '12345' | String
}
def 'Retrieving a data node with invalid JSON'() {
given: 'a fragment with invalid JSON'
- mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
- new FragmentEntity(childFragments: Collections.emptySet(), attributes: '{invalid json')
- }
+ mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, _) >> {
+ new FragmentEntity(childFragments: Collections.emptySet(), attributes: '{invalid json')
+ }
when: 'getting the data node represented by this fragment'
- def dataNode = objectUnderTest.getDataNode('my-dataspace', 'my-anchor',
- 'parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
+ def dataNode = objectUnderTest.getDataNode('my-dataspace', 'my-anchor',
+ '/parent-01', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
then: 'a data validation exception is thrown'
- thrown(DataValidationException)
+ thrown(DataValidationException)
+ }
+
+ def 'start session'() {
+ when: 'start session'
+ objectUnderTest.startSession()
+ then: 'the session manager method to start session is invoked'
+ 1 * mockSessionManager.startSession()
}
-}
+ def 'close session'() {
+ given: 'session ID'
+ def someSessionId = 'someSessionId'
+ when: 'close session method is called with session ID as parameter'
+ objectUnderTest.closeSession(someSessionId)
+ then: 'the session manager method to close session is invoked with parameter'
+ 1 * mockSessionManager.closeSession(someSessionId)
+ }
+
+ def 'Lock anchor.'(){
+ when: 'lock anchor method is called with anchor entity details'
+ objectUnderTest.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L)
+ then: 'the session manager method to lock anchor is invoked with same parameters'
+ 1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L)
+ }
+}
\ No newline at end of file
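
The three interaction tests added above only pin down delegation. A sketch of what the corresponding production methods presumably look like, inferred from the asserted interactions rather than copied from CpsDataPersistenceServiceImpl (parameter names are illustrative):

    // Session handling is forwarded unchanged to the injected SessionManager.
    String startSession() {
        return sessionManager.startSession()
    }

    void closeSession(String sessionId) {
        sessionManager.closeSession(sessionId)
    }

    void lockAnchor(String sessionId, String dataspaceName, String anchorName, Long timeoutInMilliseconds) {
        sessionManager.lockAnchor(sessionId, dataspaceName, anchorName, timeoutInMilliseconds)
    }
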
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy
new file mode 100644
index 0000000000..9b58c8bc32
--- /dev/null
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerIntegrationSpec.groovy
@@ -0,0 +1,69 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2022 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.utils
+
+import org.onap.cps.spi.exceptions.SessionManagerException
+import org.onap.cps.spi.impl.CpsPersistenceSpecBase
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.test.context.jdbc.Sql
+
+class SessionManagerIntegrationSpec extends CpsPersistenceSpecBase{
+
+ final static String SET_DATA = '/data/anchor.sql'
+
+ @Autowired
+ SessionManager objectUnderTest
+
+ def sessionId
+ def shortTimeoutForTesting = 200L
+
+ def setup(){
+ sessionId = objectUnderTest.startSession()
+ }
+
+ def cleanup(){
+ objectUnderTest.closeSession(sessionId)
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Lock anchor.'(){
+ when: 'session tries to acquire anchor lock by passing anchor entity details'
+ objectUnderTest.lockAnchor(sessionId, DATASPACE_NAME, ANCHOR_NAME1, shortTimeoutForTesting)
+ then: 'no exception is thrown'
+ noExceptionThrown()
+ }
+
+ @Sql([CLEAR_DATA, SET_DATA])
+ def 'Attempt to lock anchor when another session is holding the lock.'(){
+ given: 'another session that holds an anchor lock'
+ def otherSessionId = objectUnderTest.startSession()
+ objectUnderTest.lockAnchor(otherSessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting)
+ when: 'a session tries to acquire the same anchor lock'
+ objectUnderTest.lockAnchor(sessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting)
+ then: 'a session manager exception is thrown specifying operation reached timeout'
+ def thrown = thrown(SessionManagerException)
+ thrown.message.contains('Timeout')
+ then: 'when the other session holding the lock is closed, lock can finally be acquired'
+ objectUnderTest.closeSession(otherSessionId)
+ objectUnderTest.lockAnchor(sessionId,DATASPACE_NAME,ANCHOR_NAME1,shortTimeoutForTesting)
+ }
+
+}
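
A hedged usage sketch of the lock lifecycle this new spec exercises (dataspace/anchor names and the timeout are illustrative): the session should always be closed, since closing it is what releases its anchor lock.

    def sessionId = sessionManager.startSession()
    try {
        // Waits up to the given timeout (milliseconds); a SessionManagerException signals a timeout.
        sessionManager.lockAnchor(sessionId, 'my-dataspace', 'my-anchor', 200L)
        // ... work that needs exclusive access to the anchor ...
    } finally {
        sessionManager.closeSession(sessionId)   // releases the lock held by this session
    }
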
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy
new file mode 100644
index 0000000000..a2df06ef0e
--- /dev/null
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/utils/SessionManagerSpec.groovy
@@ -0,0 +1,99 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2022 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.spi.utils
+
+import com.google.common.util.concurrent.TimeLimiter
+import org.hibernate.HibernateException
+import org.hibernate.Transaction
+import org.onap.cps.spi.entities.AnchorEntity
+import org.onap.cps.spi.exceptions.SessionManagerException
+import org.onap.cps.spi.repository.AnchorRepository
+import org.onap.cps.spi.repository.DataspaceRepository
+import org.testcontainers.shaded.com.google.common.util.concurrent.UncheckedExecutionException
+import spock.lang.Specification
+import org.hibernate.Session
+
+import java.util.concurrent.ExecutionException
+
+class SessionManagerSpec extends Specification {
+
+ def spiedTimeLimiterProvider = Spy(TimeLimiterProvider)
+ def mockDataspaceRepository = Mock(DataspaceRepository)
+ def mockAnchorRepository = Mock(AnchorRepository)
+ def mockSession = Mock(Session)
+
+ def objectUnderTest = new SessionManager(spiedTimeLimiterProvider, mockDataspaceRepository, mockAnchorRepository)
+
+ def 'Lock anchor entity with #exceptionDuringTest exception.'(){
+ given: 'a dummy session'
+ objectUnderTest.sessionMap.put('dummySession', mockSession)
+ and: 'the anchor name can be resolved'
+ def mockAnchorEntity = Mock(AnchorEntity)
+ mockAnchorEntity.getId() >> 456
+ mockAnchorRepository.getByDataspaceAndName(_, _) >> mockAnchorEntity
+ and: 'timeLimiter throws an #exceptionDuringTest exception'
+ def mockTimeLimiter = Mock(TimeLimiter)
+ spiedTimeLimiterProvider.getTimeLimiter(_) >> mockTimeLimiter
+ mockTimeLimiter.callWithTimeout(*_) >> { throw exceptionDuringTest }
+ when: 'session tries to acquire anchor lock'
+ objectUnderTest.lockAnchor('dummySession', 'some-dataspace','some-anchor', 123L)
+ then: 'a session manager exception is thrown with the expected detail'
+ def thrown = thrown(SessionManagerException)
+ thrown.details.contains(expectedExceptionDetail)
+ where:
+ exceptionDuringTest || expectedExceptionDetail
+ new InterruptedException() || 'interrupted'
+ new ExecutionException() || 'aborted'
+ }
+
+ def 'Close session that does not exist.'() {
+ when: 'attempt to close session that does not exist'
+ objectUnderTest.closeSession('unknown session id')
+ then: 'a session manager exception is thrown with the unknown id in the details'
+ def thrown = thrown(SessionManagerException)
+ assert thrown.details.contains('unknown session id')
+ }
+
+ def 'Hibernate exception while closing session.'() {
+ given: 'a test session with a transaction'
+ objectUnderTest.sessionMap.put('testSessionId', mockSession)
+ mockSession.getTransaction() >> Mock(Transaction)
+ and: 'an hibernate exception when closing that session'
+ def hibernateException = new HibernateException('test')
+ mockSession.close() >> { throw hibernateException }
+ when: 'attempt to close session'
+ objectUnderTest.closeSession('testSessionId')
+ then: 'a session manager exception is thrown with the session id in the details'
+ def thrown = thrown(SessionManagerException)
+ assert thrown.details.contains('testSessionId')
+ and: 'the original exception as cause'
+ assert thrown.cause == hibernateException
+ }
+
+ def 'Attempt to lock anchor entity with session Id that does not exists'(){
+ when: 'attempt to acquire anchor lock with session that does not exists'
+ objectUnderTest.lockAnchor('unknown session id','','',123L)
+ then: 'a session manager exception is thrown with the unknown id in the details'
+ def thrown = thrown(SessionManagerException)
+ thrown.details.contains('unknown session id')
+ }
+
+}
diff --git a/cps-ri/src/test/resources/data/cps-path-query.sql b/cps-ri/src/test/resources/data/cps-path-query.sql
index 8f525df6bd..d1a62209eb 100644
--- a/cps-ri/src/test/resources/data/cps-path-query.sql
+++ b/cps-ri/src/test/resources/data/cps-path-query.sql
@@ -1,6 +1,6 @@
/*
============LICENSE_START=======================================================
- Copyright (C) 2021 Nordix Foundation.
+ Copyright (C) 2021-2022 Nordix Foundation.
Modifications Copyright (C) 2021 Bell Canada.
================================================================================
Licensed under the Apache License, Version 2.0 (the "License");
@@ -30,25 +30,25 @@ INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
(1, 1001, 1003, null, '/shops', null),
- (2, 1001, 1003, 1, '/shops/shop[@id=1]', '{"id" : 1, "type" : "bookstore"}'),
- (3, 1001, 1003, 2, '/shops/shop[@id=1]/categories[@code=1]', '{"code" : 1, "type" : "bookstore", "name": "SciFi"}'),
- (4, 1001, 1003, 2, '/shops/shop[@id=1]/categories[@code=2]', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}'),
- (5, 1001, 1003, 3, '/shops/shop[@id=1]/categories[@code=1]/book', '{"price" : 5, "title" : "Dune", "labels" : ["special offer","classics",""]}'),
- (6, 1001, 1003, 4, '/shops/shop[@id=1]/categories[@code=2]/book', '{"price" : 15, "title" : "Chapters", "editions" : [2000,2010,2020]}'),
- (7, 1001, 1003, 5, '/shops/shop[@id=1]/categories[@code=1]/book/author[@FirstName="Joe" and @Surname="Bloggs"]', '{"FirstName" : "Joe", "Surname": "Bloggs","title": "Dune"}'),
- (8, 1001, 1003, 6, '/shops/shop[@id=1]/categories[@code=2]/book/author[@FirstName="Joe" and @Surname="Smith"]', '{"FirstName" : "Joe", "Surname": "Smith","title": "Chapters"}');
+ (2, 1001, 1003, 1, '/shops/shop[@id=''1'']', '{"id" : 1, "type" : "bookstore"}'),
+ (3, 1001, 1003, 2, '/shops/shop[@id=''1'']/categories[@code=''1'']', '{"code" : 1, "type" : "bookstore", "name": "SciFi"}'),
+ (4, 1001, 1003, 2, '/shops/shop[@id=''1'']/categories[@code=''2'']', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}'),
+ (5, 1001, 1003, 3, '/shops/shop[@id=''1'']/categories[@code=''1'']/book', '{"price" : 5, "title" : "Dune", "labels" : ["special offer","classics",""]}'),
+ (6, 1001, 1003, 4, '/shops/shop[@id=''1'']/categories[@code=''2'']/book', '{"price" : 15, "title" : "Chapters", "editions" : [2000,2010,2020]}'),
+ (7, 1001, 1003, 5, '/shops/shop[@id=''1'']/categories[@code=''1'']/book/author[@FirstName=''Joe'' and @Surname=''Bloggs'']', '{"FirstName" : "Joe", "Surname": "Bloggs","title": "Dune"}'),
+ (8, 1001, 1003, 6, '/shops/shop[@id=''1'']/categories[@code=''2'']/book/author[@FirstName=''Joe'' and @Surname=''Smith'']', '{"FirstName" : "Joe", "Surname": "Smith","title": "Chapters"}');
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (9, 1001, 1003, 1, '/shops/shop[@id=2]', '{"type" : "bookstore"}'),
- (10, 1001, 1003, 9, '/shops/shop[@id=2]/categories[@code=1]', '{"code" : 2, "type" : "bookstore", "name": "Kids"}'),
- (11, 1001, 1003, 10, '/shops/shop[@id=2]/categories[@code=2]', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}');
+ (9, 1001, 1003, 1, '/shops/shop[@id=''2'']', '{"type" : "bookstore"}'),
+ (10, 1001, 1003, 9, '/shops/shop[@id=''2'']/categories[@code=''1'']', '{"code" : 2, "type" : "bookstore", "name": "Kids"}'),
+ (11, 1001, 1003, 10, '/shops/shop[@id=''2'']/categories[@code=''2'']', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}');
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
- (12, 1001, 1003, 1, '/shops/shop[@id=3]', '{"type" : "garden centre"}'),
- (13, 1001, 1003, 12, '/shops/shop[@id=3]/categories[@code=1]', '{"id" : 1, "type" : "garden centre", "name": "indoor plants"}'),
- (14, 1001, 1003, 12, '/shops/shop[@id=3]/categories[@code=2]', '{"id" : 2, "type" : "garden centre", "name": "outdoor plants"}'),
- (16, 1001, 1003, 1, '/shops/shop[@id=3]/info', null),
- (17, 1001, 1003, 1, '/shops/shop[@id=3]/info/contact', null),
- (18, 1001, 1003, 1, '/shops/shop[@id=3]/info/contact/website', '{"address" : "myshop.ie"}'),
- (19, 1001, 1003, 12, '/shops/shop[@id=3]/info/contact/phonenumbers[@type="mob"]', '{"type" : "mob", "number" : "123123456"}'),
- (20, 1001, 1003, 12, '/shops/shop[@id=3]/info/contact/phonenumbers[@type="landline"]', '{"type" : "landline", "number" : "012123456"}');
+ (12, 1001, 1003, 1, '/shops/shop[@id=''3'']', '{"type" : "garden centre"}'),
+ (13, 1001, 1003, 12, '/shops/shop[@id=''3'']/categories[@code=''1'']', '{"id" : 1, "type" : "garden centre", "name": "indoor plants"}'),
+ (14, 1001, 1003, 12, '/shops/shop[@id=''3'']/categories[@code=''2'']', '{"id" : 2, "type" : "garden centre", "name": "outdoor plants"}'),
+ (16, 1001, 1003, 1, '/shops/shop[@id=''3'']/info', null),
+ (17, 1001, 1003, 1, '/shops/shop[@id=''3'']/info/contact', null),
+ (18, 1001, 1003, 1, '/shops/shop[@id=''3'']/info/contact/website', '{"address" : "myshop.ie"}'),
+ (19, 1001, 1003, 12, '/shops/shop[@id=''3'']/info/contact/phonenumbers[@type=''mob'']', '{"type" : "mob", "number" : "123123456"}'),
+ (20, 1001, 1003, 12, '/shops/shop[@id=''3'']/info/contact/phonenumbers[@type=''landline'']', '{"type" : "landline", "number" : "012123456"}');
diff --git a/cps-ri/src/test/resources/data/fragment.sql b/cps-ri/src/test/resources/data/fragment.sql
index a27bb5fdea..4106541061 100755
--- a/cps-ri/src/test/resources/data/fragment.sql
+++ b/cps-ri/src/test/resources/data/fragment.sql
@@ -1,6 +1,6 @@
/*
============LICENSE_START=======================================================
- Copyright (C) 2021 Nordix Foundation.
+ Copyright (C) 2021-2022 Nordix Foundation.
Modifications Copyright (C) 2021 Pantheon.tech
Modifications Copyright (C) 2021-2022 Bell Canada.
================================================================================
@@ -21,14 +21,16 @@
*/
INSERT INTO DATASPACE (ID, NAME) VALUES
- (1001, 'DATASPACE-001');
+ (1001, 'DATASPACE-001'),
+ (1002, 'NCMP-Admin');
INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES
(2001, 'SCHEMA-SET-001', 1001);
INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
(3001, 'ANCHOR-001', 1001, 2001),
- (3003, 'ANCHOR-003', 1001, 2001);
+ (3003, 'ANCHOR-003', 1001, 2001),
+ (3004, 'ncmp-dmi-registry', 1002, 2001);
INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH) VALUES
(4001, 1001, 3001, null, '/parent-1'),
@@ -50,21 +52,32 @@ INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES)
(4203, 1001, 3003, 4202, '/parent-200/child-201/grand-child', '{"leaf-value": "original"}'),
(4206, 1001, 3003, null, '/parent-201', '{"leaf-value": "original"}'),
(4207, 1001, 3003, 4206, '/parent-201/child-203', '{}'),
- (4208, 1001, 3003, 4206, '/parent-201/child-204[@key="A"]', '{"key": "A"}'),
- (4209, 1001, 3003, 4206, '/parent-201/child-204[@key="B"]', '{"key": "B"}'),
+ (4208, 1001, 3003, 4206, '/parent-201/child-204[@key=''A'']', '{"key": "A"}'),
+ (4209, 1001, 3003, 4206, '/parent-201/child-204[@key=''B'']', '{"key": "B"}'),
(4211, 1001, 3003, null, '/parent-202', '{"leaf-value": "original"}'),
- (4212, 1001, 3003, 4211, '/parent-202/child-205[@key="A" and @key2="B"]', '{"key": "A", "key2": "B"}'),
- (4213, 1001, 3003, 4211, '/parent-202/child-206[@key="A"]', '{"key": "A"}'),
+ (4212, 1001, 3003, 4211, '/parent-202/child-205[@key=''A'' and @key2=''B'']', '{"key": "A", "key2": "B"}'),
+ (4213, 1001, 3003, 4211, '/parent-202/child-206[@key=''A'']', '{"key": "A"}'),
(4214, 1001, 3003, null, '/parent-203', '{"leaf-value": "original"}'),
(4215, 1001, 3003, 4214, '/parent-203/child-203', '{}'),
- (4216, 1001, 3003, 4214, '/parent-203/child-204[@key="A"]', '{"key": "A"}'),
- (4217, 1001, 3003, 4214, '/parent-203/child-204[@key="B"]', '{"key": "B"}'),
- (4218, 1001, 3003, 4217, '/parent-203/child-204[@key="B"]/grand-child-204[@key2="Y"]', '{"key": "B", "key2": "Y"}'),
+ (4216, 1001, 3003, 4214, '/parent-203/child-204[@key=''A'']', '{"key": "A"}'),
+ (4217, 1001, 3003, 4214, '/parent-203/child-204[@key=''B'']', '{"key": "B"}'),
+ (4218, 1001, 3003, 4217, '/parent-203/child-204[@key=''B'']/grand-child-204[@key2=''Y'']', '{"key": "B", "key2": "Y"}'),
(4226, 1001, 3003, null, '/parent-206', '{"leaf-value": "original"}'),
(4227, 1001, 3003, 4226, '/parent-206/child-206', '{}'),
(4228, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206', '{}'),
- (4229, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206[@key="A"]', '{"key": "A"}'),
- (4230, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206[@key="X"]', '{"key": "X"}'),
- (4231, 1001, 3003, null, '/parent-206[@key="A"]', '{"key": "A"}'),
- (4232, 1001, 3003, 4231, '/parent-206[@key="A"]/child-206', '{}'),
- (4233, 1001, 3003, null, '/parent-206[@key="B"]', '{"key": "B"}');
\ No newline at end of file
+ (4229, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206[@key=''A'']', '{"key": "A"}'),
+ (4230, 1001, 3003, 4227, '/parent-206/child-206/grand-child-206[@key=''X'']', '{"key": "X"}'),
+ (4231, 1001, 3003, null, '/parent-206[@key=''A'']', '{"key": "A"}'),
+ (4232, 1001, 3003, 4231, '/parent-206[@key=''A'']/child-206', '{}'),
+ (4233, 1001, 3003, null, '/parent-206[@key=''B'']', '{"key": "B"}');
+
+INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
+ (5000, 1002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo'']', '{"id": "PNFDemo", "dmi-service-name": "http://172.21.235.14:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
+ (5001, 1002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo2'']', '{"id": "PNFDemo2", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
+ (5002, 1002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo3'']', '{"id": "PNFDemo3", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
+ (5003, 1002, 3004, null, '/dmi-registry/cm-handles[@id=''PNFDemo4'']', '{"id": "PNFDemo4", "dmi-service-name": "http://172.26.46.68:8783", "dmi-data-service-name": "", "dmi-model-service-name": ""}'),
+ (5004, 1002, 3004, 5000, '/dmi-registry/cm-handles[@id=''PNFDemo'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
+ (5005, 1002, 3004, 5001, '/dmi-registry/cm-handles[@id=''PNFDemo2'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
+ (5006, 1002, 3004, 5002, '/dmi-registry/cm-handles[@id=''PNFDemo3'']/public-properties[@name=''Contact'']', '{"name": "Contact3", "value": "PNF3@bookstore.com"}'),
+ (5007, 1002, 3004, 5003, '/dmi-registry/cm-handles[@id=''PNFDemo4'']/public-properties[@name=''Contact'']', '{"name": "Contact", "value": "newemailforstore@bookstore.com"}'),
+ (5008, 1002, 3004, 5004, '/dmi-registry/cm-handles[@id=''PNFDemo4'']/public-properties[@name=''Contact2'']', '{"name": "Contact2", "value": "newemailforstore2@bookstore.com"}');
diff --git a/cps-ri/src/test/resources/hibernate.cfg.xml b/cps-ri/src/test/resources/hibernate.cfg.xml
new file mode 100644
index 0000000000..fae9275ddc
--- /dev/null
+++ b/cps-ri/src/test/resources/hibernate.cfg.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE hibernate-configuration PUBLIC
+ "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
+ "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
+
+<hibernate-configuration>
+ <session-factory>
+ <property name="hibernate.connection.driver_class">org.postgresql.Driver</property>
+ <property name="hibernate.connection.url">${DB_URL}</property>
+ <property name="hibernate.connection.username">${DB_USERNAME}</property>
+ <property name="hibernate.connection.password">${DB_PASSWORD}</property>
+ <property name="hibernate.dialect">org.hibernate.dialect.PostgreSQL82Dialect</property>
+ <property name="show_sql">true</property>
+ <property name="hibernate.hbm2ddl.auto">none</property>
+ </session-factory>
+</hibernate-configuration>
\ No newline at end of file
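
The ${DB_URL}, ${DB_USERNAME} and ${DB_PASSWORD} placeholders are presumably filled in from the Testcontainers-provided database at test time. A minimal sketch of one way a test-scoped SessionFactory could be built from this file with the connection settings supplied explicitly (standard Hibernate API; not the actual CPS bootstrap code):

    import org.hibernate.SessionFactory
    import org.hibernate.cfg.Configuration

    // Load hibernate.cfg.xml from the classpath, then override the connection
    // properties with the real database coordinates (e.g. from Testcontainers).
    SessionFactory buildTestSessionFactory(String url, String username, String password) {
        def configuration = new Configuration().configure('hibernate.cfg.xml')
        configuration.setProperty('hibernate.connection.url', url)
        configuration.setProperty('hibernate.connection.username', username)
        configuration.setProperty('hibernate.connection.password', password)
        return configuration.buildSessionFactory()
    }
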