9 files changed, 243 insertions, 151 deletions
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
index 9f7ef1e882..42d8135057 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
@@ -25,8 +25,10 @@ import lombok.Getter;
 @Getter
 public enum NcmpEventResponseCode {
 
-    CODE_100("100", "cm handle id(s) not found"),
-    CODE_101("101", "cm handle(s) not ready");
+    CM_HANDLES_NOT_FOUND("100", "cm handle id(s) not found"),
+    CM_HANDLES_NOT_READY("101", "cm handle(s) not ready"),
+    DMI_SERVICE_NOT_RESPONDING("102", "dmi plugin service is not responding"),
+    UNABLE_TO_READ_RESOURCE_DATA("103", "dmi plugin service is not able to read resource data");
 
     private final String statusCode;
     private final String statusMessage;
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
index b4784f418f..8f0975f177 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
@@ -30,8 +30,10 @@ import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
+import org.onap.cps.ncmp.api.NcmpEventResponseCode;
 import org.onap.cps.ncmp.api.impl.client.DmiRestClient;
 import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration;
+import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException;
 import org.onap.cps.ncmp.api.impl.executor.TaskExecutor;
 import org.onap.cps.ncmp.api.impl.utils.DmiServiceUrlBuilder;
 import org.onap.cps.ncmp.api.impl.utils.data.operation.ResourceDataOperationRequestUtils;
@@ -43,7 +45,9 @@ import org.onap.cps.spi.exceptions.CpsException;
 import org.onap.cps.utils.JsonObjectMapper;
 import org.springframework.http.ResponseEntity;
 import org.springframework.stereotype.Component;
+import org.springframework.util.LinkedMultiValueMap;
 import org.springframework.util.MultiValueMap;
+import org.springframework.web.util.UriComponentsBuilder;
 
 /**
  * Operations class for DMI data.
@@ -240,14 +244,37 @@ public class DmiDataOperations extends DmiOperations {
         final String dataOperationRequestBodiesAsJsonString =
                 jsonObjectMapper.asJsonString(dmiDataOperationRequestBodies);
         TaskExecutor.executeTask(() -> dmiRestClient.postOperationWithJsonData(dataOperationResourceUrl,
-                dataOperationRequestBodiesAsJsonString, READ),
+                        dataOperationRequestBodiesAsJsonString, READ),
                 DEFAULT_ASYNC_TASK_EXECUTOR_TIMEOUT_IN_MILLISECONDS)
-                .whenCompleteAsync(this::handleTaskCompletion);
+                .whenCompleteAsync((response, throwable) -> handleTaskCompletionException(throwable,
+                        dataOperationResourceUrl, dmiDataOperationRequestBodies));
     }
 
-    private void handleTaskCompletion(final Object response, final Throwable throwable) {
-        // TODO Need to publish an error response to client given topic.
-        //  Code should be implemented into https://jira.onap.org/browse/CPS-1558 (
-        //  NCMP : Handle non responding DMI-Plugin)
+    private void handleTaskCompletionException(final Throwable throwable,
+                                               final String dataOperationResourceUrl,
+                                               final List<DmiDataOperation> dmiDataOperationRequestBodies) {
+        if (throwable != null) {
+            final MultiValueMap<String, String> dataOperationResourceUrlParameters =
+                    UriComponentsBuilder.fromUriString(dataOperationResourceUrl).build().getQueryParams();
+            final String topicName = dataOperationResourceUrlParameters.get("topic").get(0);
+            final String requestId = dataOperationResourceUrlParameters.get("requestId").get(0);
+
+            final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>>
+                    cmHandleIdsPerResponseCodesPerOperationId = new LinkedMultiValueMap<>();
+
+            dmiDataOperationRequestBodies.forEach(dmiDataOperationRequestBody -> {
+                final List<String> cmHandleIds = dmiDataOperationRequestBody.getCmHandles().stream()
+                        .map(CmHandle::getId).collect(Collectors.toList());
+                if (throwable.getCause() instanceof HttpClientRequestException) {
+                    cmHandleIdsPerResponseCodesPerOperationId.add(dmiDataOperationRequestBody.getOperationId(),
+                            Map.of(NcmpEventResponseCode.UNABLE_TO_READ_RESOURCE_DATA, cmHandleIds));
+                } else {
+                    cmHandleIdsPerResponseCodesPerOperationId.add(dmiDataOperationRequestBody.getOperationId(),
+                            Map.of(NcmpEventResponseCode.DMI_SERVICE_NOT_RESPONDING, cmHandleIds));
+                }
+            });
+            ResourceDataOperationRequestUtils.publishErrorMessageToClientTopic(topicName, requestId,
+                    cmHandleIdsPerResponseCodesPerOperationId);
+        }
     }
 }
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
index 957f48a862..d8fb904f21 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
@@ -68,7 +68,7 @@ final Collection<YangModelCmHandle> yangModelCmHandles) {
 
         final Map<String, List<DmiDataOperation>> dmiDataOperationsOutPerDmiServiceName = new HashMap<>();
-        final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerOperationIdPerResponseCode
+        final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerResponseCodesPerOperationId
                 = new LinkedMultiValueMap<>();
 
         final Set<String> nonReadyCmHandleIdsLookup = filterAndGetNonReadyCmHandleIds(yangModelCmHandles);
@@ -100,25 +100,34 @@
                     }
                 }
             }
-            populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerOperationIdPerResponseCode,
-                    dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CODE_100, nonExistingCmHandleIds);
-            populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerOperationIdPerResponseCode,
-                    dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CODE_101, nonReadyCmHandleIds);
+            populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerResponseCodesPerOperationId,
+                    dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CM_HANDLES_NOT_FOUND,
+                    nonExistingCmHandleIds);
+            populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerResponseCodesPerOperationId,
+                    dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CM_HANDLES_NOT_READY,
+                    nonReadyCmHandleIds);
         }
-        if (!cmHandleIdsPerOperationIdPerResponseCode.isEmpty()) {
-            publishErrorMessageToClientTopic(topicParamInQuery, requestId, cmHandleIdsPerOperationIdPerResponseCode);
+        if (!cmHandleIdsPerResponseCodesPerOperationId.isEmpty()) {
+            publishErrorMessageToClientTopic(topicParamInQuery, requestId, cmHandleIdsPerResponseCodesPerOperationId);
         }
         return dmiDataOperationsOutPerDmiServiceName;
     }
 
+    /**
+     * Creates data operation cloud event and publish it to client topic.
+     *
+     * @param clientTopic                               client given topic
+     * @param requestId                                 unique identifier per request
+     * @param cmHandleIdsPerResponseCodesPerOperationId list of cm handle ids per operation id with response code
+     */
     @Async
-    private static void publishErrorMessageToClientTopic(final String clientTopic,
+    public static void publishErrorMessageToClientTopic(final String clientTopic,
                                                          final String requestId,
                                                          final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>>
-                                                                 cmHandleIdsPerOperationIdPerResponseCode) {
+                                                                 cmHandleIdsPerResponseCodesPerOperationId) {
         final CloudEvent dataOperationCloudEvent = DataOperationEventCreator.createDataOperationEvent(clientTopic,
-                requestId, cmHandleIdsPerOperationIdPerResponseCode);
+                requestId, cmHandleIdsPerResponseCodesPerOperationId);
         final EventsPublisher<CloudEvent> eventsPublisher = CpsApplicationContext.getCpsBean(EventsPublisher.class);
         eventsPublisher.publishCloudEvent(clientTopic, requestId, dataOperationCloudEvent);
     }
@@ -166,13 +175,13 @@
     }
 
     private static void populateCmHandleIdsPerOperationIdPerResponseCode(final MultiValueMap<String,
-            Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerOperationIdByResponseCode,
+            Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerResponseCodesPerOperationId,
             final String operationId,
             final NcmpEventResponseCode ncmpEventResponseCode,
             final List<String> cmHandleIds) {
         if (!cmHandleIds.isEmpty()) {
-            cmHandleIdsPerOperationIdByResponseCode.add(operationId, Map.of(ncmpEventResponseCode, cmHandleIds));
+            cmHandleIdsPerResponseCodesPerOperationId.add(operationId, Map.of(ncmpEventResponseCode, cmHandleIds));
         }
     }
 }
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
index 59e62e34d0..3f40f430f3 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
@@ -22,12 +22,16 @@ package org.onap.cps.ncmp.api.impl.operations
 
 import com.fasterxml.jackson.databind.ObjectMapper
+import io.cloudevents.core.CloudEventUtils
+import io.cloudevents.jackson.PojoCloudEventDataMapper
+import org.onap.cps.ncmp.api.NcmpEventResponseCode
 import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration
 import org.onap.cps.ncmp.api.impl.events.EventsPublisher
+import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException
 import org.onap.cps.ncmp.api.impl.utils.DmiServiceUrlBuilder
 import org.onap.cps.ncmp.api.impl.utils.context.CpsApplicationContext
 import org.onap.cps.ncmp.api.models.DataOperationRequest
-import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent
+import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent
 import org.onap.cps.ncmp.utils.TestUtils
 import org.onap.cps.utils.JsonObjectMapper
 import org.spockframework.spring.SpringBean
@@ -37,6 +41,7 @@ import org.springframework.http.ResponseEntity
 import org.springframework.test.context.ContextConfiguration
 import org.springframework.http.HttpStatus
 import spock.lang.Shared
+import java.util.concurrent.TimeoutException
 
 import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL
 import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING
@@ -110,6 +115,28 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
         assert requestBodyAsJsonStringArg == expectedBatchRequestAsJson
     }
 
+    def 'Execute (async) data operation from DMI service for #scenario.'() {
+        given: 'data operation request body and dmi resource url'
+            def dmiDataOperation = DmiDataOperation.builder().operationId('some-operation-id').build()
+            dmiDataOperation.getCmHandles().add(CmHandle.builder().id('some-cm-handle-id').build())
+            def dmiDataOperationResourceDataUrl = "http://dmi-service-name:dmi-port/dmi/v1/data?topic=my-topic-name&requestId=some-request-id"
+            def actualDataOperationCloudEvent = null
+        when: 'exception occurs after sending request to dmi service'
+            objectUnderTest.handleTaskCompletionException(new Throwable(exception), dmiDataOperationResourceDataUrl, List.of(dmiDataOperation))
+        then: 'a cloud event is published'
+            eventsPublisher.publishCloudEvent('my-topic-name', 'some-request-id', _) >> { args -> actualDataOperationCloudEvent = args[2] }
+        and: 'the event contains the expected error details'
+            def eventDataValue = extractDataValue(actualDataOperationCloudEvent)
+            assert eventDataValue.operationId == dmiDataOperation.operationId
+            assert eventDataValue.ids == dmiDataOperation.cmHandles.id
+            assert eventDataValue.statusCode == responseCode.statusCode
+            assert eventDataValue.statusMessage == responseCode.statusMessage
+        where: 'the following exceptions are occurred'
+            scenario                        | exception                                                                                                 || responseCode
+            'http client request exception' | new HttpClientRequestException('error-message', 'error-details', HttpStatus.SERVICE_UNAVAILABLE.value()) || NcmpEventResponseCode.UNABLE_TO_READ_RESOURCE_DATA
+            'timeout exception'             | new TimeoutException()                                                                                    || NcmpEventResponseCode.DMI_SERVICE_NOT_RESPONDING
+    }
+
     def 'call get all resource data.'() {
         given: 'the system returns a cm handle with a sample property'
             mockYangModelCmHandleRetrieval([yangModelCmHandleProperty])
@@ -142,4 +169,8 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
             CREATE    || 'create'
             UPDATE    || 'update'
     }
+
+    def extractDataValue(actualDataOperationCloudEvent) {
+        return CloudEventUtils.mapData(actualDataOperationCloudEvent, PojoCloudEventDataMapper.from(new ObjectMapper(), DataOperationEvent.class)).getValue().data.responses[0]
+    }
 }
diff --git a/cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy b/cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy
index 572db005b3..e9d559c31d 100644
--- a/cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy
+++ b/cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy
@@ -22,23 +22,12 @@ package org.onap.cps.rest.utils
 
 import org.onap.cps.spi.exceptions.CpsException
 import org.onap.cps.spi.exceptions.ModelValidationException
-import org.onap.cps.spi.model.DataNodeBuilder
-import org.onap.cps.utils.DataMapUtils
 import org.springframework.mock.web.MockMultipartFile
 import org.springframework.web.multipart.MultipartFile
 import spock.lang.Specification
 
 class MultipartFileUtilSpec extends Specification {
 
-    def 'Data node without leaves and without children.'() {
-        given: 'a datanode with no leaves and no children'
-            def dataNodeWithoutData = new DataNodeBuilder().withXpath('some xpath').build()
-        when: 'it is converted to a map'
-            def result = DataMapUtils.toDataMap(dataNodeWithoutData)
-        then: 'an empty object map is returned'
-            result.isEmpty()
-    }
-
     def 'Extract yang resource from yang file.'() {
         given: 'uploaded yang file'
             def multipartFile = new MockMultipartFile("file", "filename.yang", "text/plain", "content".getBytes())
@@ -116,6 +105,32 @@ class MultipartFileUtilSpec extends Specification {
             fileType << ['YANG', 'ZIP']
     }
 
+    def 'Resource name extension checks, with #scenario.'() {
+        expect: 'extension check returns expected result'
+            assert MultipartFileUtil.resourceNameEndsWithExtension(resourceName, '.test') == expectedResult
+        where: 'following resource names are tested'
+            scenario            | resourceName || expectedResult
+            'correct extension' | 'file.test'  || true
+            'mixed case'        | 'file.TesT'  || true
+            'other extension'   | 'file.other' || false
+            'no extension'      | 'file'       || false
+            'null'              | null         || false
+    }
+
+    def 'Extract resourcename, with #scenario.'() {
+        expect: 'extension check returns expected result'
+            assert MultipartFileUtil.extractResourceNameFromPath(path) == expectedResoureName
+        where: 'following resource names are tested'
+            scenario           | path                || expectedResoureName
+            'no folder'        | 'file.test'         || 'file.test'
+            'single folder'    | 'folder/file.test'  || 'file.test'
+            'multiple folders' | 'f1/f2/file.test'   || 'file.test'
+            'with root'        | '/f1/f2/file.test'  || 'file.test'
+            'windows notation' | 'c:\\f2\\file.test' || 'file.test'
+            'empty path'       | ''                  || ''
+            'null path'        | null                || ''
+    }
+
     def multipartZipFileFromResource(resourcePath) {
         return new MockMultipartFile("file", "TEST.ZIP", "application/zip",
             getClass().getResource(resourcePath).getBytes())
diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
index 99cda229db..0a7afc8f64 100755
--- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
+++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
@@ -39,6 +39,7 @@ import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.onap.cps.api.CpsAdminService;
 import org.onap.cps.api.CpsDataService;
+import org.onap.cps.cpspath.parser.CpsPathUtil;
 import org.onap.cps.notification.NotificationService;
 import org.onap.cps.notification.Operation;
 import org.onap.cps.spi.CpsDataPersistenceService;
@@ -354,10 +355,11 @@ public class CpsDataServiceImpl implements CpsDataService {
             }
             return dataNodes;
         }
+        final String normalizedParentNodeXpath = CpsPathUtil.getNormalizedXpath(parentNodeXpath);
         final ContainerNode containerNode =
-            timedYangParser.parseData(contentType, nodeData, schemaContext, parentNodeXpath);
+            timedYangParser.parseData(contentType, nodeData, schemaContext, normalizedParentNodeXpath);
         final Collection<DataNode> dataNodes = new DataNodeBuilder()
-            .withParentNodeXpath(parentNodeXpath)
+            .withParentNodeXpath(normalizedParentNodeXpath)
             .withContainerNode(containerNode)
             .buildCollection();
         if (dataNodes.isEmpty()) {
diff --git a/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy
index e27b437637..c636f4b5ff 100644
--- a/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy
+++ b/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy
@@ -1,7 +1,7 @@
 /*
  * ============LICENSE_START=======================================================
  * Copyright (C) 2021 Pantheon.tech
- * Modifications Copyright (C) 2020-2022 Nordix Foundation
+ * Modifications Copyright (C) 2020-2023 Nordix Foundation
  * Modifications Copyright (C) 2022 Bell Canada.
  * Modifications Copyright (C) 2023 TechMahindra Ltd.
  * ================================================================================
@@ -29,50 +29,19 @@ class DataMapUtilsSpec extends Specification {
 
     def noChildren = []
 
-    def dataNode = buildDataNode(
-        "/parent",[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
-            buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
-            buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
-            buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
-                [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
-            ),
-        ])
-
-    def dataNodeWithAnchor = buildDataNodeWithAnchor(
-        "/parent", 'anchor01',[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
-            buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
-            buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
-            buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
-                [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
-            ),
-        ])
-
-    static def buildDataNode(xpath, leaves, children) {
-        return new DataNodeBuilder().withXpath(xpath).withLeaves(leaves).withChildDataNodes(children).build()
-    }
-
-    static def buildDataNodeWithAnchor(xpath, anchorName, leaves, children) {
-        return new DataNodeBuilder().withXpath(xpath).withAnchor(anchorName).withLeaves(leaves).withChildDataNodes(children).build()
-    }
-
     def 'Data node structure conversion to map.'() {
         when: 'data node structure is converted to a map'
             def result = DataMapUtils.toDataMap(dataNode)
-
         then: 'root node identifier is null'
             result.parent == null
-
         then: 'root node leaves are top level elements'
             result.parentLeaf == 'parentLeafValue'
            result.parentLeafList == ['parentLeafListEntry1','parentLeafListEntry2']
-
         and: 'leaves of child list element are listed as structures under common identifier'
            result.'child-list'.collect().containsAll(['listElementLeaf': 'listElement1leafValue'],
                    ['listElementLeaf': 'listElement2leafValue'])
-
         and: 'leaves for child element is populated under its node identifier'
            result.'child-object'.childLeaf == 'childLeafValue'
-
         and: 'leaves for grandchild element is populated under its node identifier'
            result.'child-object'.'grand-child-object'.grandChildLeaf == 'grandChildLeafValue'
     }
@@ -84,10 +53,8 @@ class DataMapUtilsSpec extends Specification {
            def parentNode = result.parent
            parentNode.parentLeaf == 'parentLeafValue'
            parentNode.parentLeafList == ['parentLeafListEntry1','parentLeafListEntry2']
-
        and: 'leaves for child element is populated under its node identifier'
            parentNode.'child-object'.childLeaf == 'childLeafValue'
-
        and: 'leaves for grandchild element is populated under its node identifier'
            parentNode.'child-object'.'grand-child-object'.grandChildLeaf == 'grandChildLeafValue'
     }
@@ -112,15 +79,48 @@ class DataMapUtilsSpec extends Specification {
            def parentNode = result.get("dataNode").parent
            parentNode.parentLeaf == 'parentLeafValue'
            parentNode.parentLeafList == ['parentLeafListEntry1','parentLeafListEntry2']
-
        and: 'leaves for child element is populated under its node identifier'
            assert parentNode.'child-object'.childLeaf == 'childLeafValue'
-
        and: 'leaves for grandchild element is populated under its node identifier'
            assert parentNode.'child-object'.'grand-child-object'.grandChildLeaf == 'grandChildLeafValue'
-
        and: 'data node is associated with anchor name'
            assert result.get('anchorName') == 'anchor01'
     }
+
+    def 'Data node without leaves and without children.'() {
+        given: 'a datanode with no leaves and no children'
+            def dataNodeWithoutData = new DataNodeBuilder().withXpath('some xpath').build()
+        when: 'it is converted to a map'
+            def result = DataMapUtils.toDataMap(dataNodeWithoutData)
+        then: 'an empty object map is returned'
+            result.isEmpty()
+    }
+
+    def dataNode = buildDataNode(
+        "/parent",[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
+            buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
+            buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
+            buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
+                [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
+            ),
+        ])
+
+    def dataNodeWithAnchor = buildDataNodeWithAnchor(
+        "/parent", 'anchor01',[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
+            buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
+            buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
+            buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
+                [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
+            ),
+        ])
+
+    def buildDataNode(xpath, leaves, children) {
+        return new DataNodeBuilder().withXpath(xpath).withLeaves(leaves).withChildDataNodes(children).build()
+    }
+
+    def buildDataNodeWithAnchor(xpath, anchorName, leaves, children) {
+        return new DataNodeBuilder().withXpath(xpath).withAnchor(anchorName).withLeaves(leaves).withChildDataNodes(children).build()
+    }
+
 }
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
index 2efbcb2af6..5c9ced34e6 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
@@ -47,85 +47,87 @@ class CpsDataServiceIntegrationSpec extends FunctionalSpecBase {
 
     def setup() {
         objectUnderTest = cpsDataService
-        originalCountBookstoreChildNodes = countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
-}
+        originalCountBookstoreChildNodes = countDataNodesInBookstore()
+    }
 
-def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() {
-    when: 'get data nodes for bookstore container'
-        def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', fetchDescendantsOption)
-    then: 'the tree consist ouf of #expectNumberOfDataNodes data nodes'
-        assert countDataNodesInTree(result) == expectNumberOfDataNodes
-    and: 'the top level data node has the expected attribute and value'
-        assert result.leaves['bookstore-name'] == ['Easons']
-    and: 'they are from the correct dataspace'
-        assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1]
-    and: 'they are from the correct anchor'
-        assert result.anchorName == [BOOKSTORE_ANCHOR_1]
-    where: 'the following option is used'
-        fetchDescendantsOption        || expectNumberOfDataNodes
-        OMIT_DESCENDANTS              || 1
-        DIRECT_CHILDREN_ONLY          || 6
-        INCLUDE_ALL_DESCENDANTS       || 17
-        new FetchDescendantsOption(2) || 17
-}
+    def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() {
+        when: 'get data nodes for bookstore container'
+            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', fetchDescendantsOption)
+        then: 'the tree consist ouf of #expectNumberOfDataNodes data nodes'
+            assert countDataNodesInTree(result) == expectNumberOfDataNodes
+        and: 'the top level data node has the expected attribute and value'
+            assert result.leaves['bookstore-name'] == ['Easons']
+        and: 'they are from the correct dataspace'
+            assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1]
+        and: 'they are from the correct anchor'
+            assert result.anchorName == [BOOKSTORE_ANCHOR_1]
+        where: 'the following option is used'
+            fetchDescendantsOption        || expectNumberOfDataNodes
+            OMIT_DESCENDANTS              || 1
+            DIRECT_CHILDREN_ONLY          || 6
+            INCLUDE_ALL_DESCENDANTS       || 17
+            new FetchDescendantsOption(2) || 17
+    }
 
-def 'Read bookstore top-level container(s) using "root" path variations.'() {
-    when: 'get data nodes for bookstore container'
-        def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, root, OMIT_DESCENDANTS)
-    then: 'the tree consist ouf of one data node'
-        assert countDataNodesInTree(result) == 1
-    and: 'the top level data node has the expected attribute and value'
-        assert result.leaves['bookstore-name'] == ['Easons']
-    where: 'the following variations of "root" are used'
-        root << [ '/', '' ]
-}
+    def 'Read bookstore top-level container(s) using "root" path variations.'() {
+        when: 'get data nodes for bookstore container'
+            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, root, OMIT_DESCENDANTS)
+        then: 'the tree consist ouf of one data node'
+            assert countDataNodesInTree(result) == 1
+        and: 'the top level data node has the expected attribute and value'
+            assert result.leaves['bookstore-name'] == ['Easons']
+        where: 'the following variations of "root" are used'
+            root << [ '/', '' ]
+    }
 
-def 'Read data nodes with error: #cpsPath'() {
-    when: 'attempt to get data nodes using invalid path'
-        objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, DIRECT_CHILDREN_ONLY)
-    then: 'a #expectedException is thrown'
-        thrown(expectedException)
-    where:
-        cpsPath              || expectedException
-        'invalid path'       || CpsPathException
-        '/non-existing-path' || DataNodeNotFoundException
-}
+    def 'Read data nodes with error: #cpsPath'() {
+        when: 'attempt to get data nodes using invalid path'
+            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, DIRECT_CHILDREN_ONLY)
+        then: 'a #expectedException is thrown'
+            thrown(expectedException)
+        where:
+            cpsPath              || expectedException
+            'invalid path'       || CpsPathException
+            '/non-existing-path' || DataNodeNotFoundException
+    }
 
-def 'Read (multiple) data nodes (batch) with #cpsPath'() {
-    when: 'attempt to get data nodes using invalid path'
-        objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ cpsPath ], DIRECT_CHILDREN_ONLY)
-    then: 'no exception is thrown'
-        noExceptionThrown()
-    where:
-        cpsPath << [ 'invalid path', '/non-existing-path' ]
-}
+    def 'Read (multiple) data nodes (batch) with #cpsPath'() {
+        when: 'attempt to get data nodes using invalid path'
+            objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ cpsPath ], DIRECT_CHILDREN_ONLY)
+        then: 'no exception is thrown'
+            noExceptionThrown()
+        where:
+            cpsPath << [ 'invalid path', '/non-existing-path' ]
+    }
 
-def 'Delete root data node.'() {
-    when: 'the "root" is deleted'
-        objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ '/' ], now)
-    and: 'attempt to get the top level data node'
-        objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY)
-    then: 'an datanode not found exception is thrown'
-        thrown(DataNodeNotFoundException)
-    cleanup:
-        restoreBookstoreDataAnchor(1)
-}
+    def 'Delete root data node.'() {
+        when: 'the "root" is deleted'
+            objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ '/' ], now)
+        and: 'attempt to get the top level data node'
+            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY)
+        then: 'an datanode not found exception is thrown'
+            thrown(DataNodeNotFoundException)
+        cleanup:
+            restoreBookstoreDataAnchor(1)
+    }
 
-def 'Add and Delete a (container) data node.'() {
-    given: 'new (webinfo) datanode'
-        def json = '{"webinfo": {"domain-name":"ourbookstore.com" ,"contact-email":"info@ourbookstore.com" }}'
-    when: 'the new datanode is saved'
-        objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now)
-    then: 'it can be retrieved by its xpath'
-        def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', DIRECT_CHILDREN_ONLY)
+    def 'Add and Delete a (container) data node using #scenario.'() {
+        when: 'the new datanode is saved'
+            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , parentXpath, json, now)
+        then: 'it can be retrieved by its normalized xpath'
+            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, normalizedXpathToNode, DIRECT_CHILDREN_ONLY)
             assert result.size() == 1
-        assert result[0].xpath == '/bookstore/webinfo'
+            assert result[0].xpath == normalizedXpathToNode
         and: 'there is now one extra datanode'
-        assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore()
         when: 'the new datanode is deleted'
-        objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', now)
+            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, normalizedXpathToNode, now)
         then: 'the original number of data nodes is restored'
-        assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes == countDataNodesInBookstore()
+        where:
+            scenario                      | parentXpath                         | json                                                                                         || normalizedXpathToNode
+            'normalized parent xpath'     | '/bookstore'                        | '{"webinfo": {"domain-name":"ourbookstore.com", "contact-email":"info@ourbookstore.com" }}'  || "/bookstore/webinfo"
+            'non-normalized parent xpath' | '/bookstore/categories[ @code="1"]' | '{"books": {"title":"new" }}'                                                                || "/bookstore/categories[@code='1']/books[@title='new']"
     }
 
@@ -186,12 +188,12 @@ def 'Add and Delete a (container) data node.'() {
             objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1
             objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1
         and: 'there are now two extra data nodes'
-            assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes + 2 == countDataNodesInBookstore()
         when: 'the new elements are deleted'
            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now)
         then: 'the original number of data nodes is restored'
-            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes == countDataNodesInBookstore()
     }
 
@@ -203,7 +205,7 @@ def 'Add and Delete a (container) data node.'() {
             def exceptionThrown = thrown(AlreadyDefinedExceptionBatch)
             exceptionThrown.alreadyDefinedXpaths == [ '/bookstore/categories[@code=\'1\']' ] as Set
         and: 'there is now one extra data nodes'
-            assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore()
         cleanup:
             restoreBookstoreDataAnchor(1)
     }
@@ -216,7 +218,7 @@ def 'Add and Delete a (container) data node.'() {
         when: 'the new element is deleted'
             objectUnderTest.deleteListOrListElement(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
         then: 'the original number of data nodes is restored'
-            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes == countDataNodesInBookstore()
     }
 
@@ -229,12 +231,12 @@ def 'Add and Delete a (container) data node.'() {
             assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1
             assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1
         and: 'there are now two extra data nodes'
-            assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes + 2 == countDataNodesInBookstore()
         when: 'the new elements are deleted'
            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now)
         then: 'the original number of data nodes is restored'
-            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes == countDataNodesInBookstore()
     }
 
@@ -247,7 +249,7 @@ def 'Add and Delete a (container) data node.'() {
             def exceptionThrown = thrown(AlreadyDefinedExceptionBatch)
             assert exceptionThrown.alreadyDefinedXpaths == [ '/bookstore/categories[@code=\'1\']' ] as Set
         and: 'there is now one extra data node'
-            assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore()
         cleanup:
             restoreBookstoreDataAnchor(1)
     }
@@ -362,4 +364,8 @@ def 'Add and Delete a (container) data node.'() {
         cleanup:
             restoreBookstoreDataAnchor(1)
     }
+
+    def countDataNodesInBookstore() {
+        return countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', INCLUDE_ALL_DESCENDANTS))
+    }
 }
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
index 6b1efe955f..74070b1d83 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
@@ -64,7 +64,7 @@ class CpsPerfTestBase extends PerfTestBase {
         addAnchorsWithData(5, CPS_PERFORMANCE_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'bookstore', data)
         stopWatch.stop()
         def durationInMillis = stopWatch.getTotalTimeMillis()
-        recordAndAssertPerformance('Creating bookstore anchors with large data tree', 3_000, durationInMillis)
+        recordAndAssertPerformance('Creating bookstore anchors with large data tree', 1_500, durationInMillis)
     }
 
     def addOpenRoadModel() {
@@ -81,7 +81,7 @@ class CpsPerfTestBase extends PerfTestBase {
         addAnchorsWithData(5, CPS_PERFORMANCE_TEST_DATASPACE, LARGE_SCHEMA_SET, 'openroadm', data)
         stopWatch.stop()
         def durationInMillis = stopWatch.getTotalTimeMillis()
-        recordAndAssertPerformance('Creating openroadm anchors with large data tree', 30_000, durationInMillis)
+        recordAndAssertPerformance('Creating openroadm anchors with large data tree', 20_000, durationInMillis)
     }
 
     def generateOpenRoadData(numberOfNodes) {
@@ -98,8 +98,8 @@ class CpsPerfTestBase extends PerfTestBase {
             assert countDataNodesInTree(result) == 1
             stopWatch.stop()
             def durationInMillis = stopWatch.getTotalTimeMillis()
-        then: 'all data is read within 30 seconds (warm up not critical)'
-            recordAndAssertPerformance("Warming database", 30_000, durationInMillis)
+        then: 'all data is read within 20 seconds'
+            recordAndAssertPerformance("Warming database", 20_000, durationInMillis)
     }
 }