-rw-r--r--  cps-application/src/main/resources/application.yml | 2
-rw-r--r--  cps-ncmp-rest-stub/pom.xml | 6
-rw-r--r--  cps-ncmp-rest-stub/src/main/java/org/onap/cps/ncmp/rest/stub/controller/NetworkCmProxyStubController.java | 2
-rwxr-xr-x  cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java | 4
-rwxr-xr-x  cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/exceptions/NetworkCmProxyRestExceptionHandler.java | 1
-rw-r--r--  cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy | 6
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/exception/InvalidDatastoreException.java (renamed from cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/exceptions/InvalidDatastoreException.java) | 4
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DataStoreEnum.java | 34
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DatastoreType.java (renamed from cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/handlers/DatastoreType.java) | 6
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java | 4
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/SyncUtils.java | 4
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/init/SubscriptionModelLoader.java | 2
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplSpec.groovy | 14
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy | 10
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/utils/DmiServiceUrlBuilderSpec.groovy | 9
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/SyncUtilsSpec.groovy | 4
-rw-r--r--  cps-ri/pom.xml | 11
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java | 9
-rw-r--r--  cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java | 42
-rw-r--r-- [-rwxr-xr-x]  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy | 32
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy | 73
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy | 105
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy | 239
-rw-r--r--  cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy | 100
-rw-r--r--  cps-ri/src/test/resources/data/perf-test.sql | 28
-rw-r--r--  cps-service/src/main/java/org/onap/cps/api/CpsDataService.java | 12
-rwxr-xr-x  cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java | 18
-rw-r--r--  cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java | 21
-rw-r--r--  cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy | 29
-rwxr-xr-x  docs/release-notes.rst | 2
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/base/FunctionalSpecBase.groovy | 1
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy | 101
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy | 6
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsModuleServicePerfTest.groovy (renamed from cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy) | 34
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/DeletePerfTest.groovy | 170
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/GetPerfTest.groovy | 16
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy | 22
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/UpdatePerfTest.groovy | 62
-rw-r--r--  integration-test/src/test/resources/data/bookstore/bookstore.yang | 9
39 files changed, 528 insertions, 726 deletions
diff --git a/cps-application/src/main/resources/application.yml b/cps-application/src/main/resources/application.yml
index ccd09c855..bba8f09eb 100644
--- a/cps-application/src/main/resources/application.yml
+++ b/cps-application/src/main/resources/application.yml
@@ -194,7 +194,7 @@ ncmp:
parallelism-level: 10
model-loader:
- subscription: false
+ subscription: true
maximum-attempt-count: 20
# Custom Hazelcast Config.
diff --git a/cps-ncmp-rest-stub/pom.xml b/cps-ncmp-rest-stub/pom.xml
index fda0189a8..f434863c7 100644
--- a/cps-ncmp-rest-stub/pom.xml
+++ b/cps-ncmp-rest-stub/pom.xml
@@ -104,12 +104,6 @@
<dependency>
<groupId>org.onap.cps</groupId>
<artifactId>cps-ncmp-rest</artifactId>
- <exclusions>
- <exclusion>
- <groupId>org.onap.cps</groupId>
- <artifactId>cps-ncmp-service</artifactId>
- </exclusion>
- </exclusions>
<optional>true</optional>
</dependency>
</dependencies>
diff --git a/cps-ncmp-rest-stub/src/main/java/org/onap/cps/ncmp/rest/stub/controller/NetworkCmProxyStubController.java b/cps-ncmp-rest-stub/src/main/java/org/onap/cps/ncmp/rest/stub/controller/NetworkCmProxyStubController.java
index 6d5ee8ce6..688f62403 100644
--- a/cps-ncmp-rest-stub/src/main/java/org/onap/cps/ncmp/rest/stub/controller/NetworkCmProxyStubController.java
+++ b/cps-ncmp-rest-stub/src/main/java/org/onap/cps/ncmp/rest/stub/controller/NetworkCmProxyStubController.java
@@ -31,7 +31,7 @@ import java.util.List;
import java.util.Map;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
-import org.onap.cps.ncmp.rest.controller.handlers.DatastoreType;
+import org.onap.cps.ncmp.api.impl.operations.DatastoreType;
import org.onap.cps.ncmp.rest.model.CmHandleQueryParameters;
import org.onap.cps.ncmp.rest.model.RestOutputCmHandle;
import org.onap.cps.ncmp.rest.stub.handlers.NetworkCmProxyApiStubDefaultImpl;
diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java
index 324c1ae2d..fca1d6310 100755
--- a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java
+++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/NetworkCmProxyController.java
@@ -34,15 +34,15 @@ import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.onap.cps.ncmp.api.NetworkCmProxyDataService;
+import org.onap.cps.ncmp.api.impl.exception.InvalidDatastoreException;
+import org.onap.cps.ncmp.api.impl.operations.DatastoreType;
import org.onap.cps.ncmp.api.inventory.CompositeState;
import org.onap.cps.ncmp.api.models.CmHandleQueryApiParameters;
import org.onap.cps.ncmp.api.models.NcmpServiceCmHandle;
import org.onap.cps.ncmp.rest.api.NetworkCmProxyApi;
-import org.onap.cps.ncmp.rest.controller.handlers.DatastoreType;
import org.onap.cps.ncmp.rest.controller.handlers.NcmpCachedResourceRequestHandler;
import org.onap.cps.ncmp.rest.controller.handlers.NcmpDatastoreRequestHandler;
import org.onap.cps.ncmp.rest.controller.handlers.NcmpPassthroughResourceRequestHandler;
-import org.onap.cps.ncmp.rest.exceptions.InvalidDatastoreException;
import org.onap.cps.ncmp.rest.mapper.CmHandleStateMapper;
import org.onap.cps.ncmp.rest.model.CmHandlePublicProperties;
import org.onap.cps.ncmp.rest.model.CmHandleQueryParameters;
diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/exceptions/NetworkCmProxyRestExceptionHandler.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/exceptions/NetworkCmProxyRestExceptionHandler.java
index 58a60d2e1..5faeee69f 100755
--- a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/exceptions/NetworkCmProxyRestExceptionHandler.java
+++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/exceptions/NetworkCmProxyRestExceptionHandler.java
@@ -25,6 +25,7 @@ import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.onap.cps.ncmp.api.impl.exception.DmiRequestException;
import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException;
+import org.onap.cps.ncmp.api.impl.exception.InvalidDatastoreException;
import org.onap.cps.ncmp.api.impl.exception.NcmpException;
import org.onap.cps.ncmp.api.impl.exception.ServerNcmpException;
import org.onap.cps.ncmp.rest.controller.NetworkCmProxyController;
diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy
index 9c22f7ce1..fb411c054 100644
--- a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy
+++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyControllerSpec.groovy
@@ -66,9 +66,9 @@ import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.CREATE
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.UPDATE
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.PATCH
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.DELETE
-import static org.onap.cps.ncmp.rest.controller.handlers.DatastoreType.PASSTHROUGH_OPERATIONAL
-import static org.onap.cps.ncmp.rest.controller.handlers.DatastoreType.PASSTHROUGH_RUNNING
-import static org.onap.cps.ncmp.rest.controller.handlers.DatastoreType.OPERATIONAL
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.OPERATIONAL
import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS;
import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS;
diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/exceptions/InvalidDatastoreException.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/exception/InvalidDatastoreException.java
index ff13a93e5..6cfa159b2 100644
--- a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/exceptions/InvalidDatastoreException.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/exception/InvalidDatastoreException.java
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022 Nordix Foundation
+ * Copyright (C) 2023 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,7 +18,7 @@
* ============LICENSE_END=========================================================
*/
-package org.onap.cps.ncmp.rest.exceptions;
+package org.onap.cps.ncmp.api.impl.exception;
public class InvalidDatastoreException extends RuntimeException {
/**
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DataStoreEnum.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DataStoreEnum.java
deleted file mode 100644
index 24edc73f3..000000000
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DataStoreEnum.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2023 Nordix Foundation
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.ncmp.api.impl.operations;
-
-import lombok.Getter;
-
-@Getter
-public enum DataStoreEnum {
- PASSTHROUGH_OPERATIONAL("ncmp-datastore:passthrough-operational"),
- PASSTHROUGH_RUNNING("ncmp-datastore:passthrough-running");
- private final String value;
-
- DataStoreEnum(final String value) {
- this.value = value;
- }
-}
diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/handlers/DatastoreType.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DatastoreType.java
index e8ab997d6..6520c05c0 100644
--- a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/handlers/DatastoreType.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DatastoreType.java
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022 Nordix Foundation
+ * Copyright (C) 2023 Nordix Foundation
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,14 +18,14 @@
* ============LICENSE_END=========================================================
*/
-package org.onap.cps.ncmp.rest.controller.handlers;
+package org.onap.cps.ncmp.api.impl.operations;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import lombok.Getter;
-import org.onap.cps.ncmp.rest.exceptions.InvalidDatastoreException;
+import org.onap.cps.ncmp.api.impl.exception.InvalidDatastoreException;
@Getter
public enum DatastoreType {
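For context, a minimal sketch of the relocated DatastoreType enum, inferred from the deleted DataStoreEnum values, the imports shown above, and the getDatastoreName()/OPERATIONAL usages elsewhere in this diff; the exact constant set and the lookup-method name are assumptions, not part of this change.

package org.onap.cps.ncmp.api.impl.operations;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import lombok.Getter;
import org.onap.cps.ncmp.api.impl.exception.InvalidDatastoreException;

@Getter
public enum DatastoreType {

    // Constant values follow the 'ncmp-datastore:' naming seen in the deleted DataStoreEnum.
    OPERATIONAL("ncmp-datastore:operational"),
    PASSTHROUGH_RUNNING("ncmp-datastore:passthrough-running"),
    PASSTHROUGH_OPERATIONAL("ncmp-datastore:passthrough-operational");

    private final String datastoreName;

    DatastoreType(final String datastoreName) {
        this.datastoreName = datastoreName;
    }

    // Name-to-type index so callers can resolve the datastore name from a request path.
    private static final Map<String, DatastoreType> datastoreNameToDatastoreType = new HashMap<>();

    static {
        Arrays.stream(DatastoreType.values())
            .forEach(datastoreType ->
                datastoreNameToDatastoreType.put(datastoreType.getDatastoreName(), datastoreType));
    }

    // Assumed lookup method; unknown names are rejected with the relocated exception.
    public static DatastoreType fromDatastoreName(final String datastoreName) {
        final DatastoreType datastoreType = datastoreNameToDatastoreType.get(datastoreName);
        if (datastoreType == null) {
            throw new InvalidDatastoreException(datastoreName + " is an invalid datastore name");
        }
        return datastoreType;
    }
}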
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
index d648352f1..1a3952306 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
@@ -21,7 +21,7 @@
package org.onap.cps.ncmp.api.impl.operations;
-import static org.onap.cps.ncmp.api.impl.operations.DataStoreEnum.PASSTHROUGH_RUNNING;
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING;
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.READ;
import java.util.Collection;
@@ -157,7 +157,7 @@ public class DmiDataOperations extends DmiOperations {
final YangModelCmHandle yangModelCmHandle = getYangModelCmHandle(cmHandleId);
final String jsonRequestBody = getDmiRequestBody(operation, null, requestData, dataType,
yangModelCmHandle);
- final String dmiUrl = getDmiRequestUrl(PASSTHROUGH_RUNNING.getValue(), cmHandleId, resourceId,
+ final String dmiUrl = getDmiRequestUrl(PASSTHROUGH_RUNNING.getDatastoreName(), cmHandleId, resourceId,
null, null,
yangModelCmHandle.resolveDmiServiceName(RequiredDmiService.DATA));
final CmHandleState cmHandleState = yangModelCmHandle.getCompositeState().getCmHandleState();
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/SyncUtils.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/SyncUtils.java
index b9cecfb3d..53d8c797b 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/SyncUtils.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/SyncUtils.java
@@ -21,7 +21,7 @@
package org.onap.cps.ncmp.api.inventory.sync;
-import static org.onap.cps.ncmp.api.impl.operations.DataStoreEnum.PASSTHROUGH_OPERATIONAL;
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL;
import com.fasterxml.jackson.databind.JsonNode;
import java.time.Duration;
@@ -168,7 +168,7 @@ public class SyncUtils {
*/
public String getResourceData(final String cmHandleId) {
final ResponseEntity<Object> resourceDataResponseEntity = dmiDataOperations.getResourceDataFromDmi(
- PASSTHROUGH_OPERATIONAL.getValue(),
+ PASSTHROUGH_OPERATIONAL.getDatastoreName(),
cmHandleId,
UUID.randomUUID().toString());
if (resourceDataResponseEntity.getStatusCode().is2xxSuccessful()) {
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/init/SubscriptionModelLoader.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/init/SubscriptionModelLoader.java
index 659779acf..af9ee721c 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/init/SubscriptionModelLoader.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/init/SubscriptionModelLoader.java
@@ -58,7 +58,7 @@ public class SubscriptionModelLoader implements ModelLoader {
@Value("${ncmp.timers.model-loader.retry-time-ms:1000}")
private long retryTimeMs;
- @Value("${ncmp.model-loader.subscription:false}")
+ @Value("${ncmp.model-loader.subscription:true}")
private boolean subscriptionModelLoaderEnabled;
/**
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplSpec.groovy
index 5b49e0463..3d8e9cb2e 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplSpec.groovy
@@ -52,8 +52,8 @@ import org.springframework.http.HttpStatus
import org.springframework.http.ResponseEntity
import spock.lang.Specification
-import static org.onap.cps.ncmp.api.impl.operations.DataStoreEnum.PASSTHROUGH_OPERATIONAL
-import static org.onap.cps.ncmp.api.impl.operations.DataStoreEnum.PASSTHROUGH_RUNNING
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.CREATE
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.UPDATE
@@ -109,11 +109,11 @@ class NetworkCmProxyDataServiceImplSpec extends Specification {
given: 'get data node is called'
mockDataNode()
and: 'get resource data from DMI is called'
- mockDmiDataOperations.getResourceDataFromDmi(PASSTHROUGH_OPERATIONAL.value,'testCmHandle',
+ mockDmiDataOperations.getResourceDataFromDmi(PASSTHROUGH_OPERATIONAL.datastoreName,'testCmHandle',
'testResourceId', OPTIONS_PARAM, NO_TOPIC, NO_REQUEST_ID) >>
new ResponseEntity<>('dmi-response', HttpStatus.OK)
when: 'get resource data operational for cm-handle is called'
- def response = objectUnderTest.getResourceDataForCmHandle(PASSTHROUGH_OPERATIONAL.value, 'testCmHandle',
+ def response = objectUnderTest.getResourceDataForCmHandle(PASSTHROUGH_OPERATIONAL.datastoreName, 'testCmHandle',
'testResourceId', OPTIONS_PARAM, NO_TOPIC, NO_REQUEST_ID)
then: 'DMI returns a json response'
response == 'dmi-response'
@@ -123,11 +123,11 @@ class NetworkCmProxyDataServiceImplSpec extends Specification {
given: 'cpsDataService returns valid data node'
mockDataNode()
and: 'DMI returns valid response and data'
- mockDmiDataOperations.getResourceDataFromDmi(PASSTHROUGH_RUNNING.value, 'testCmHandle',
+ mockDmiDataOperations.getResourceDataFromDmi(PASSTHROUGH_RUNNING.datastoreName, 'testCmHandle',
'testResourceId', OPTIONS_PARAM, NO_TOPIC, NO_REQUEST_ID) >>
new ResponseEntity<>('{dmi-response}', HttpStatus.OK)
when: 'get resource data is called'
- def response = objectUnderTest.getResourceDataForCmHandle(PASSTHROUGH_RUNNING.value, 'testCmHandle',
+ def response = objectUnderTest.getResourceDataForCmHandle(PASSTHROUGH_RUNNING.datastoreName, 'testCmHandle',
'testResourceId', OPTIONS_PARAM, NO_TOPIC, NO_REQUEST_ID)
then: 'get resource data returns expected response'
response == '{dmi-response}'
@@ -149,7 +149,7 @@ class NetworkCmProxyDataServiceImplSpec extends Specification {
then: 'get bulk resource data returns expected response'
response == '{dmi-bulk-response}'
where: 'the following data stores are used'
- datastoreName << [PASSTHROUGH_RUNNING.value, PASSTHROUGH_OPERATIONAL.value]
+ datastoreName << [PASSTHROUGH_RUNNING.datastoreName, PASSTHROUGH_OPERATIONAL.datastoreName]
}
def 'Getting Yang Resources.'() {
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
index 89b3a2ff2..667b2f105 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
@@ -33,8 +33,8 @@ import org.springframework.test.context.ContextConfiguration
import org.springframework.http.HttpStatus
import spock.lang.Shared
-import static org.onap.cps.ncmp.api.impl.operations.DataStoreEnum.PASSTHROUGH_OPERATIONAL
-import static org.onap.cps.ncmp.api.impl.operations.DataStoreEnum.PASSTHROUGH_RUNNING
+import static DatastoreType.PASSTHROUGH_OPERATIONAL
+import static DatastoreType.PASSTHROUGH_RUNNING
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.CREATE
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.READ
import static org.onap.cps.ncmp.api.impl.operations.OperationEnum.UPDATE
@@ -68,7 +68,7 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
mockDmiRestClient.postOperationWithJsonData(expectedUrl, expectedJson, READ) >> responseFromDmi
dmiServiceUrlBuilder.getDmiDatastoreUrl(_, _) >> expectedUrl
when: 'get resource data is invoked'
- def result = objectUnderTest.getResourceDataFromDmi(dataStore.value, cmHandleId, resourceIdentifier,
+ def result = objectUnderTest.getResourceDataFromDmi(dataStore.datastoreName, cmHandleId, resourceIdentifier,
options, NO_TOPIC, NO_REQUEST_ID)
then: 'the result is the response from the DMI service'
assert result == responseFromDmi
@@ -91,7 +91,7 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
mockDmiRestClient.postOperationWithJsonData(expectedDmiBulkResourceDataUrl, expectedBulkRequestAsJson, READ) >> responseFromDmi
dmiServiceUrlBuilder.getBulkRequestUrl(_, _) >> expectedDmiBulkResourceDataUrl
when: 'get resource data for bulk cm handle is invoked'
- def result = objectUnderTest.getResourceDataFromDmi( dataStore.value, [cmHandleId], resourceIdentifier,
+ def result = objectUnderTest.getResourceDataFromDmi( dataStore.datastoreName, [cmHandleId], resourceIdentifier,
OPTIONS_PARAM, 'some-topic','requestId')
then: 'the result is the response from the DMI service'
assert result == responseFromDmi
@@ -110,7 +110,7 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
mockDmiRestClient.postOperationWithJsonData(expectedUrl, '{"operation":"read","cmHandleProperties":{"prop1":"val1"}}', READ) >> responseFromDmi
dmiServiceUrlBuilder.getDmiDatastoreUrl(_, _) >> expectedUrl
when: 'get resource data is invoked'
- def result = objectUnderTest.getResourceDataFromDmi( PASSTHROUGH_OPERATIONAL.value, cmHandleId, NO_REQUEST_ID)
+ def result = objectUnderTest.getResourceDataFromDmi( PASSTHROUGH_OPERATIONAL.datastoreName, cmHandleId, NO_REQUEST_ID)
then: 'the result is the response from the DMI service'
assert result == responseFromDmi
}
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/utils/DmiServiceUrlBuilderSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/utils/DmiServiceUrlBuilderSpec.groovy
index 6ca310550..57803dac2 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/utils/DmiServiceUrlBuilderSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/utils/DmiServiceUrlBuilderSpec.groovy
@@ -20,11 +20,10 @@
package org.onap.cps.ncmp.api.impl.utils
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING
+
import org.onap.cps.ncmp.api.impl.operations.RequiredDmiService
import org.onap.cps.spi.utils.CpsValidator
-
-import static org.onap.cps.ncmp.api.impl.operations.DataStoreEnum.PASSTHROUGH_RUNNING
-
import org.onap.cps.ncmp.api.impl.yangmodels.YangModelCmHandle
import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration
import org.onap.cps.ncmp.api.models.NcmpServiceCmHandle
@@ -46,7 +45,7 @@ class DmiServiceUrlBuilderSpec extends Specification {
def 'Create the dmi service url with #scenario.'() {
given: 'uri variables'
dmiProperties.dmiBasePath = 'dmi'
- def uriVars = objectUnderTest.populateUriVariables(PASSTHROUGH_RUNNING.value, yangModelCmHandle.resolveDmiServiceName(RequiredDmiService.DATA),
+ def uriVars = objectUnderTest.populateUriVariables(PASSTHROUGH_RUNNING.datastoreName, yangModelCmHandle.resolveDmiServiceName(RequiredDmiService.DATA),
"cmHandle")
and: 'query params'
def uriQueries = objectUnderTest.populateQueryParams(resourceId,
@@ -66,7 +65,7 @@ class DmiServiceUrlBuilderSpec extends Specification {
def 'Populate dmi data store url #scenario.'() {
given: 'uri variables are created'
dmiProperties.dmiBasePath = dmiBasePath
- def uriVars = objectUnderTest.populateUriVariables(PASSTHROUGH_RUNNING.value, yangModelCmHandle.resolveDmiServiceName(RequiredDmiService.DATA),
+ def uriVars = objectUnderTest.populateUriVariables(PASSTHROUGH_RUNNING.datastoreName, yangModelCmHandle.resolveDmiServiceName(RequiredDmiService.DATA),
"cmHandle")
and: 'null query params'
def uriQueries = objectUnderTest.populateQueryParams(null,
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/SyncUtilsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/SyncUtilsSpec.groovy
index 8164dcf9c..c6ce1a5df 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/SyncUtilsSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/inventory/sync/SyncUtilsSpec.groovy
@@ -21,7 +21,7 @@
package org.onap.cps.ncmp.api.inventory.sync
-import static org.onap.cps.ncmp.api.impl.operations.DataStoreEnum.PASSTHROUGH_OPERATIONAL
+import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL
import com.fasterxml.jackson.databind.JsonNode
import com.fasterxml.jackson.databind.ObjectMapper
@@ -137,7 +137,7 @@ class SyncUtilsSpec extends Specification{
def jsonString = '{"stores:bookstore":{"categories":[{"code":"01"}]}}'
JsonNode jsonNode = jsonObjectMapper.convertToJsonNode(jsonString);
def responseEntity = new ResponseEntity<>(jsonNode, HttpStatus.OK)
- mockDmiDataOperations.getResourceDataFromDmi(PASSTHROUGH_OPERATIONAL.value, 'cm-handle-123', _) >> responseEntity
+ mockDmiDataOperations.getResourceDataFromDmi(PASSTHROUGH_OPERATIONAL.datastoreName, 'cm-handle-123', _) >> responseEntity
when: 'get resource data is called'
def result = objectUnderTest.getResourceData('cm-handle-123')
then: 'the returned data is correct'
diff --git a/cps-ri/pom.xml b/cps-ri/pom.xml
index ea1efcb3e..3ef57cf03 100644
--- a/cps-ri/pom.xml
+++ b/cps-ri/pom.xml
@@ -33,7 +33,8 @@
<artifactId>cps-ri</artifactId>
<properties>
- <minimum-coverage>0.79</minimum-coverage>
+ <minimum-coverage>0.45</minimum-coverage>
+ <!-- Coverage is provided by integration-test module instead -->
</properties>
<dependencies>
@@ -147,18 +148,10 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <excludes>
- <exclude>%regex[.*PerfTest.*]</exclude>
- </excludes>
- </configuration>
</plugin>
</plugins>
</build>
</profile>
- <profile>
- <id>include-performance</id>
- </profile>
</profiles>
<build>
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
index 49e2dd253..b7ce98e1a 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
@@ -1,6 +1,7 @@
/*
* ============LICENSE_START=======================================================
* Copyright (C) 2022 Nordix Foundation
+ * Modifications Copyright (C) 2023 TechMahindra Ltd.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -40,9 +41,11 @@ public class FragmentEntityArranger {
public static Collection<FragmentEntity> toFragmentEntityTrees(final AnchorEntity anchorEntity,
final Collection<FragmentExtract> fragmentExtracts) {
final Map<Long, FragmentEntity> fragmentEntityPerId = new HashMap<>();
- for (final FragmentExtract fragmentExtract : fragmentExtracts) {
- final FragmentEntity fragmentEntity = toFragmentEntity(anchorEntity, fragmentExtract);
- fragmentEntityPerId.put(fragmentEntity.getId(), fragmentEntity);
+ if (fragmentExtracts != null) {
+ for (final FragmentExtract fragmentExtract : fragmentExtracts) {
+ final FragmentEntity fragmentEntity = toFragmentEntity(anchorEntity, fragmentExtract);
+ fragmentEntityPerId.put(fragmentEntity.getId(), fragmentEntity);
+ }
}
return reuniteChildrenWithTheirParents(fragmentEntityPerId);
}
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
index c26cd2fea..d0154e116 100644
--- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
@@ -87,13 +87,6 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
private static final AnchorEntity ALL_ANCHORS = null;
@Override
- public void addChildDataNode(final String dataspaceName, final String anchorName, final String parentNodeXpath,
- final DataNode newChildDataNode) {
- final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName);
- addNewChildDataNode(anchorEntity, parentNodeXpath, newChildDataNode);
- }
-
- @Override
public void addChildDataNodes(final String dataspaceName, final String anchorName,
final String parentNodeXpath, final Collection<DataNode> dataNodes) {
final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName);
@@ -450,14 +443,25 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
}
@Override
- public void updateDataLeaves(final String dataspaceName, final String anchorName, final String xpath,
- final Map<String, Serializable> updateLeaves) {
+ public void batchUpdateDataLeaves(final String dataspaceName, final String anchorName,
+ final Map<String, Map<String, Serializable>> updatedLeavesPerXPath) {
final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName);
- final FragmentEntity fragmentEntity = getFragmentEntity(anchorEntity, xpath);
- final String currentLeavesAsString = fragmentEntity.getAttributes();
- final String mergedLeaves = mergeLeaves(updateLeaves, currentLeavesAsString);
- fragmentEntity.setAttributes(mergedLeaves);
- fragmentRepository.save(fragmentEntity);
+
+ final Collection<String> xpathsOfUpdatedLeaves = updatedLeavesPerXPath.keySet();
+ final Collection<FragmentEntity> fragmentEntities = getFragmentEntities(anchorEntity, xpathsOfUpdatedLeaves,
+ FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS);
+
+ for (final FragmentEntity fragmentEntity : fragmentEntities) {
+ final Map<String, Serializable> updatedLeaves = updatedLeavesPerXPath.get(fragmentEntity.getXpath());
+ final String mergedLeaves = mergeLeaves(updatedLeaves, fragmentEntity.getAttributes());
+ fragmentEntity.setAttributes(mergedLeaves);
+ }
+
+ try {
+ fragmentRepository.saveAll(fragmentEntities);
+ } catch (final StaleStateException staleStateException) {
+ retryUpdateDataNodesIndividually(anchorEntity, fragmentEntities);
+ }
}
@Override
@@ -687,9 +691,13 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
}
private String mergeLeaves(final Map<String, Serializable> updateLeaves, final String currentLeavesAsString) {
- final Map<String, Serializable> currentLeavesAsMap = currentLeavesAsString.isEmpty()
- ? new HashMap<>() : jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class);
- currentLeavesAsMap.putAll(updateLeaves);
+ Map<String, Serializable> currentLeavesAsMap = new HashMap<>();
+ if (currentLeavesAsString != null) {
+ currentLeavesAsMap = currentLeavesAsString.isEmpty()
+ ? new HashMap<>() : jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class);
+ currentLeavesAsMap.putAll(updateLeaves);
+ }
+
if (currentLeavesAsMap.isEmpty()) {
return "";
}
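To illustrate how the new batch API is meant to be fed, here is a hypothetical caller sketch (not part of this change) that builds the updatedLeavesPerXPath map the same way the updated Groovy specs do, via Collectors.toMap over data node xpaths and leaves; the helper class and method names are invented for illustration.

import java.io.Serializable;
import java.util.Collection;
import java.util.Map;
import java.util.stream.Collectors;
import org.onap.cps.spi.CpsDataPersistenceService;
import org.onap.cps.spi.model.DataNode;

// Hypothetical helper, only showing the shape of the batchUpdateDataLeaves input.
class BatchLeafUpdateSketch {

    static void patchLeaves(final CpsDataPersistenceService cpsDataPersistenceService,
                            final String dataspaceName,
                            final String anchorName,
                            final Collection<DataNode> dataNodes) {
        // One entry per xpath; each value holds the leaves to merge into that fragment.
        final Map<String, Map<String, Serializable>> updatedLeavesPerXPath = dataNodes.stream()
                .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves));
        // A single repository round-trip replaces the previous save-per-xpath behaviour.
        cpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName, updatedLeavesPerXPath);
    }
}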
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
index 67ccc805a..6d6dfd270 100755..100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
@@ -320,36 +320,6 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
}
@Sql([CLEAR_DATA, SET_DATA])
- def 'Update data node leaves.'() {
- when: 'update is performed for leaves'
- objectUnderTest.updateDataLeaves(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
- '/parent-200/child-201', ['leaf-value': 'new'])
- then: 'leaves are updated for selected data node'
- def updatedFragment = fragmentRepository.getReferenceById(DATA_NODE_202_FRAGMENT_ID)
- def updatedLeaves = getLeavesMap(updatedFragment)
- assert updatedLeaves.size() == 1
- assert updatedLeaves.'leaf-value' == 'new'
- and: 'existing child entry remains as is'
- def childFragment = updatedFragment.childFragments.iterator().next()
- def childLeaves = getLeavesMap(childFragment)
- assert childFragment.id == CHILD_OF_DATA_NODE_202_FRAGMENT_ID
- assert childLeaves.'leaf-value' == 'original'
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
- def 'Update data leaves error scenario: #scenario.'() {
- when: 'attempt to update data node for #scenario'
- objectUnderTest.updateDataLeaves(dataspaceName, anchorName, xpath, ['leaf-name': 'leaf-value'])
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where: 'the following data is used'
- scenario | dataspaceName | anchorName | xpath || expectedException
- 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | '/not relevant' || DataspaceNotFoundException
- 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' | '/not relevant' || AnchorNotFoundException
- 'non-existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException
- }
-
- @Sql([CLEAR_DATA, SET_DATA])
def 'Update data nodes and descendants by removing descendants.'() {
given: 'data nodes with leaves updated, no children'
def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [])]
@@ -592,7 +562,7 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
given: 'a data nodes with list-element child with "/" in index value (and grandchild)'
def grandChild = new DataNodeBuilder().withXpath(deleteTestGrandChildXPath).build()
def child = new DataNodeBuilder().withXpath(deleteTestChildXpath).withChildDataNodes([grandChild]).build()
- objectUnderTest.addChildDataNode(DATASPACE_NAME, ANCHOR_NAME3, deleteTestParentXPath, child)
+ objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME3, deleteTestParentXPath, [child])
and: 'number of children before delete is stored'
def numberOfChildrenBeforeDelete = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS)[0].childDataNodes.size()
when: 'target node is deleted'
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
index 8a5838827..e8921b3ed 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
+++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
@@ -38,6 +38,7 @@ import org.onap.cps.spi.utils.SessionManager
import org.onap.cps.utils.JsonObjectMapper
import org.springframework.dao.DataIntegrityViolationException
import spock.lang.Specification
+import java.util.stream.Collectors
class CpsDataPersistenceServiceSpec extends Specification {
@@ -68,6 +69,53 @@ class CpsDataPersistenceServiceSpec extends Specification {
2 * mockFragmentRepository.save(_)
}
+ def 'Handling of StaleStateException (caused by concurrent updates) during patch operation for data nodes.'() {
+ given: 'the system can update one datanode and has two more datanodes that throw an exception while updating'
+ def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([
+ '/node1': 'OK',
+ '/node2': 'EXCEPTION',
+ '/node3': 'EXCEPTION'])
+ def updatedLeavesPerXPath = dataNodes.stream()
+ .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves))
+ and: 'the batch update will therefore also fail'
+ mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") }
+ when: 'attempt batch update data nodes'
+ objectUnderTest.batchUpdateDataLeaves('some-dataspace', 'some-anchor', updatedLeavesPerXPath)
+ then: 'concurrency exception is thrown'
+ def thrown = thrown(ConcurrencyException)
+ assert thrown.message == 'Concurrent Transactions'
+ and: 'it does not contain the successful datanode'
+ assert !thrown.details.contains('/node1')
+ and: 'it contains the failed datanodes'
+ assert thrown.details.contains('/node2')
+ assert thrown.details.contains('/node3')
+ }
+
+ def 'Batch update data node leaves and descendants: #scenario'(){
+ given: 'the fragment repository returns fragment entities related to the xpath inputs'
+ mockFragmentRepository.findExtractsWithDescendants(_, [] as Set, _) >> []
+ mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath'] as Set, _) >> [
+ mockFragmentExtract(1, null, 123, '/test/xpath', "{\"id\":\"testId1\"}")
+ ]
+ mockFragmentRepository.findExtractsWithDescendants(123, ['/test/xpath1', '/test/xpath2'] as Set, _) >> [
+ mockFragmentExtract(1, null, 123, '/test/xpath1', "{\"id\":\"testId1\"}"),
+ mockFragmentExtract(2, null, 123, '/test/xpath2', "{\"id\":\"testId1\"}")
+ ]
+ when: 'replace data node tree'
+ objectUnderTest.batchUpdateDataLeaves('dataspaceName', 'anchorName',
+ dataNodes.stream().collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves)))
+ then: 'call fragment repository save all method'
+ 1 * mockFragmentRepository.saveAll({fragmentEntities ->
+ assert fragmentEntities as List == expectedFragmentEntities
+ assert fragmentEntities.size() == expectedSize
+ })
+ where: 'the following Data Type is passed'
+ scenario | dataNodes | expectedSize || expectedFragmentEntities
+ 'empty data node list' | [] | 0 || []
+ 'one data node in list' | [new DataNode(xpath: '/test/xpath', leaves: ['id': 'testId'])] | 1 || [new FragmentEntity(xpath: '/test/xpath', attributes: '{"id":"testId"}', anchor: anchorEntity)]
+ 'multiple data nodes' | [new DataNode(xpath: '/test/xpath1', leaves: ['id': 'newTestId1']), new DataNode(xpath: '/test/xpath2', leaves: ['id': 'newTestId2'])] | 2 || [new FragmentEntity(xpath: '/test/xpath2', attributes: '{"id":"newTestId2"}', anchor: anchorEntity), new FragmentEntity(xpath: '/test/xpath1', attributes: '{"id":"newTestId1"}', anchor: anchorEntity)]
+ }
+
def 'Handling of StaleStateException (caused by concurrent updates) during update data nodes and descendants.'() {
given: 'the system can update one datanode and has two more datanodes that throw an exception while updating'
def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([
@@ -81,7 +129,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
then: 'concurrency exception is thrown'
def thrown = thrown(ConcurrencyException)
assert thrown.message == 'Concurrent Transactions'
- and: 'it does not contain the successfull datanode'
+ and: 'it does not contain the successful datanode'
assert !thrown.details.contains('/node1')
and: 'it contains the failed datanodes'
assert thrown.details.contains('/node2')
@@ -157,26 +205,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L)
}
- def 'update data node leaves: #scenario'(){
- given: 'A node exists for the given xpath'
- mockFragmentRepository.getByAnchorAndXpath(_, '/some/xpath') >> new FragmentEntity(xpath: '/some/xpath', attributes: existingAttributes)
- when: 'the node leaves are updated'
- objectUnderTest.updateDataLeaves('some-dataspace', 'some-anchor', '/some/xpath', newAttributes as Map<String, Serializable>)
- then: 'the fragment entity saved has the original and new attributes'
- 1 * mockFragmentRepository.save({fragmentEntity -> {
- assert fragmentEntity.getXpath() == '/some/xpath'
- assert fragmentEntity.getAttributes() == mergedAttributes
- }})
- where: 'the following attributes combinations are used'
- scenario | existingAttributes | newAttributes | mergedAttributes
- 'add new leaf' | '{"existing":"value"}' | ["new":"value"] | '{"existing":"value","new":"value"}'
- 'update existing leaf' | '{"existing":"value"}' | ["existing":"value2"] | '{"existing":"value2"}'
- 'update nothing with nothing' | '' | [] | ''
- 'update with nothing' | '{"existing":"value"}' | [] | '{"existing":"value"}'
- 'update with same value' | '{"existing":"value"}' | ["existing":"value"] | '{"existing":"value"}'
- }
-
- def 'update data node and descendants: #scenario'(){
+ def 'Replace data node and descendants: #scenario'(){
given: 'the fragment repository returns fragment entities related to the xpath inputs'
mockFragmentRepository.findExtractsWithDescendants(_, [] as Set, _) >> []
mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath'] as Set, _) >> [
@@ -192,7 +221,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
'one data node in list' | [new DataNode(xpath: '/test/xpath', leaves: ['id': 'testId'], childDataNodes: [])] || [new FragmentEntity(xpath: '/test/xpath', attributes: '{"id":"testId"}', anchor: anchorEntity, childFragments: [])]
}
- def 'update data nodes and descendants'() {
+ def 'Replace data nodes and descendants'() {
given: 'the fragment repository returns fragment entities related to the xpath inputs'
mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath1', '/test/xpath2'] as Set, _) >> [
mockFragmentExtract(1, null, 123, '/test/xpath1', null),
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy
deleted file mode 100644
index daa774698..000000000
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2023 Nordix Foundation
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.spi.impl
-
-import org.onap.cps.spi.model.DataNode
-import org.onap.cps.spi.model.DataNodeBuilder
-import org.springframework.util.StopWatch
-
-class CpsPersistencePerfSpecBase extends CpsPersistenceSpecBase {
-
- static final String PERF_TEST_DATA = '/data/perf-test.sql'
- static final String PERF_DATASPACE = 'PERF-DATASPACE'
- static final String PERF_ANCHOR = 'PERF-ANCHOR'
- static final String PERF_TEST_PARENT = '/perf-parent-1'
-
- static def xpathsToAllGrandChildren = []
-
- static def PERFORMANCE_RECORD = []
-
- def stopWatch = new StopWatch()
-
- def cleanupSpec() {
- println('#############################################################################')
- println('## P E R F O R M A N C E T E S T R E S U L T S ##')
- println('#############################################################################')
- PERFORMANCE_RECORD.sort().each { println(it) }
- PERFORMANCE_RECORD.clear()
- }
-
- def createLineage(cpsDataPersistenceService, numberOfChildren, numberOfGrandChildren, createLists) {
- xpathsToAllGrandChildren = []
- (1..numberOfChildren).each {
- if (createLists) {
- def xpathFormat = "${PERF_TEST_PARENT}/perf-test-list-${it}[@key='%d']"
- def listElements = goForthAndMultiply(xpathFormat, numberOfGrandChildren)
- cpsDataPersistenceService.addListElements(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT, listElements)
- } else {
- def xpathFormat = "${PERF_TEST_PARENT}/perf-test-child-${it}/perf-test-grand-child-%d"
- def grandChildren = goForthAndMultiply(xpathFormat, numberOfGrandChildren)
- def child = new DataNodeBuilder()
- .withXpath("${PERF_TEST_PARENT}/perf-test-child-${it}")
- .withChildDataNodes(grandChildren)
- .build()
- cpsDataPersistenceService.addChildDataNode(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT, child)
- }
- }
- }
-
- def goForthAndMultiply(xpathFormat, numberOfGrandChildren) {
- def grandChildren = []
- (1..numberOfGrandChildren).each {
- def xpath = String.format(xpathFormat as String, it)
- def grandChild = new DataNodeBuilder().withXpath(xpath).build()
- xpathsToAllGrandChildren.add(grandChild.xpath)
- grandChildren.add(grandChild)
- }
- return grandChildren
- }
-
- def countDataNodes(Collection<DataNode> dataNodes) {
- int nodeCount = 0
- for (DataNode parent : dataNodes) {
- nodeCount = nodeCount + countDataNodes(parent)
- }
- return nodeCount
- }
-
- def countDataNodes(DataNode dataNode) {
- int nodeCount = 1
- for (DataNode child : dataNode.childDataNodes) {
- nodeCount = nodeCount + countDataNodes(child)
- }
- return nodeCount
- }
-
- def recordAndAssertPerformance(String shortTitle, thresholdInMs, recordedTimeInMs) {
- def pass = recordedTimeInMs <= thresholdInMs
- if (shortTitle.length()>40) {
- shortTitle = shortTitle.substring(0,40)
- }
- def record = String.format('%2d.%-40s limit%,7d took %,7d ms ', PERFORMANCE_RECORD.size()+1, shortTitle, thresholdInMs, recordedTimeInMs)
- record += pass?'PASS':'FAIL'
- PERFORMANCE_RECORD.add(record)
- assert recordedTimeInMs <= thresholdInMs
- return true
- }
-}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy
deleted file mode 100644
index 428088135..000000000
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy
+++ /dev/null
@@ -1,239 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2023 Nordix Foundation
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.spi.performance
-
-import org.onap.cps.spi.CpsDataPersistenceService
-import org.onap.cps.spi.impl.CpsPersistencePerfSpecBase
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.test.context.jdbc.Sql
-
-class CpsDataPersistenceServiceDeletePerfTest extends CpsPersistencePerfSpecBase {
-
- @Autowired
- CpsDataPersistenceService objectUnderTest
-
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() {
- when: 'a node with a large number of descendants is created'
- stopWatch.start()
- createLineage(objectUnderTest, 150, 50, false)
- stopWatch.stop()
- def setupDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'setup duration is under 10 seconds'
- recordAndAssertPerformance('Setup', 10_000, setupDurationInMillis)
- }
-
- def 'Delete 10 children with grandchildren'() {
- when: 'child nodes are deleted'
- stopWatch.start()
- (1..10).each {
- def childPath = "${PERF_TEST_PARENT}/perf-test-child-${it}".toString()
- objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, childPath)
- }
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 300 milliseconds'
- recordAndAssertPerformance('Delete 10 children', 300, deleteDurationInMillis)
- }
-
- def 'Batch delete 100 children with grandchildren'() {
- given: 'a list of xpaths to delete'
- def xpathsToDelete = (11..110).collect {
- "${PERF_TEST_PARENT}/perf-test-child-${it}".toString()
- }
- when: 'child nodes are deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR, xpathsToDelete)
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 300 milliseconds'
- recordAndAssertPerformance('Batch delete 100 children', 300, deleteDurationInMillis)
- }
-
- def 'Delete 50 grandchildren (that have no descendants)'() {
- when: 'target nodes are deleted'
- stopWatch.start()
- (1..50).each {
- def grandchildPath = "${PERF_TEST_PARENT}/perf-test-child-111/perf-test-grand-child-${it}".toString()
- objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, grandchildPath)
- }
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 700 milliseconds'
- recordAndAssertPerformance('Delete 50 grandchildren', 700, deleteDurationInMillis)
- }
-
- def 'Batch delete 500 grandchildren (that have no descendants)'() {
- given: 'a list of xpaths to delete'
- def xpathsToDelete = []
- for (int childIndex = 0; childIndex < 10; childIndex++) {
- xpathsToDelete.addAll((1..50).collect {
- "${PERF_TEST_PARENT}/perf-test-child-${112+childIndex}/perf-test-grand-child-${it}".toString()
- })
- }
- when: 'target nodes are deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR, xpathsToDelete)
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 100 milliseconds'
- recordAndAssertPerformance('Batch delete 500 grandchildren', 100, deleteDurationInMillis)
- }
-
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Create a node with many list elements (please note, subsequent tests depend on this running first).'() {
- when: 'a node with a large number of lists is created'
- stopWatch.start()
- createLineage(objectUnderTest, 150, 50, true)
- stopWatch.stop()
- def setupDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'setup duration is under 6 seconds'
- recordAndAssertPerformance('Setup lists', 6_000, setupDurationInMillis)
- }
-
- def 'Delete 10 whole lists'() {
- when: 'lists are deleted'
- stopWatch.start()
- (1..10).each {
- def childPath = "${PERF_TEST_PARENT}/perf-test-list-${it}".toString()
- objectUnderTest.deleteListDataNode(PERF_DATASPACE, PERF_ANCHOR, childPath)
- }
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 300 milliseconds'
- recordAndAssertPerformance('Delete 10 whole lists', 300, deleteDurationInMillis)
- }
-
- def 'Batch delete 100 whole lists'() {
- given: 'a list of xpaths to delete'
- def xpathsToDelete = (11..110).collect {
- "${PERF_TEST_PARENT}/perf-test-list-${it}".toString()
- }
- when: 'lists are deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR, xpathsToDelete)
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 600 milliseconds'
- recordAndAssertPerformance('Batch delete 100 whole lists', 600, deleteDurationInMillis)
- }
-
- def 'Delete 10 list elements'() {
- when: 'list elements are deleted'
- stopWatch.start()
- (1..10).each {
- def grandchildPath = "${PERF_TEST_PARENT}/perf-test-list-111[@key='${it}']".toString()
- objectUnderTest.deleteListDataNode(PERF_DATASPACE, PERF_ANCHOR, grandchildPath)
- }
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 200 milliseconds'
- recordAndAssertPerformance('Delete 10 lists elements', 200, deleteDurationInMillis)
- }
-
- def 'Batch delete 500 list elements'() {
- given: 'a list of xpaths to delete'
- def xpathsToDelete = []
- for (int childIndex = 0; childIndex < 10; childIndex++) {
- xpathsToDelete.addAll((1..50).collect {
- "${PERF_TEST_PARENT}/perf-test-list-${112+childIndex}[@key='${it}']".toString()
- })
- }
- when: 'list elements are deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR, xpathsToDelete)
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 100 milliseconds'
- recordAndAssertPerformance('Batch delete 500 lists elements', 100, deleteDurationInMillis)
- }
-
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Delete 1 large data node'() {
- given: 'a node with a large number of descendants is created'
- createLineage(objectUnderTest, 50, 50, false)
- createLineage(objectUnderTest, 50, 50, true)
- when: 'parent node is deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT)
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 300 milliseconds'
- recordAndAssertPerformance('Delete one large node', 300, deleteDurationInMillis)
- }
-
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Batch delete 1 large data node'() {
- given: 'a node with a large number of descendants is created'
- createLineage(objectUnderTest, 50, 50, false)
- createLineage(objectUnderTest, 50, 50, true)
- when: 'parent node is batch deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR, [PERF_TEST_PARENT])
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 300 milliseconds'
- recordAndAssertPerformance('Batch delete one large node', 300, deleteDurationInMillis)
- }
-
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Delete root node with many descendants'() {
- given: 'a node with a large number of descendants is created'
- createLineage(objectUnderTest, 50, 50, false)
- createLineage(objectUnderTest, 50, 50, true)
- when: 'root node is deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, '/')
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 300 milliseconds'
- recordAndAssertPerformance('Delete root node', 300, deleteDurationInMillis)
- }
-
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Delete data nodes for an anchor'() {
- given: 'a node with a large number of descendants is created'
- createLineage(objectUnderTest, 50, 50, false)
- createLineage(objectUnderTest, 50, 50, true)
- when: 'data nodes are deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR)
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 300 milliseconds'
- recordAndAssertPerformance('Delete data nodes for anchor', 300, deleteDurationInMillis)
- }
-
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Delete data nodes for multiple anchors'() {
- given: 'a node with a large number of descendants is created'
- createLineage(objectUnderTest, 50, 50, false)
- createLineage(objectUnderTest, 50, 50, true)
- when: 'data nodes are deleted'
- stopWatch.start()
- objectUnderTest.deleteDataNodes(PERF_DATASPACE, [PERF_ANCHOR])
- stopWatch.stop()
- def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'delete duration is under 300 milliseconds'
- recordAndAssertPerformance('Delete data nodes for anchors', 300, deleteDurationInMillis)
- }
-
-}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy
deleted file mode 100644
index 2628e9697..000000000
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2022-2023 Nordix Foundation
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.spi.performance
-
-import org.onap.cps.spi.impl.CpsPersistencePerfSpecBase
-import org.onap.cps.spi.CpsDataPersistenceService
-import org.onap.cps.spi.repository.AnchorRepository
-import org.onap.cps.spi.repository.DataspaceRepository
-import org.onap.cps.spi.repository.FragmentRepository
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.test.context.jdbc.Sql
-
-import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS
-import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS
-
-class CpsDataPersistenceServicePerfTest extends CpsPersistencePerfSpecBase {
-
- @Autowired
- CpsDataPersistenceService objectUnderTest
-
- @Autowired
- DataspaceRepository dataspaceRepository
-
- @Autowired
- AnchorRepository anchorRepository
-
- @Autowired
- FragmentRepository fragmentRepository
-
- static def NUMBER_OF_CHILDREN = 200
- static def NUMBER_OF_GRAND_CHILDREN = 50
-
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
- def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() {
- given: 'a node with a large number of descendants is created'
- stopWatch.start()
- createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false)
- stopWatch.stop()
- def setupDurationInMillis = stopWatch.getTotalTimeMillis()
- and: 'setup duration is under 10 seconds'
- recordAndAssertPerformance('Setup', 10000, setupDurationInMillis)
- }
-
- def 'Update data nodes with descendants'() {
- given: 'a list of xpaths to data nodes with descendants (xpath for each child)'
- def xpaths = (1..20).collect {
- "${PERF_TEST_PARENT}/perf-test-child-${it}".toString()
- }
- and: 'the correct number of data nodes are fetched'
- def dataNodes = objectUnderTest.getDataNodesForMultipleXpaths(PERF_DATASPACE, PERF_ANCHOR, xpaths, INCLUDE_ALL_DESCENDANTS)
- assert dataNodes.size() == 20
- assert countDataNodes(dataNodes) == 20 + 20 * 50
- when: 'the fragment entities are updated by the data nodes'
- stopWatch.start()
- objectUnderTest.updateDataNodesAndDescendants(PERF_DATASPACE, PERF_ANCHOR, dataNodes)
- stopWatch.stop()
- def updateDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'update duration is under 600 milliseconds'
- recordAndAssertPerformance('Update data nodes with descendants', 600, updateDurationInMillis)
- }
-
- def 'Update data nodes without descendants'() {
- given: 'a list of xpaths to data nodes without descendants (xpath for each grandchild)'
- def xpaths = []
- for (int childIndex = 21; childIndex <= 40; childIndex++) {
- xpaths.addAll((1..50).collect {
- "${PERF_TEST_PARENT}/perf-test-child-${childIndex}/perf-test-grand-child-${it}".toString()
- })
- }
- and: 'the correct number of data nodes are fetched'
- def dataNodes = objectUnderTest.getDataNodesForMultipleXpaths(PERF_DATASPACE, PERF_ANCHOR, xpaths, OMIT_DESCENDANTS)
- assert dataNodes.size() == 20 * 50
- assert countDataNodes(dataNodes) == 20 * 50
- when: 'the fragment entities are updated by the data nodes'
- stopWatch.start()
- objectUnderTest.updateDataNodesAndDescendants(PERF_DATASPACE, PERF_ANCHOR, dataNodes)
- stopWatch.stop()
- def updateDurationInMillis = stopWatch.getTotalTimeMillis()
- then: 'update duration is under 900 milliseconds'
- recordAndAssertPerformance('Update data nodes without descendants', 900, updateDurationInMillis)
- }
-}
diff --git a/cps-ri/src/test/resources/data/perf-test.sql b/cps-ri/src/test/resources/data/perf-test.sql
deleted file mode 100644
index 5119f26b2..000000000
--- a/cps-ri/src/test/resources/data/perf-test.sql
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- ============LICENSE_START=======================================================
- Copyright (C) 2022 Nordix Foundation.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- SPDX-License-Identifier: Apache-2.0
- ============LICENSE_END=========================================================
-*/
-
-INSERT INTO DATASPACE (ID, NAME) VALUES (9001, 'PERF-DATASPACE');
-
-INSERT INTO SCHEMA_SET (ID, NAME, DATASPACE_ID) VALUES (9002, 'PERF-SCHEMA-SET', 9001);
-
-INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES (9003, 'PERF-ANCHOR', 9001, 9002);
-
-INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH) VALUES (0, 9001, 9003, null, '/perf-parent-1');
-
diff --git a/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java b/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java
index fc008685c..6a2cac467 100644
--- a/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java
+++ b/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java
@@ -138,9 +138,7 @@ public interface CpsDataService {
FetchDescendantsOption fetchDescendantsOption);
/**
- * Updates data node for given dataspace and anchor using xpath to parent node. This method can currently
- * update only one top level data node. The method will throw DataValidationException when more than one top level
- * data nodes are provided in jsonData
+ * Updates multiple data nodes for given dataspace and anchor using xpath to parent node.
*
* @param dataspaceName dataspace name
* @param anchorName anchor name
@@ -154,10 +152,10 @@ public interface CpsDataService {
/**
* Replaces an existing data node's content including descendants.
*
- * @param dataspaceName dataspace name
- * @param anchorName anchor name
- * @param parentNodeXpath xpath to parent node
- * @param jsonData json data
+ * @param dataspaceName dataspace name
+ * @param anchorName anchor name
+ * @param parentNodeXpath xpath to parent node
+ * @param jsonData json data
* @param observedTimestamp observedTimestamp
*/
void updateDataNodeAndDescendants(String dataspaceName, String anchorName, String parentNodeXpath, String jsonData,
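The reworked updateNodeLeaves contract above means a single PATCH payload may now carry several top-level data nodes. A minimal caller sketch, assuming an injected CpsDataService bean and hypothetical dataspace/anchor names; the JSON payload mirrors the multipleDataTree test data used further down in this change:

    import java.time.OffsetDateTime;
    import org.onap.cps.api.CpsDataService;

    public class MultiNodeLeafPatchExample {

        private final CpsDataService cpsDataService;

        public MultiNodeLeafPatchExample(final CpsDataService cpsDataService) {
            this.cpsDataService = cpsDataService;
        }

        public void patchTwoSiblingContainers() {
            // Before this change a payload with more than one top-level data node
            // was rejected with a DataValidationException; both siblings are now updated.
            final String jsonData = "{\"first-container\":{\"a-leaf\":\"a-new-Value\"},"
                    + "\"last-container\":{\"x-leaf\":\"x-new-value\"}}";
            cpsDataService.updateNodeLeaves("my-dataspace", "my-anchor", "/", jsonData,
                    OffsetDateTime.now());
        }
    }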
diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
index 51e31f08c..99cda229d 100755
--- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
+++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
@@ -29,8 +29,10 @@ import static org.onap.cps.notification.Operation.DELETE;
import static org.onap.cps.notification.Operation.UPDATE;
import io.micrometer.core.annotation.Timed;
+import java.io.Serializable;
import java.time.OffsetDateTime;
import java.util.Collection;
+import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
@@ -155,20 +157,16 @@ public class CpsDataServiceImpl implements CpsDataService {
@Override
@Timed(value = "cps.data.service.datanode.leaves.update",
- description = "Time taken to get a batch of data nodes")
+ description = "Time taken to update a batch of leaf data nodes")
public void updateNodeLeaves(final String dataspaceName, final String anchorName, final String parentNodeXpath,
final String jsonData, final OffsetDateTime observedTimestamp) {
cpsValidator.validateNameCharacters(dataspaceName, anchorName);
final Anchor anchor = cpsAdminService.getAnchor(dataspaceName, anchorName);
final Collection<DataNode> dataNodesInPatch = buildDataNodes(anchor, parentNodeXpath, jsonData,
ContentType.JSON);
- if (dataNodesInPatch.size() > 1) {
- throw new DataValidationException("Operation is not supported for multiple data nodes",
- "Number of data nodes present: " + dataNodesInPatch.size());
- }
- cpsDataPersistenceService.updateDataLeaves(dataspaceName, anchorName,
- dataNodesInPatch.iterator().next().getXpath(),
- dataNodesInPatch.iterator().next().getLeaves());
+ final Map<String, Map<String, Serializable>> xpathToUpdatedLeaves = dataNodesInPatch.stream()
+ .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves));
+ cpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName, xpathToUpdatedLeaves);
processDataUpdatedEventAsync(anchor, parentNodeXpath, UPDATE, observedTimestamp);
}
@@ -395,8 +393,8 @@ public class CpsDataServiceImpl implements CpsDataService {
if (dataNodeUpdate == null) {
return;
}
- cpsDataPersistenceService.updateDataLeaves(anchor.getDataspaceName(), anchor.getName(),
- dataNodeUpdate.getXpath(), dataNodeUpdate.getLeaves());
+ cpsDataPersistenceService.batchUpdateDataLeaves(anchor.getDataspaceName(), anchor.getName(),
+ Collections.singletonMap(dataNodeUpdate.getXpath(), dataNodeUpdate.getLeaves()));
final Collection<DataNode> childDataNodeUpdates = dataNodeUpdate.getChildDataNodes();
for (final DataNode childDataNodeUpdate : childDataNodeUpdates) {
processDataNodeUpdate(anchor, childDataNodeUpdate);
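A minimal sketch of the new grouping step in CpsDataServiceImpl shown above: the parsed data nodes are collapsed into one xpath-to-leaves map so the persistence layer is called once per PATCH rather than once per node. The DataNode accessors are the existing CPS model getters; the input collection is a stand-in for the nodes built from the payload:

    import java.io.Serializable;
    import java.util.Collection;
    import java.util.Map;
    import java.util.stream.Collectors;
    import org.onap.cps.spi.model.DataNode;

    public class LeafUpdateGrouping {

        static Map<String, Map<String, Serializable>> groupLeavesByXpath(
                final Collection<DataNode> dataNodesInPatch) {
            // Assumes each xpath occurs at most once in the payload;
            // Collectors.toMap would otherwise throw on a duplicate key.
            return dataNodesInPatch.stream()
                    .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves));
        }
    }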
diff --git a/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java b/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java
index d28a3339f..9674bbe8c 100644
--- a/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java
+++ b/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java
@@ -44,15 +44,6 @@ public interface CpsDataPersistenceService {
*/
void storeDataNodes(String dataspaceName, String anchorName, Collection<DataNode> dataNodes);
- /**
- * Add a child to a Fragment.
- *
- * @param dataspaceName dataspace name
- * @param anchorName anchor name
- * @param parentXpath parent xpath
- * @param dataNode dataNode
- */
- void addChildDataNode(String dataspaceName, String anchorName, String parentXpath, DataNode dataNode);
/**
* Add multiple children to a Fragment.
@@ -116,14 +107,14 @@ public interface CpsDataPersistenceService {
FetchDescendantsOption fetchDescendantsOption);
/**
- * Updates leaves for existing data node.
+ * Updates data leaves for multiple data nodes.
*
- * @param dataspaceName dataspace name
- * @param anchorName anchor name
- * @param xpath xpath
- * @param leaves the leaves as a map where key is a leaf name and a value is a leaf value
+ * @param dataspaceName dataspace name
+ * @param anchorName anchor name
+ * @param updatedLeavesPerXPath Map of xPaths to updated leaf nodes
*/
- void updateDataLeaves(String dataspaceName, String anchorName, String xpath, Map<String, Serializable> leaves);
+ void batchUpdateDataLeaves(String dataspaceName, String anchorName,
+ Map<String, Map<String, Serializable>> updatedLeavesPerXPath);
/**
* Replaces multiple existing data nodes' content including descendants in a batch operation.
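With updateDataLeaves removed from the SPI, single-node callers go through batchUpdateDataLeaves with a batch of size one. A sketch under the assumption of an already-obtained CpsDataPersistenceService instance; the xpath and leaf values echo the bookstore example in the spec below:

    import java.io.Serializable;
    import java.util.Collections;
    import java.util.Map;
    import org.onap.cps.spi.CpsDataPersistenceService;

    public class SingleNodeLeafUpdate {

        static void updateOneCategory(final CpsDataPersistenceService persistenceService) {
            final Map<String, Serializable> leaves =
                    Map.<String, Serializable>of("name", "Romance", "code", "01");
            // The former updateDataLeaves(xpath, leaves) call becomes a
            // singleton map keyed by the same xpath.
            persistenceService.batchUpdateDataLeaves("my-dataspace", "my-anchor",
                    Collections.singletonMap("/bookstore/categories[@code='01']", leaves));
        }
    }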
diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy
index be397b92c..e357d2462 100644
--- a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy
+++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy
@@ -215,15 +215,15 @@ class CpsDataServiceImplSpec extends Specification {
when: 'update data method is invoked with json data #jsonData and parent node xpath #parentNodeXpath'
objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, parentNodeXpath, jsonData, observedTimestamp)
then: 'the persistence service method is invoked with correct parameters'
- 1 * mockCpsDataPersistenceService.updateDataLeaves(dataspaceName, anchorName, expectedNodeXpath, leaves)
+ 1 * mockCpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName, {dataNode -> dataNode.keySet()[0] == expectedNodeXpath})
and: 'the CpsValidator is called on the dataspaceName and AnchorName'
1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName)
and: 'data updated event is sent to notification service'
1 * mockNotificationService.processDataUpdatedEvent(anchor, parentNodeXpath, Operation.UPDATE, observedTimestamp)
where: 'following parameters were used'
- scenario | parentNodeXpath | jsonData || expectedNodeXpath | leaves
- 'top level node' | '/' | '{"test-tree": {"branch": []}}' || '/test-tree' | Collections.emptyMap()
- 'level 2 node' | '/test-tree' | '{"branch": [{"name":"Name"}]}' || '/test-tree/branch[@name=\'Name\']' | ['name': 'Name']
+ scenario | parentNodeXpath | jsonData || expectedNodeXpath
+ 'top level node' | '/' | '{"test-tree": {"branch": []}}' || '/test-tree'
+ 'level 2 node' | '/test-tree' | '{"branch": [{"name":"Name"}]}' || '/test-tree/branch[@name=\'Name\']'
}
def 'Update list-element data node with : #scenario.'() {
@@ -244,11 +244,21 @@ class CpsDataServiceImplSpec extends Specification {
given: 'schema set for given dataspace and anchor refers multipleDataTree model'
setupSchemaSetMocks('multipleDataTree.yang')
and: 'json string with multiple data trees'
+ def parentNodeXpath = '/'
def updatedJsonData = '{"first-container":{"a-leaf":"a-new-Value"},"last-container":{"x-leaf":"x-new-value"}}'
when: 'update operation is performed on multiple data nodes'
- objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, '/', updatedJsonData, observedTimestamp)
- then: 'expected exception is thrown'
- thrown(DataValidationException)
+ objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, parentNodeXpath, updatedJsonData, observedTimestamp)
+ then: 'the persistence service method is invoked with correct parameters'
+ 1 * mockCpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName, {dataNode -> dataNode.keySet()[index] == expectedNodeXpath})
+ and: 'the CpsValidator is called on the dataspaceName and AnchorName'
+ 1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName)
+ and: 'data updated event is sent to notification service'
+ 1 * mockNotificationService.processDataUpdatedEvent(anchor, parentNodeXpath, Operation.UPDATE, observedTimestamp)
+ where: 'the following parameters were used'
+ index | expectedNodeXpath
+ 0 | '/first-container'
+ 1 | '/last-container'
+
}
def 'Update Bookstore node leaves' () {
@@ -260,8 +270,9 @@ class CpsDataServiceImplSpec extends Specification {
objectUnderTest.updateNodeLeavesAndExistingDescendantLeaves(dataspaceName, anchorName,
'/bookstore', jsonData, observedTimestamp)
then: 'the persistence service method is invoked with correct parameters'
- 1 * mockCpsDataPersistenceService.updateDataLeaves(dataspaceName, anchorName,
- "/bookstore/categories[@code='01']", ['name':'Romance', 'code': '01'])
+ 1 * mockCpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName,
+ {updatedDataNodesPerXPath -> updatedDataNodesPerXPath.keySet()
+ .iterator().next() == "/bookstore/categories[@code='01']"})
and: 'the CpsValidator is called on the dataspaceName and AnchorName'
1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName)
and: 'the data updated event is sent to the notification service'
diff --git a/docs/release-notes.rst b/docs/release-notes.rst
index d6620236b..0b2d2a461 100755
--- a/docs/release-notes.rst
+++ b/docs/release-notes.rst
@@ -43,7 +43,7 @@ Bug Fixes
Features
--------
- - None
+ - `CPS-1006 <https://jira.onap.org/browse/CPS-1006>`_ Extend CPS PATCH API to allow update of leaves for multiple data nodes
Version: 3.3.1
==============
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/base/FunctionalSpecBase.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/base/FunctionalSpecBase.groovy
index b7a6030d8..f18a8e4c9 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/base/FunctionalSpecBase.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/base/FunctionalSpecBase.groovy
@@ -27,6 +27,7 @@ class FunctionalSpecBase extends CpsIntegrationSpecBase {
def static NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA = 2
def static BOOKSTORE_ANCHOR_1 = 'bookstoreAnchor1'
def static BOOKSTORE_ANCHOR_2 = 'bookstoreAnchor2'
+ def static BOOKSTORE_ANCHOR_FOR_PATCH = 'bookstoreAnchor2'
def static initialized = false
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
index f609ba00e..bf86e13c8 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
@@ -24,12 +24,23 @@ package org.onap.cps.integration.functional
import org.onap.cps.api.CpsDataService
import org.onap.cps.integration.base.FunctionalSpecBase
import org.onap.cps.spi.FetchDescendantsOption
+import org.onap.cps.spi.exceptions.AnchorNotFoundException
+import org.onap.cps.spi.exceptions.DataValidationException
+import org.onap.cps.spi.exceptions.DataspaceNotFoundException
+
+import java.time.OffsetDateTime
+
class CpsDataServiceIntegrationSpec extends FunctionalSpecBase {
CpsDataService objectUnderTest
+ def originalCountBookstoreChildNodes
- def setup() { objectUnderTest = cpsDataService }
+ def setup() {
+ objectUnderTest = cpsDataService
+ originalCountBookstoreChildNodes = countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+ }
def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() {
when: 'get data nodes for bookstore container'
@@ -38,6 +49,10 @@ class CpsDataServiceIntegrationSpec extends FunctionalSpecBase {
assert countDataNodesInTree(result) == expectNumberOfDataNodes
and: 'the top level data node has the expected attribute and value'
assert result.leaves['bookstore-name'] == ['Easons']
+ and: 'they are from the correct dataspace'
+ assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1]
+ and: 'they are from the correct anchor'
+ assert result.anchorName == [BOOKSTORE_ANCHOR_1]
where: 'the following option is used'
fetchDescendantsOption || expectNumberOfDataNodes
FetchDescendantsOption.OMIT_DESCENDANTS || 1
@@ -46,13 +61,83 @@ class CpsDataServiceIntegrationSpec extends FunctionalSpecBase {
new FetchDescendantsOption(2) || 17
}
- def 'Read bookstore top-level container(s) has correct dataspace and anchor.'() {
- when: 'get data nodes for bookstore container'
- def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
- then: 'the correct dataspace was queried'
- assert result.dataspace.toSet() == [FUNCTIONAL_TEST_DATASPACE_1].toSet()
- and: 'the correct anchor was queried'
- assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1].toSet()
+ def 'Add and Delete a (container) datanode.'() {
+ given: 'new (webinfo) datanode'
+ def json = '{"webinfo": {"domain-name":"ourbookstore.com" ,"contact-email":"info@ourbookstore.com" }}'
+ when: 'the new datanode is saved'
+ objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, OffsetDateTime.now())
+ then: 'it can be retrieved by its xpath'
+ def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', FetchDescendantsOption.DIRECT_CHILDREN_ONLY)
+ assert result.size() == 1
+ assert result[0].xpath == '/bookstore/webinfo'
+ and: 'there is now one extra datanode'
+ assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+ when: 'the new datanode is deleted'
+ objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', OffsetDateTime.now())
+ then: 'the original number of datanodes is restored'
+ assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+ }
+
+ def 'Add and Delete list (element) datanodes.'() {
+ given: 'two new (categories) datanodes'
+ def json = '{"categories": [ {"code":"new1"}, {"code":"new2" } ] }'
+ when: 'the new list elements are saved'
+ objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, OffsetDateTime.now())
+ then: 'they can be retrieved by their xpaths'
+ objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', FetchDescendantsOption.DIRECT_CHILDREN_ONLY).size() == 1
+ objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', FetchDescendantsOption.DIRECT_CHILDREN_ONLY).size() == 1
+ and: 'there are now two extra datanodes'
+ assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+ when: 'the new elements are deleted'
+ objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', OffsetDateTime.now())
+ objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', OffsetDateTime.now())
+ then: 'the original number of datanodes is restored'
+ assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
}
+ def 'Add and Delete a batch of lists (element) datanodes.'() {
+ given: 'two new (categories) datanodes in two separate batches'
+ def json1 = '{"categories": [ {"code":"new1"} ] }'
+ def json2 = '{"categories": [ {"code":"new2"} ] }'
+ when: 'the batches of new list element(s) are saved'
+ objectUnderTest.saveListElementsBatch(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', [json1, json2], OffsetDateTime.now())
+ then: 'they can be retrieved by their xpaths'
+ objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', FetchDescendantsOption.DIRECT_CHILDREN_ONLY).size() == 1
+ objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', FetchDescendantsOption.DIRECT_CHILDREN_ONLY).size() == 1
+ and: 'there are now two extra datanodes'
+ assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+ when: 'the new elements are deleted'
+ objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', OffsetDateTime.now())
+ objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', OffsetDateTime.now())
+ then: 'the original number of datanodes is restored'
+ assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+ }
+
+ def 'Update multiple data node leaves.'() {
+ given: 'Updated json for bookstore data'
+ def jsonData = "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda','authors':['RoaldDahl']}}"
+ when: 'update is performed for leaves'
+ objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_FOR_PATCH, "/bookstore/categories[@code='1']", jsonData, OffsetDateTime.now())
+ then: 'the updated data nodes are retrieved'
+ def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_FOR_PATCH, "/bookstore/categories[@code=1]/books[@title='Matilda']", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
+ and: 'the leaf values are updated as expected'
+ assert result.leaves['lang'] == ['English/French']
+ assert result.leaves['price'] == [100]
+ }
+
+ def 'Update multiple data leaves error scenario: #scenario.'() {
+ given: 'Updated json for bookstore data'
+ def jsonData = "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda','authors':['RoaldDahl'],'pub_year':1988}}"
+ when: 'attempt to update data node for #scenario'
+ objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, xpath, jsonData, OffsetDateTime.now())
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where: 'the following data is used'
+ scenario | dataspaceName | anchorName | xpath || expectedException
+ 'invalid dataspace name' | 'INVALID DATAsPACE' | 'not-relevant' | '/not relevant' || DataValidationException
+ 'invalid anchor name' | FUNCTIONAL_TEST_DATASPACE_1 | 'INVALID ANCHOR' | '/not relevant' || DataValidationException
+ 'non-existing dataspace' | 'non-existing-dataspace' | 'not-relevant' | '/not relevant' || DataspaceNotFoundException
+ 'non-existing anchor' | FUNCTIONAL_TEST_DATASPACE_1 | 'non-existing-anchor' | '/not relevant' || AnchorNotFoundException
+ 'non-existing-xpath' | FUNCTIONAL_TEST_DATASPACE_1 | BOOKSTORE_ANCHOR_FOR_PATCH | '/non-existing' || DataValidationException
+ }
}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
index d339f6ddc..6b1efe955 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
@@ -81,7 +81,7 @@ class CpsPerfTestBase extends PerfTestBase {
addAnchorsWithData(5, CPS_PERFORMANCE_TEST_DATASPACE, LARGE_SCHEMA_SET, 'openroadm', data)
stopWatch.stop()
def durationInMillis = stopWatch.getTotalTimeMillis()
- recordAndAssertPerformance('Creating openroadm anchors with large data tree', 25_000, durationInMillis)
+ recordAndAssertPerformance('Creating openroadm anchors with large data tree', 30_000, durationInMillis)
}
def generateOpenRoadData(numberOfNodes) {
@@ -98,8 +98,8 @@ class CpsPerfTestBase extends PerfTestBase {
assert countDataNodesInTree(result) == 1
stopWatch.stop()
def durationInMillis = stopWatch.getTotalTimeMillis()
- then: 'all data is read within 25 seconds (warm up not critical)'
- recordAndAssertPerformance("Warming database", 25_000, durationInMillis)
+ then: 'all data is read within 30 seconds (warm up not critical)'
+ recordAndAssertPerformance("Warming database", 30_000, durationInMillis)
}
}
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsModuleServicePerfTest.groovy
index 222a828b9..ce0ed5c17 100644
--- a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsModuleReferenceRepositoryPerfTest.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsModuleServicePerfTest.groovy
@@ -18,23 +18,16 @@
* ============LICENSE_END=========================================================
*/
-package org.onap.cps.spi.performance
+package org.onap.cps.integration.performance.cps
-import org.onap.cps.spi.CpsModulePersistenceService
-import org.onap.cps.spi.entities.SchemaSetEntity
-import org.onap.cps.spi.impl.CpsPersistenceSpecBase
+import org.onap.cps.api.CpsModuleService
+import org.onap.cps.integration.performance.base.CpsPerfTestBase
import org.onap.cps.spi.model.ModuleReference
-import org.onap.cps.spi.repository.ModuleReferenceRepository
-import org.onap.cps.spi.repository.SchemaSetRepository
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.test.context.jdbc.Sql
import org.springframework.util.StopWatch
import java.util.concurrent.ThreadLocalRandom
-class CpsModuleReferenceRepositoryPerfTest extends CpsPersistenceSpecBase {
-
- static final String PERF_TEST_DATA = '/data/perf-test.sql'
+class CpsModuleServicePerfTest extends CpsPerfTestBase {
def NEW_RESOURCE_CONTENT = 'module stores {\n' +
' yang-version 1.1;\n' +
@@ -48,16 +41,10 @@ class CpsModuleReferenceRepositoryPerfTest extends CpsPersistenceSpecBase {
' }' +
'}'
- @Autowired
- CpsModulePersistenceService objectUnderTest
-
- @Autowired
- SchemaSetRepository schemaSetRepository
+ CpsModuleService objectUnderTest
- @Autowired
- ModuleReferenceRepository moduleReferenceRepository
+ def setup() { objectUnderTest = cpsModuleService }
- @Sql([CLEAR_DATA, PERF_TEST_DATA])
def 'Store new schema set with many modules'() {
when: 'a new schema set with 200 modules is stored'
def newYangResourcesNameToContentMap = [:]
@@ -68,17 +55,16 @@ class CpsModuleReferenceRepositoryPerfTest extends CpsPersistenceSpecBase {
def content = NEW_RESOURCE_CONTENT.replace('2020',String.valueOf(year)).replace('stores',moduleName)
newYangResourcesNameToContentMap.put(resourceName, content)
}
- objectUnderTest.storeSchemaSet('PERF-DATASPACE', 'perfSchemaSet', newYangResourcesNameToContentMap)
+ objectUnderTest.createSchemaSet(CPS_PERFORMANCE_TEST_DATASPACE, 'perfSchemaSet', newYangResourcesNameToContentMap)
then: 'the schema set is persisted correctly'
- def dataspaceEntity = dataspaceRepository.getByName('PERF-DATASPACE')
- SchemaSetEntity result = schemaSetRepository.getByDataspaceAndName(dataspaceEntity, 'perfSchemaSet')
- result.yangResources.size() == 200
+ def result = cpsModuleService.getSchemaSet(CPS_PERFORMANCE_TEST_DATASPACE, 'perfSchemaSet')
+ result.moduleReferences.size() == 200
and: 'identification of new module resources is fast enough (1,000 executions less than 6,000 milliseconds)'
def stopWatch = new StopWatch()
1000.times() {
def moduleReferencesToCheck = createModuleReferencesWithRandomMatchingExistingModuleReferences()
stopWatch.start()
- def newModuleReferences = moduleReferenceRepository.identifyNewModuleReferences(moduleReferencesToCheck)
+ def newModuleReferences = objectUnderTest.identifyNewModuleReferences(moduleReferencesToCheck)
stopWatch.stop()
assert newModuleReferences.size() > 0 && newModuleReferences.size() < 300
}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/DeletePerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/DeletePerfTest.groovy
new file mode 100644
index 000000000..db36b8809
--- /dev/null
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/DeletePerfTest.groovy
@@ -0,0 +1,170 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the 'License');
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an 'AS IS' BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.integration.performance.cps
+
+import java.time.OffsetDateTime
+import org.onap.cps.api.CpsDataService
+import org.onap.cps.integration.performance.base.CpsPerfTestBase
+
+class DeletePerfTest extends CpsPerfTestBase {
+
+ CpsDataService objectUnderTest
+
+ def setup() { objectUnderTest = cpsDataService }
+
+ def 'Create test data (please note, subsequent tests depend on this running first).'() {
+ when: 'multiple anchors with a node with a large number of descendants is created'
+ stopWatch.start()
+ def data = generateOpenRoadData(50)
+ addAnchorsWithData(9, CPS_PERFORMANCE_TEST_DATASPACE, LARGE_SCHEMA_SET, 'delete', data)
+ stopWatch.stop()
+ def setupDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'setup duration is under 40 seconds'
+ recordAndAssertPerformance('Delete test setup', 40_000, setupDurationInMillis)
+ }
+
+ def 'Delete 10 container nodes'() {
+ when: 'child nodes are deleted'
+ stopWatch.start()
+ (1..10).each {
+ def childPath = "/openroadm-devices/openroadm-device[@device-id='C201-7-1A-" + it + "']/org-openroadm-device"
+ objectUnderTest.deleteDataNode(CPS_PERFORMANCE_TEST_DATASPACE, 'delete1', childPath, OffsetDateTime.now())
+ }
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Delete 10 containers', 300, deleteDurationInMillis)
+ }
+
+ def 'Batch delete 50 container nodes'() {
+ given: 'a list of xpaths to delete'
+ def xpathsToDelete = (1..50).collect {
+ "/openroadm-devices/openroadm-device[@device-id='C201-7-1A-" + it + "']/org-openroadm-device"
+ }
+ when: 'child nodes are deleted'
+ stopWatch.start()
+ objectUnderTest.deleteDataNodes(CPS_PERFORMANCE_TEST_DATASPACE, 'delete2', xpathsToDelete, OffsetDateTime.now())
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Batch delete 50 containers', 300, deleteDurationInMillis)
+ }
+
+ def 'Delete 20 list elements'() {
+ when: 'list elements are deleted'
+ stopWatch.start()
+ (1..20).each {
+ def listElementXpath = "/openroadm-devices/openroadm-device[@device-id='C201-7-1A-1']/org-openroadm-device/degree[@degree-number=" + it + "]"
+ objectUnderTest.deleteDataNode(CPS_PERFORMANCE_TEST_DATASPACE, 'delete3', listElementXpath, OffsetDateTime.now())
+ }
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Delete 20 list elements', 300, deleteDurationInMillis)
+ }
+
+ def 'Batch delete 1000 list elements'() {
+ given: 'a list of xpaths to delete'
+ def xpathsToDelete = []
+ for (int childIndex = 1; childIndex <= 50; childIndex++) {
+ xpathsToDelete.addAll((1..20).collect {
+ "/openroadm-devices/openroadm-device[@device-id='C201-7-1A-${childIndex}']/org-openroadm-device/degree[@degree-number=${it}]".toString()
+ })
+ }
+ when: 'list elements are deleted'
+ stopWatch.start()
+ objectUnderTest.deleteDataNodes(CPS_PERFORMANCE_TEST_DATASPACE, 'delete4', xpathsToDelete, OffsetDateTime.now())
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Batch delete 1000 list elements', 300, deleteDurationInMillis)
+ }
+
+ def 'Delete 10 whole lists'() {
+ when: 'lists are deleted'
+ stopWatch.start()
+ (1..10).each {
+ def childPath = "/openroadm-devices/openroadm-device[@device-id='C201-7-1A-" + it + "']/org-openroadm-device/degree"
+ objectUnderTest.deleteDataNode(CPS_PERFORMANCE_TEST_DATASPACE, 'delete5', childPath, OffsetDateTime.now())
+ }
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Delete 10 whole lists', 300, deleteDurationInMillis)
+ }
+
+ def 'Batch delete 30 whole lists'() {
+ given: 'a list of xpaths to delete'
+ def xpathsToDelete = (1..30).collect {
+ "/openroadm-devices/openroadm-device[@device-id='C201-7-1A-" + it + "']/org-openroadm-device/degree"
+ }
+ when: 'lists are deleted'
+ stopWatch.start()
+ objectUnderTest.deleteDataNodes(CPS_PERFORMANCE_TEST_DATASPACE, 'delete6', xpathsToDelete, OffsetDateTime.now())
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Batch delete 30 whole lists', 300, deleteDurationInMillis)
+ }
+
+ def 'Delete 1 large data node'() {
+ when: 'parent node is deleted'
+ stopWatch.start()
+ objectUnderTest.deleteDataNode(CPS_PERFORMANCE_TEST_DATASPACE, 'delete7', '/openroadm-devices', OffsetDateTime.now())
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Delete one large node', 300, deleteDurationInMillis)
+ }
+
+ def 'Delete root node with many descendants'() {
+ when: 'root node is deleted'
+ stopWatch.start()
+ objectUnderTest.deleteDataNode(CPS_PERFORMANCE_TEST_DATASPACE, 'delete8', '/', OffsetDateTime.now())
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Delete root node', 300, deleteDurationInMillis)
+ }
+
+ def 'Delete data nodes for an anchor'() {
+ when: 'data nodes are deleted'
+ stopWatch.start()
+ objectUnderTest.deleteDataNodes(CPS_PERFORMANCE_TEST_DATASPACE, 'delete9', OffsetDateTime.now())
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 300 milliseconds'
+ recordAndAssertPerformance('Delete data nodes for anchor', 300, deleteDurationInMillis)
+ }
+
+ def 'Clean up test data'() {
+ given: 'a list of anchors to delete'
+ def anchorNames = (1..9).collect {'delete' + it}
+ when: 'data nodes are deleted'
+ stopWatch.start()
+ cpsAdminService.deleteAnchors(CPS_PERFORMANCE_TEST_DATASPACE, anchorNames)
+ stopWatch.stop()
+ def deleteDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'delete duration is under 1000 milliseconds'
+ recordAndAssertPerformance('Delete test cleanup', 1000, deleteDurationInMillis)
+ }
+
+}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/GetPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/GetPerfTest.groovy
index c072755d3..d20da46cc 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/GetPerfTest.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/GetPerfTest.groovy
@@ -45,8 +45,8 @@ class GetPerfTest extends CpsPerfTestBase {
where: 'the following parameters are used'
scenario | fetchDescendantsOption | anchor || durationLimit | expectedNumberOfDataNodes
'no descendants' | OMIT_DESCENDANTS | 'openroadm1' || 100 | 1
- 'direct descendants' | DIRECT_CHILDREN_ONLY | 'openroadm2' || 100 | 1 + 50
- 'all descendants' | INCLUDE_ALL_DESCENDANTS | 'openroadm3' || 350 | 1 + 50 * 86
+ 'direct descendants' | DIRECT_CHILDREN_ONLY | 'openroadm2' || 150 | 1 + 50
+ 'all descendants' | INCLUDE_ALL_DESCENDANTS | 'openroadm3' || 600 | 1 + 50 * 86
}
def 'Read data trees for multiple xpaths'() {
@@ -58,8 +58,8 @@ class GetPerfTest extends CpsPerfTestBase {
stopWatch.stop()
assert countDataNodesInTree(result) == 50 * 86
def durationInMillis = stopWatch.getTotalTimeMillis()
- then: 'all data is read within 350 ms'
- recordAndAssertPerformance("Read datatrees for multiple xpaths", 350, durationInMillis)
+ then: 'all data is read within 500 ms'
+ recordAndAssertPerformance("Read datatrees for multiple xpaths", 500, durationInMillis)
}
def 'Read complete data trees using #scenario.'() {
@@ -75,10 +75,10 @@ class GetPerfTest extends CpsPerfTestBase {
recordAndAssertPerformance("Read datatrees using ${scenario}", durationLimit, durationInMillis)
where: 'the following xpaths are used'
scenario | anchorPrefix | xpath || durationLimit | expectedNumberOfDataNodes
- 'bookstore root' | 'bookstore' | '/' || 250 | 78
- 'bookstore top element' | 'bookstore' | '/bookstore' || 250 | 78
- 'openroadm root' | 'openroadm' | '/' || 1000 | 1 + 50 * 86
- 'openroadm top element' | 'openroadm' | '/openroadm-devices' || 1000 | 1 + 50 * 86
+ 'bookstore root' | 'bookstore' | '/' || 300 | 78
+ 'bookstore top element' | 'bookstore' | '/bookstore' || 300 | 78
+ 'openroadm root' | 'openroadm' | '/' || 1200 | 1 + 50 * 86
+ 'openroadm top element' | 'openroadm' | '/openroadm-devices' || 1200 | 1 + 50 * 86
}
}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy
index ecc44ff9d..885f1c203 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/QueryPerfTest.groovy
@@ -45,10 +45,10 @@ class QueryPerfTest extends CpsPerfTestBase {
recordAndAssertPerformance("Query 1 anchor ${scenario}", durationLimit, durationInMillis)
where: 'the following parameters are used'
scenario | anchor | cpsPath || durationLimit | expectedNumberOfDataNodes
- 'top element' | 'openroadm1' | '/openroadm-devices' || 250 | 50 * 86 + 1
- 'leaf condition' | 'openroadm2' | '//openroadm-device[@ne-state="inservice"]' || 250 | 50 * 86
- 'ancestors' | 'openroadm3' | '//openroadm-device/ancestor::openroadm-devices' || 250 | 50 * 86 + 1
- 'leaf condition + ancestors' | 'openroadm4' | '//openroadm-device[@status="success"]/ancestor::openroadm-devices' || 250 | 50 * 86 + 1
+ 'top element' | 'openroadm1' | '/openroadm-devices' || 500 | 50 * 86 + 1
+ 'leaf condition' | 'openroadm2' | '//openroadm-device[@ne-state="inservice"]' || 500 | 50 * 86
+ 'ancestors' | 'openroadm3' | '//openroadm-device/ancestor::openroadm-devices' || 500 | 50 * 86 + 1
+ 'leaf condition + ancestors' | 'openroadm4' | '//openroadm-device[@status="success"]/ancestor::openroadm-devices' || 500 | 50 * 86 + 1
}
def 'Query complete data trees across all anchors with #scenario.'() {
@@ -63,10 +63,10 @@ class QueryPerfTest extends CpsPerfTestBase {
recordAndAssertPerformance("Query across anchors ${scenario}", durationLimit, durationInMillis)
where: 'the following parameters are used'
scenario | cpspath || durationLimit | expectedNumberOfDataNodes
- 'top element' | '/openroadm-devices' || 1000 | 5 * (50 * 86 + 1)
- 'leaf condition' | '//openroadm-device[@ne-state="inservice"]' || 1000 | 5 * (50 * 86)
- 'ancestors' | '//openroadm-device/ancestor::openroadm-devices' || 1000 | 5 * (50 * 86 + 1)
- 'leaf condition + ancestors' | '//openroadm-device[@status="success"]/ancestor::openroadm-devices' || 1000 | 5 * (50 * 86 + 1)
+ 'top element' | '/openroadm-devices' || 2000 | 5 * (50 * 86 + 1)
+ 'leaf condition' | '//openroadm-device[@ne-state="inservice"]' || 2000 | 5 * (50 * 86)
+ 'ancestors' | '//openroadm-device/ancestor::openroadm-devices' || 2000 | 5 * (50 * 86 + 1)
+ 'leaf condition + ancestors' | '//openroadm-device[@status="success"]/ancestor::openroadm-devices' || 2000 | 5 * (50 * 86 + 1)
}
def 'Query with leaf condition and #scenario.'() {
@@ -82,8 +82,8 @@ class QueryPerfTest extends CpsPerfTestBase {
where: 'the following parameters are used'
scenario | fetchDescendantsOption | anchor || durationLimit | expectedNumberOfDataNodes
'no descendants' | OMIT_DESCENDANTS | 'openroadm1' || 100 | 50
- 'direct descendants' | DIRECT_CHILDREN_ONLY | 'openroadm2' || 150 | 50 * 2
- 'all descendants' | INCLUDE_ALL_DESCENDANTS | 'openroadm3' || 200 | 50 * 86
+ 'direct descendants' | DIRECT_CHILDREN_ONLY | 'openroadm2' || 200 | 50 * 2
+ 'all descendants' | INCLUDE_ALL_DESCENDANTS | 'openroadm3' || 500 | 50 * 86
}
def 'Query ancestors with #scenario.'() {
@@ -100,7 +100,7 @@ class QueryPerfTest extends CpsPerfTestBase {
scenario | fetchDescendantsOption | anchor || durationLimit | expectedNumberOfDataNodes
'no descendants' | OMIT_DESCENDANTS | 'openroadm1' || 100 | 1
'direct descendants' | DIRECT_CHILDREN_ONLY | 'openroadm2' || 200 | 1 + 50
- 'all descendants' | INCLUDE_ALL_DESCENDANTS | 'openroadm3' || 300 | 1 + 50 * 86
+ 'all descendants' | INCLUDE_ALL_DESCENDANTS | 'openroadm3' || 500 | 1 + 50 * 86
}
}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/UpdatePerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/UpdatePerfTest.groovy
new file mode 100644
index 000000000..c28190865
--- /dev/null
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/UpdatePerfTest.groovy
@@ -0,0 +1,62 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the 'License');
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an 'AS IS' BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.integration.performance.cps
+
+import java.time.OffsetDateTime
+import org.onap.cps.api.CpsDataService
+import org.onap.cps.integration.performance.base.CpsPerfTestBase
+
+class UpdatePerfTest extends CpsPerfTestBase {
+
+ CpsDataService objectUnderTest
+
+ def setup() { objectUnderTest = cpsDataService }
+
+ def 'Update 1 data node with descendants'() {
+ given: 'a list of data nodes to update as JSON'
+ def parentNodeXpath = "/openroadm-devices/openroadm-device[@device-id='C201-7-1A-10']"
+ def jsonData = readResourceDataFile('openroadm/innerNode.json').replace('NODE_ID_HERE', '10')
+ when: 'the fragment entities are updated by the data nodes'
+ stopWatch.start()
+ objectUnderTest.updateDataNodeAndDescendants(CPS_PERFORMANCE_TEST_DATASPACE, 'openroadm1', parentNodeXpath, jsonData, OffsetDateTime.now())
+ stopWatch.stop()
+ def updateDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'update duration is under 1000 milliseconds'
+ recordAndAssertPerformance('Update 1 data node', 1000, updateDurationInMillis)
+ }
+
+ def 'Batch update 10 data nodes with descendants'() {
+ given: 'a list of data nodes to update as JSON'
+ def innerNodeJson = readResourceDataFile('openroadm/innerNode.json')
+ def nodesJsonData = (20..30).collectEntries {[
+ "/openroadm-devices/openroadm-device[@device-id='C201-7-1A-" + it + "']",
+ innerNodeJson.replace('NODE_ID_HERE', it.toString())
+ ]}
+ when: 'the fragment entities are updated by the data nodes'
+ stopWatch.start()
+ objectUnderTest.updateDataNodesAndDescendants(CPS_PERFORMANCE_TEST_DATASPACE, 'openroadm2', nodesJsonData, OffsetDateTime.now())
+ stopWatch.stop()
+ def updateDurationInMillis = stopWatch.getTotalTimeMillis()
+ then: 'update duration is under 5000 milliseconds'
+ recordAndAssertPerformance('Update 10 data nodes', 5000, updateDurationInMillis)
+ }
+
+}
diff --git a/integration-test/src/test/resources/data/bookstore/bookstore.yang b/integration-test/src/test/resources/data/bookstore/bookstore.yang
index 62ebc7320..f3219a01c 100644
--- a/integration-test/src/test/resources/data/bookstore/bookstore.yang
+++ b/integration-test/src/test/resources/data/bookstore/bookstore.yang
@@ -21,6 +21,15 @@ module stores {
type string;
}
+ container webinfo {
+ leaf domain-name {
+ type string;
+ }
+ leaf contact-email {
+ type string;
+ }
+ }
+
container premises {
list addresses {
key "house-number street";