-rw-r--r--  cps-application/src/main/resources/application.yml | 2
-rw-r--r--  cps-dependencies/pom.xml | 4
-rw-r--r--  cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java | 79
-rw-r--r--  cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy | 50
-rw-r--r--  cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy | 5
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java | 24
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DmiAsyncRequestResponseEventConsumer.java | 1
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java | 25
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java | 67
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java | 8
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java | 37
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java | 10
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java | 3
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java | 2
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy | 32
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy | 14
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy | 2
-rw-r--r--  cps-parent/pom.xml | 2
-rw-r--r--  docs/deployment.rst | 88
-rw-r--r--  docs/release-notes.rst | 1
-rw-r--r--  integration-test/src/test/resources/application-module-sync-delayed.yml | 2
-rw-r--r--  integration-test/src/test/resources/application.yml | 1
-rw-r--r--  k6-tests/ncmp/common/passthrough-crud.js | 10
-rw-r--r--  k6-tests/ncmp/common/search-base.js | 4
-rw-r--r--  k6-tests/ncmp/common/utils.js | 32
-rw-r--r--  k6-tests/ncmp/ncmp-test-runner.js | 18
26 files changed, 362 insertions, 161 deletions
diff --git a/cps-application/src/main/resources/application.yml b/cps-application/src/main/resources/application.yml
index decc03b263..9e9d57a441 100644
--- a/cps-application/src/main/resources/application.yml
+++ b/cps-application/src/main/resources/application.yml
@@ -236,8 +236,10 @@ ncmp:
timers:
advised-modules-sync:
+ initial-delay-ms: 40000
sleep-time-ms: 5000
cm-handle-data-sync:
+ initial-delay-ms: 40000
sleep-time-ms: 30000
subscription-forwarding:
dmi-response-timeout-ms: 30000
diff --git a/cps-dependencies/pom.xml b/cps-dependencies/pom.xml
index 82ec504b80..0bb976e7e2 100644
--- a/cps-dependencies/pom.xml
+++ b/cps-dependencies/pom.xml
@@ -86,14 +86,14 @@
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
- <version>3.4.1</version>
+ <version>3.4.4</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
- <version>3.4.1</version>
+ <version>3.4.4</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
diff --git a/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java
new file mode 100644
index 0000000000..d259d91796
--- /dev/null
+++ b/cps-ncmp-rest/src/main/java/org/onap/cps/ncmp/rest/controller/DataJobControllerForTest.java
@@ -0,0 +1,79 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.ncmp.rest.controller;
+
+import io.swagger.v3.oas.annotations.Hidden;
+import java.util.List;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.onap.cps.ncmp.api.datajobs.DataJobService;
+import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata;
+import org.onap.cps.ncmp.api.datajobs.models.DataJobRequest;
+import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest;
+import org.onap.cps.ncmp.api.datajobs.models.SubJobWriteResponse;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * Controller responsible for handling data job write operations.
+ * This class exposes an API endpoint that accepts a write request for a data job and processes it.
+ */
+@Slf4j
+@RestController
+@RequestMapping("/do-not-use/dataJobs")
+@RequiredArgsConstructor
+public class DataJobControllerForTest {
+
+ private final DataJobService dataJobService;
+
+ /**
+ * Handles POST requests to write a data job. This endpoint is unsupported and intended for testing purposes only.
+ * This internal endpoint processes a data job write request by extracting necessary metadata and data
+ * from the request body and delegating the operation to the {@link DataJobService}.
+ * <p><b>Note:</b> The {@link DataJobRequest} parameter is created and used for testing purposes only.
+ * In a production environment, data job write operations are not triggered through internal workflows.</p>
+ *
+ * @param authorization The optional authorization token sent in the request header.
+ * @param dataJobId The unique identifier for the data job, extracted from the URL path.
+ * @param dataJobRequest The request payload containing metadata and data for the data job write operation.
+ * @return A {@link ResponseEntity} containing a list of {@link SubJobWriteResponse} objects representing the
+ * status of each sub-job within the data job, or an error response with an appropriate HTTP status code.
+ */
+ @PostMapping("/{dataJobId}/write")
+ @Hidden
+ public ResponseEntity<List<SubJobWriteResponse>> writeDataJob(@RequestHeader(value = "Authorization",
+ required = false) final String authorization,
+ @PathVariable("dataJobId") final String dataJobId,
+ @RequestBody final DataJobRequest dataJobRequest) {
+ log.info("Internal API: writeDataJob invoked for {}", dataJobId);
+ final DataJobMetadata dataJobMetadata = dataJobRequest.dataJobMetadata();
+ final DataJobWriteRequest dataJobWriteRequest = dataJobRequest.dataJobWriteRequest();
+ final List<SubJobWriteResponse> subJobWriteResponses = dataJobService.writeDataJob(authorization, dataJobId,
+ dataJobMetadata, dataJobWriteRequest);
+ return ResponseEntity.ok(subJobWriteResponses);
+ }
+}
+
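
A note on exercising the new test-only endpoint: the sketch below is illustrative only and is not part of the patch. It assumes the application is reachable on localhost:8080, and that the JSON property names follow the DataJobRequest record components (dataJobMetadata, dataJobWriteRequest); the nested field names of DataJobMetadata and WriteOperation are guesses, not confirmed by this change.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    // Hypothetical client for the endpoint added above; values mirror DataJobControllerForTestSpec.
    class WriteDataJobClientSketch {
        public static void main(final String[] args) throws Exception {
            // Assumed JSON shape: top-level names mirror the DataJobRequest record components;
            // nested names (destination, path, op, ...) are placeholders for illustration only.
            final String requestBody = """
                    {
                      "dataJobMetadata": { "destination": "some destination",
                                           "acceptType": "application/json",
                                           "contentType": "application/json" },
                      "dataJobWriteRequest": { "data": [ { "path": "/path/to/node",
                                                           "op": "create",
                                                           "operationId": "op123",
                                                           "value": "value1" } ] }
                    }""";
            final HttpRequest httpRequest = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8080/do-not-use/dataJobs/my-job-id/write"))
                    .header("Content-Type", "application/json")
                    .header("Authorization", "my authorization") // optional, the header is not required
                    .POST(HttpRequest.BodyPublishers.ofString(requestBody))
                    .build();
            final HttpResponse<String> httpResponse =
                    HttpClient.newHttpClient().send(httpRequest, HttpResponse.BodyHandlers.ofString());
            System.out.println(httpResponse.statusCode() + " " + httpResponse.body());
        }
    }
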
diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy
new file mode 100644
index 0000000000..6fc4a699e5
--- /dev/null
+++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/DataJobControllerForTestSpec.groovy
@@ -0,0 +1,50 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.ncmp.rest.controller
+
+import org.onap.cps.ncmp.api.datajobs.DataJobService
+import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata
+import org.onap.cps.ncmp.api.datajobs.models.DataJobRequest
+import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest
+import org.onap.cps.ncmp.api.datajobs.models.WriteOperation
+import org.springframework.http.HttpStatus
+import spock.lang.Specification
+
+class DataJobControllerForTestSpec extends Specification {
+
+ DataJobService mockDataJobService = Mock()
+
+ def objectUnderTest = new DataJobControllerForTest(mockDataJobService)
+
+ def 'Write Data Job request'() {
+ given: 'a valid datajob write request'
+ def dataJobMetadata = new DataJobMetadata('some destination', 'some accept type', 'some content type')
+ def writeOperations = [ new WriteOperation('/path/to/node', 'create', 'op123', 'value1') ]
+ def dataJobWriteRequest = new DataJobWriteRequest(writeOperations)
+ def dataJobRequest = new DataJobRequest(dataJobMetadata, dataJobWriteRequest)
+ when: 'write data job is called'
+ def result = objectUnderTest.writeDataJob('my authorization', 'my job', dataJobRequest)
+ then: 'response is 200 OK'
+ assert result.statusCode == HttpStatus.OK
+ and: 'the service method is called once with expected parameters'
+ 1 * mockDataJobService.writeDataJob('my authorization', 'my job', dataJobMetadata, dataJobWriteRequest)
+ }
+}
diff --git a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy
index aad04a18ae..3a9a0bb09c 100644
--- a/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy
+++ b/cps-ncmp-rest/src/test/groovy/org/onap/cps/ncmp/rest/controller/NetworkCmProxyRestExceptionHandlerSpec.groovy
@@ -1,7 +1,7 @@
/*
* ============LICENSE_START=======================================================
* Copyright (C) 2021 highstreet technologies GmbH
- * Modifications Copyright (C) 2021-2024 Nordix Foundation
+ * Modifications Copyright (C) 2021-2025 OpenInfra Foundation Europe. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -102,6 +102,9 @@ class NetworkCmProxyRestExceptionHandlerSpec extends Specification {
@SpringBean
NcmpPassthroughResourceRequestHandler StubbedNcmpPassthroughResourceRequestHandler = Stub()
+ @SpringBean
+ DataJobControllerForTest stubbedDataJobControllerForTest = Stub()
+
@Value('${rest.api.ncmp-base-path}')
def basePathNcmp
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java
new file mode 100644
index 0000000000..fe73a601b9
--- /dev/null
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/datajobs/models/DataJobRequest.java
@@ -0,0 +1,24 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.ncmp.api.datajobs.models;
+
+public record DataJobRequest(DataJobMetadata dataJobMetadata, DataJobWriteRequest dataJobWriteRequest) {
+}
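
For readers unfamiliar with Java records: the sketch below (not part of the patch) shows how the new DataJobRequest is built and read back; the constructor arguments mirror DataJobControllerForTestSpec elsewhere in this change.

    import java.util.List;
    import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata;
    import org.onap.cps.ncmp.api.datajobs.models.DataJobRequest;
    import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest;
    import org.onap.cps.ncmp.api.datajobs.models.WriteOperation;

    class DataJobRequestUsageSketch {
        DataJobRequest buildExampleRequest() {
            final DataJobMetadata dataJobMetadata =
                    new DataJobMetadata("some destination", "some accept type", "some content type");
            final DataJobWriteRequest dataJobWriteRequest =
                    new DataJobWriteRequest(List.of(new WriteOperation("/path/to/node", "create", "op123", "value1")));
            final DataJobRequest dataJobRequest = new DataJobRequest(dataJobMetadata, dataJobWriteRequest);
            // the controller reads the two components back via the generated accessors
            dataJobRequest.dataJobMetadata();
            dataJobRequest.dataJobWriteRequest();
            return dataJobRequest;
        }
    }
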
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DmiAsyncRequestResponseEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DmiAsyncRequestResponseEventConsumer.java
index e2803e89a1..f22a58fd49 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DmiAsyncRequestResponseEventConsumer.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/data/async/DmiAsyncRequestResponseEventConsumer.java
@@ -50,6 +50,7 @@ public class DmiAsyncRequestResponseEventConsumer {
topics = "${app.ncmp.async-m2m.topic}",
filter = "includeNonCloudEventsOnly",
groupId = "ncmp-async-rest-request-event-group",
+ containerFactory = "legacyEventConcurrentKafkaListenerContainerFactory",
properties = {"spring.json.value.default.type=org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent"})
public void consumeAndForward(final DmiAsyncRequestResponseEvent dmiAsyncRequestResponseEvent) {
log.debug("Consuming event {} ...", dmiAsyncRequestResponseEvent);
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java
index 04c3ad2fc6..56352c1c81 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImpl.java
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2024 Nordix Foundation
+ * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -31,6 +31,7 @@ import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest;
import org.onap.cps.ncmp.api.datajobs.models.DmiWriteOperation;
import org.onap.cps.ncmp.api.datajobs.models.ProducerKey;
import org.onap.cps.ncmp.api.datajobs.models.SubJobWriteResponse;
+import org.onap.cps.utils.JsonObjectMapper;
import org.springframework.stereotype.Service;
@Slf4j
@@ -40,6 +41,7 @@ public class DataJobServiceImpl implements DataJobService {
private final DmiSubJobRequestHandler dmiSubJobClient;
private final WriteRequestExaminer writeRequestExaminer;
+ private final JsonObjectMapper jsonObjectMapper;
@Override
public void readDataJob(final String authorization,
@@ -54,14 +56,25 @@ public class DataJobServiceImpl implements DataJobService {
final String dataJobId,
final DataJobMetadata dataJobMetadata,
final DataJobWriteRequest dataJobWriteRequest) {
- log.info("data job id for write operation is: {}", dataJobId);
+
+ log.info("Data Job ID: {} - Total operations received: {}", dataJobId, dataJobWriteRequest.data().size());
+ logJsonRepresentation("Initiating WRITE operation for Data Job ID: " + dataJobId, dataJobWriteRequest);
final Map<ProducerKey, List<DmiWriteOperation>> dmiWriteOperationsPerProducerKey =
writeRequestExaminer.splitDmiWriteOperationsFromRequest(dataJobId, dataJobWriteRequest);
- return dmiSubJobClient.sendRequestsToDmi(authorization,
- dataJobId,
- dataJobMetadata,
- dmiWriteOperationsPerProducerKey);
+ final List<SubJobWriteResponse> subJobWriteResponses = dmiSubJobClient.sendRequestsToDmi(authorization,
+ dataJobId, dataJobMetadata, dmiWriteOperationsPerProducerKey);
+
+ log.info("Data Job ID: {} - Received {} sub-job(s) from DMI.", dataJobId, subJobWriteResponses.size());
+ logJsonRepresentation("Finalized subJobWriteResponses for Data Job ID: " + dataJobId, subJobWriteResponses);
+ return subJobWriteResponses;
+ }
+
+ private void logJsonRepresentation(final String description, final Object object) {
+ if (log.isDebugEnabled()) {
+ final String objectAsJsonString = jsonObjectMapper.asJsonString(object);
+ log.debug("{} (JSON): {}", description, objectAsJsonString);
+ }
}
}
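
The isDebugEnabled() guard above matters because asJsonString(...) serializes the whole payload; the standalone sketch below (not from the patch, with a plain placeholder instead of JsonObjectMapper) illustrates the pattern in isolation.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class DebugGuardSketch {
        private static final Logger log = LoggerFactory.getLogger(DebugGuardSketch.class);

        void logJsonRepresentation(final String description, final Object object) {
            if (log.isDebugEnabled()) {
                // serialization cost is only paid when DEBUG logging is actually enabled
                log.debug("{} (JSON): {}", description, toJson(object));
            }
        }

        private String toJson(final Object object) {
            // stand-in for jsonObjectMapper.asJsonString(object), which may be expensive
            return String.valueOf(object);
        }
    }
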
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java
index bdf3785a7a..8d1d50ec15 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/CmHandleQueryServiceImpl.java
@@ -29,10 +29,10 @@ import static org.onap.cps.ncmp.impl.inventory.NcmpPersistence.NCMP_DMI_REGISTRY
import com.hazelcast.map.IMap;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
-import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import org.onap.cps.api.CpsDataService;
import org.onap.cps.api.CpsQueryService;
@@ -54,6 +54,7 @@ import org.springframework.stereotype.Component;
@Component
public class CmHandleQueryServiceImpl implements CmHandleQueryService {
private static final String ANCESTOR_CM_HANDLES = "/ancestor::cm-handles";
+ public static final String CM_HANDLE_ID = "id";
private static final String ALTERNATE_ID = "alternate-id";
private static final Integer NO_LIMIT = 0;
private final CpsDataService cpsDataService;
@@ -147,7 +148,7 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService {
@Override
public Collection<String> getAllCmHandleReferences(final boolean outputAlternateId) {
- final String attributeName = outputAlternateId ? ALTERNATE_ID : "id";
+ final String attributeName = outputAlternateId ? ALTERNATE_ID : CM_HANDLE_ID;
final String cpsPath = String.format("%s/cm-handles/@%s", NCMP_DMI_REGISTRY_PARENT, attributeName);
return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class);
}
@@ -177,20 +178,23 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService {
for (final Map.Entry<String, TrustLevel> mapEntry : trustLevelPerDmiPlugin.entrySet()) {
final String dmiPluginIdentifier = mapEntry.getKey();
final TrustLevel dmiTrustLevel = mapEntry.getValue();
- final Collection<String> candidateCmHandleIds = getCmHandleReferencesByDmiPluginIdentifier(
- dmiPluginIdentifier, false);
- final Set<String> candidateCmHandleIdsSet = new HashSet<>(candidateCmHandleIds);
+ final Map<String, String> candidateCmHandleReferences =
+ getCmHandleReferencesMapByDmiPluginIdentifier(dmiPluginIdentifier);
final Map<String, TrustLevel> trustLevelPerCmHandleIdInBatch =
- trustLevelPerCmHandleId.getAll(candidateCmHandleIdsSet);
- trustLevelPerCmHandleIdInBatch.forEach((cmHandleId, trustLevel) -> {
- final TrustLevel effectiveTrustLevel = trustLevel.getEffectiveTrustLevel(dmiTrustLevel);
- if (targetTrustLevel.equals(effectiveTrustLevel)) {
- selectedCmHandleReferences.add(cmHandleId);
+ trustLevelPerCmHandleId.getAll(candidateCmHandleReferences.keySet());
+ for (final Map.Entry<String, String> candidateCmHandleReference : candidateCmHandleReferences.entrySet()) {
+ final TrustLevel candidateCmHandleTrustLevel =
+ trustLevelPerCmHandleIdInBatch.get(candidateCmHandleReference.getKey());
+ final TrustLevel effectiveTrustlevel =
+ candidateCmHandleTrustLevel.getEffectiveTrustLevel(dmiTrustLevel);
+ if (targetTrustLevel.equals(effectiveTrustlevel)) {
+ if (outputAlternateId) {
+ selectedCmHandleReferences.add(candidateCmHandleReference.getValue());
+ } else {
+ selectedCmHandleReferences.add(candidateCmHandleReference.getKey());
+ }
}
- });
- }
- if (outputAlternateId) {
- return getAlternateIdsByCmHandleIds(selectedCmHandleReferences);
+ }
}
return selectedCmHandleReferences;
}
@@ -220,40 +224,47 @@ public class CmHandleQueryServiceImpl implements CmHandleQueryService {
private Set<String> getIdsByDmiPluginIdentifierAndDmiProperty(final String dmiPluginIdentifier,
final String dmiProperty,
final boolean outputAlternateId) {
- final String attributeName = outputAlternateId ? ALTERNATE_ID : "id";
+ final String attributeName = outputAlternateId ? ALTERNATE_ID : CM_HANDLE_ID;
final String cpsPath = String.format("%s/cm-handles[@%s='%s']/@%s",
NCMP_DMI_REGISTRY_PARENT, dmiProperty, dmiPluginIdentifier, attributeName);
return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class);
}
- private Collection<String> getAlternateIdsByCmHandleIds(final Collection<String> cmHandleIds) {
-
- final String cpsPath = NCMP_DMI_REGISTRY_PARENT + "/cm-handles["
- + createFormattedQueryString(cmHandleIds) + "]/@alternate-id";
+ private Collection<DataNode> getDataNodesByDmiPluginIdentifierAndDmiProperty(final String dmiPluginIdentifier,
+ final String dmiProperty) {
+ return cpsQueryService.queryDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR,
+ NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@" + dmiProperty + "='" + dmiPluginIdentifier + "']",
+ OMIT_DESCENDANTS);
+ }
- return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class);
+ private Map<String, String> getCmHandleReferencesMapByDmiPluginIdentifier(final String dmiPluginIdentifier) {
+ final Map<String, String> cmHandleReferencesMap = new HashMap<>();
+ for (final ModelledDmiServiceLeaves modelledDmiServiceLeaf : ModelledDmiServiceLeaves.values()) {
+ final Collection<DataNode> cmHandlesAsDataNodes = getDataNodesByDmiPluginIdentifierAndDmiProperty(
+ dmiPluginIdentifier, modelledDmiServiceLeaf.getLeafName());
+ for (final DataNode cmHandleAsDataNode : cmHandlesAsDataNodes) {
+ final String cmHandleId = cmHandleAsDataNode.getLeaves().get(CM_HANDLE_ID).toString();
+ final String alternateId = cmHandleAsDataNode.getLeaves().get(ALTERNATE_ID).toString();
+ cmHandleReferencesMap.put(cmHandleId, alternateId);
+ }
+ }
+ return cmHandleReferencesMap;
}
private Collection<String> getCmHandleReferencesByProperties(final PropertyType propertyType,
final String propertyName,
final String propertyValue,
final boolean outputAlternateId) {
- final String attributeName = outputAlternateId ? ALTERNATE_ID : "id";
+ final String attributeName = outputAlternateId ? ALTERNATE_ID : CM_HANDLE_ID;
final String cpsPath = String.format("//%s[@name='%s' and @value='%s']%s/@%s",
propertyType.getYangContainerName(), propertyName, propertyValue, ANCESTOR_CM_HANDLES, attributeName);
return cpsQueryService.queryDataLeaf(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, cpsPath, String.class);
}
- private String createFormattedQueryString(final Collection<String> cmHandleIds) {
- return cmHandleIds.stream()
- .map(cmHandleId -> "@id='" + cmHandleId + "'")
- .collect(Collectors.joining(" or "));
- }
-
private DataNode getCmHandleState(final String cmHandleId) {
cpsValidator.validateNameCharacters(cmHandleId);
final String xpath = NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@id='" + cmHandleId + "']/state";
return cpsDataService.getDataNodes(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR,
xpath, OMIT_DESCENDANTS).iterator().next();
}
-}
\ No newline at end of file
+}
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java
index 6bb1bfc86c..aeeb86592c 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistence.java
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022-2025 Nordix Foundation
+ * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved.
* Modifications Copyright (C) 2023 TechMahindra Ltd.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -118,7 +118,7 @@ public interface InventoryPersistence extends NcmpPersistence {
* Get data node with the given cm handle id.
*
* @param cmHandleId cmHandle ID
- * @param fetchDescendantsOption fetchDescendantsOption
+ * @param fetchDescendantsOption fetch descendants option
* @return data node
*/
Collection<DataNode> getCmHandleDataNodeByCmHandleId(String cmHandleId,
@@ -144,9 +144,11 @@ public interface InventoryPersistence extends NcmpPersistence {
* Get collection of data nodes of given cm handles.
*
* @param cmHandleIds collection of cmHandle IDs
+ * @param fetchDescendantsOption fetch descendants option
* @return collection of data nodes
*/
- Collection<DataNode> getCmHandleDataNodes(Collection<String> cmHandleIds);
+ Collection<DataNode> getCmHandleDataNodes(Collection<String> cmHandleIds,
+ FetchDescendantsOption fetchDescendantsOption);
/**
* get CM handles that has given module names.
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java
index 02e711287e..88322903a3 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImpl.java
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022-2025 Nordix Foundation
+ * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved.
* Modifications Copyright (C) 2022 Bell Canada
* Modifications Copyright (C) 2024 TechMahindra Ltd.
* ================================================================================
@@ -34,8 +34,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import java.util.stream.Stream;
import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
import org.onap.cps.api.CpsAnchorService;
import org.onap.cps.api.CpsDataService;
import org.onap.cps.api.CpsModuleService;
@@ -134,7 +134,7 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv
dataValidationException.getMessage());
}
});
- return YangDataConverter.toYangModelCmHandles(getCmHandleDataNodes(validCmHandleIds));
+ return YangDataConverter.toYangModelCmHandles(getCmHandleDataNodes(validCmHandleIds, INCLUDE_ALL_DESCENDANTS));
}
@Override
@@ -201,22 +201,22 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv
}
@Override
- public Collection<DataNode> getCmHandleDataNodes(final Collection<String> cmHandleIds) {
+ public Collection<DataNode> getCmHandleDataNodes(final Collection<String> cmHandleIds,
+ final FetchDescendantsOption fetchDescendantsOption) {
final Collection<String> xpaths = new ArrayList<>(cmHandleIds.size());
cmHandleIds.forEach(cmHandleId -> xpaths.add(getXPathForCmHandleById(cmHandleId)));
- return this.getDataNodes(xpaths);
+ return this.getDataNodes(xpaths, fetchDescendantsOption);
}
@Override
public Collection<String> getCmHandleReferencesWithGivenModules(final Collection<String> moduleNamesForQuery,
final boolean outputAlternateId) {
- if (outputAlternateId) {
- final Collection<String> cmHandleIds =
+ final Collection<String> cmHandleIds =
cpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, moduleNamesForQuery);
- return getAlternateIdsFromDataNodes(getCmHandleDataNodes(cmHandleIds));
- } else {
- return cpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, moduleNamesForQuery);
+ if (outputAlternateId) {
+ return getAlternateIdsForCmHandleIds(cmHandleIds);
}
+ return cmHandleIds;
}
@Override
@@ -241,12 +241,6 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv
NCMP_DMI_REGISTRY_PARENT + "/cm-handles[@alternate-id='", "']"));
}
- private String getCpsPathForCmHandlesByReferences(final Collection<String> cmHandleReferences) {
- return cmHandleReferences.stream()
- .flatMap(id -> Stream.of("@id='" + id + "'", "@alternate-id='" + id + "'"))
- .collect(Collectors.joining(" or ", NCMP_DMI_REGISTRY_PARENT + "/cm-handles[", "]"));
- }
-
private static String createStateJsonData(final String state) {
return "{\"state\":" + state + "}";
}
@@ -255,8 +249,13 @@ public class InventoryPersistenceImpl extends NcmpPersistenceImpl implements Inv
return "{\"cm-handles\":" + jsonObjectMapper.asJsonString(yangModelCmHandles) + "}";
}
- private Collection<String> getAlternateIdsFromDataNodes(final Collection<DataNode> dataNodes) {
- return dataNodes.stream().map(dataNode ->
- (String) dataNode.getLeaves().get("alternate-id")).collect(Collectors.toSet());
+
+ private Collection<String> getAlternateIdsForCmHandleIds(final Collection<String> cmHandleIds) {
+ final Collection<DataNode> dataNodes = getCmHandleDataNodes(cmHandleIds, OMIT_DESCENDANTS);
+ return dataNodes.stream()
+ .map(DataNode::getLeaves)
+ .map(leaves -> (String) leaves.get("alternate-id"))
+ .filter(StringUtils::isNotBlank)
+ .collect(Collectors.toSet());
}
}
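
With the widened signature, callers now choose the fetch depth explicitly. A minimal usage sketch (not part of the patch; package names for DataNode and FetchDescendantsOption are assumptions, and the bean is assumed to be injected):

    import java.util.Collection;
    import java.util.List;
    import org.onap.cps.api.model.DataNode;                    // package name assumed
    import org.onap.cps.api.parameters.FetchDescendantsOption; // package name assumed
    import org.onap.cps.ncmp.impl.inventory.InventoryPersistence;

    class CmHandleLookupSketch {
        private final InventoryPersistence inventoryPersistence;

        CmHandleLookupSketch(final InventoryPersistence inventoryPersistence) {
            this.inventoryPersistence = inventoryPersistence;
        }

        Collection<DataNode> cheapLookup() {
            // OMIT_DESCENDANTS keeps alternate-id style lookups cheap; INCLUDE_ALL_DESCENDANTS is
            // used above where the full cm-handle subtree is needed (getYangModelCmHandles)
            return inventoryPersistence.getCmHandleDataNodes(List.of("ch-1"),
                    FetchDescendantsOption.OMIT_DESCENDANTS);
        }
    }
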
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java
index 6076895f0f..bafb06578e 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/ParameterizedCmHandleQueryServiceImpl.java
@@ -288,14 +288,4 @@ public class ParameterizedCmHandleQueryServiceImpl implements ParameterizedCmHan
}
}
- private Collection<String> collectCmHandleReferencesFromDataNodes(final Collection<DataNode> dataNodes,
- final boolean outputAlternateId) {
- if (outputAlternateId) {
- return dataNodes.stream().map(dataNode ->
- (String) dataNode.getLeaves().get("alternate-id")).collect(Collectors.toSet());
- } else {
- return dataNodes.stream().map(dataNode ->
- (String) dataNode.getLeaves().get("id")).collect(Collectors.toSet());
- }
- }
}
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java
index af78d95742..708077915b 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/DataSyncWatchdog.java
@@ -55,7 +55,8 @@ public class DataSyncWatchdog {
* Execute Cm Handle poll which queries the cm handle state in 'READY' and Operational Datastore Sync State in
* 'UNSYNCHRONIZED'.
*/
- @Scheduled(fixedDelayString = "${ncmp.timers.cm-handle-data-sync.sleep-time-ms:30000}")
+ @Scheduled(initialDelayString = "${ncmp.timers.cm-handle-data-sync.initial-delay-ms:40000}",
+ fixedDelayString = "${ncmp.timers.cm-handle-data-sync.sleep-time-ms:30000}")
public void executeUnSynchronizedReadyCmHandlePoll() {
moduleOperationsUtils.getUnsynchronizedReadyCmHandles().forEach(unSynchronizedReadyCmHandle -> {
final String cmHandleId = unSynchronizedReadyCmHandle.getId();
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java
index 8c9ec03dd9..62eb514953 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/impl/inventory/sync/ModuleSyncWatchdog.java
@@ -53,7 +53,7 @@ public class ModuleSyncWatchdog {
* This method will only finish when there are no more 'ADVISED' cm handles in the DB.
* This method is triggered on a configurable interval (ncmp.timers.advised-modules-sync.sleep-time-ms)
*/
- @Scheduled(initialDelayString = "${test.ncmp.timers.advised-modules-sync.initial-delay-ms:0}",
+ @Scheduled(initialDelayString = "${ncmp.timers.advised-modules-sync.initial-delay-ms:40000}",
fixedDelayString = "${ncmp.timers.advised-modules-sync.sleep-time-ms:5000}")
public void moduleSyncAdvisedCmHandles() {
log.debug("Processing module sync watchdog waking up.");
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy
index 4b536b9710..9f0e134466 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/datajobs/DataJobServiceImplSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2024 Nordix Foundation
+ * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -29,6 +29,7 @@ import org.onap.cps.ncmp.api.datajobs.models.DataJobReadRequest
import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest
import org.onap.cps.ncmp.api.datajobs.models.ReadOperation
import org.onap.cps.ncmp.api.datajobs.models.WriteOperation
+import org.onap.cps.utils.JsonObjectMapper
import org.slf4j.LoggerFactory
import spock.lang.Specification
@@ -36,8 +37,9 @@ class DataJobServiceImplSpec extends Specification {
def mockWriteRequestExaminer = Mock(WriteRequestExaminer)
def mockDmiSubJobRequestHandler = Mock(DmiSubJobRequestHandler)
+ def mockJsonObjectMapper = Mock(JsonObjectMapper)
- def objectUnderTest = new DataJobServiceImpl(mockDmiSubJobRequestHandler, mockWriteRequestExaminer)
+ def objectUnderTest = new DataJobServiceImpl(mockDmiSubJobRequestHandler, mockWriteRequestExaminer, mockJsonObjectMapper)
def myDataJobMetadata = new DataJobMetadata('', '', '')
def authorization = 'my authorization header'
@@ -45,7 +47,7 @@ class DataJobServiceImplSpec extends Specification {
def logger = Spy(ListAppender<ILoggingEvent>)
def setup() {
- setupLogger()
+ setupLogger(Level.DEBUG)
}
def cleanup() {
@@ -62,22 +64,32 @@ class DataJobServiceImplSpec extends Specification {
assert loggingEvent.formattedMessage.contains('data job id for read operation is: my-job-id')
}
- def 'Write data-job request.'() {
+ def 'Write data-job request and verify logging when info enabled.'() {
given: 'data job metadata and write request'
def dataJobWriteRequest = new DataJobWriteRequest([new WriteOperation('', '', '', null)])
- and: 'a map of producer key and dmi 3gpp write operation'
+ and: 'a map of producer key and DMI 3GPP write operations'
def dmiWriteOperationsPerProducerKey = [:]
- when: 'write data job request is processed'
+ and: 'mocking the splitDmiWriteOperationsFromRequest method to return the expected data'
+ mockWriteRequestExaminer.splitDmiWriteOperationsFromRequest(_, _) >> dmiWriteOperationsPerProducerKey
+ and: 'mocking the sendRequestsToDmi method to simulate empty sub-job responses from the DMI request handler'
+ mockDmiSubJobRequestHandler.sendRequestsToDmi(authorization, 'my-job-id', myDataJobMetadata, dmiWriteOperationsPerProducerKey) >> []
+ when: 'the write data job request is processed'
objectUnderTest.writeDataJob(authorization, 'my-job-id', myDataJobMetadata, dataJobWriteRequest)
then: 'the examiner service is called and a map is returned'
1 * mockWriteRequestExaminer.splitDmiWriteOperationsFromRequest('my-job-id', dataJobWriteRequest) >> dmiWriteOperationsPerProducerKey
- and: 'the dmi request handler is called with the result from the examiner'
- 1 * mockDmiSubJobRequestHandler.sendRequestsToDmi(authorization, 'my-job-id', myDataJobMetadata, dmiWriteOperationsPerProducerKey)
+ and: 'write operation details are logged at debug level'
+ with(logger.list.find { it.level == Level.DEBUG }) {
+ assert it.formattedMessage.contains("Initiating WRITE operation for Data Job ID: my-job-id")
+ }
+ and: 'number of operations are logged at info level'
+ with(logger.list.find { it.level == Level.INFO }) {
+ assert it.formattedMessage.contains("Data Job ID: my-job-id - Total operations received: 1")
+ }
}
- def setupLogger() {
+ def setupLogger(Level level) {
def setupLogger = ((Logger) LoggerFactory.getLogger(DataJobServiceImpl.class))
- setupLogger.setLevel(Level.DEBUG)
+ setupLogger.setLevel(level)
setupLogger.addAppender(logger)
logger.start()
}
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy
index 2b0997b523..bc21360c47 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/impl/inventory/InventoryPersistenceImplSpec.groovy
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2022-2025 Nordix Foundation
+ * Copyright (C) 2022-2025 OpenInfra Foundation Europe. All rights reserved.
* Modifications Copyright (C) 2022 Bell Canada
* Modifications Copyright (C) 2024 TechMahindra Ltd.
* ================================================================================
@@ -336,15 +336,15 @@ class InventoryPersistenceImplSpec extends Specification {
}
def 'Get Alternate Ids for CM Handles that has given module names'() {
- given: 'A Collection of data nodes'
- def dataNodes = [new DataNode(xpath: "/dmi-registry/cm-handles[@id='ch-1']", leaves: ['id': 'ch-1', 'alternate-id': 'alt-1'])]
- when: 'the methods to get dataNodes is called and returns correct values'
+ given: 'cps anchor service returns a CM-handle ID for the given module name'
mockCpsAnchorService.queryAnchorNames(NFP_OPERATIONAL_DATASTORE_DATASPACE_NAME, ['sample-module-name']) >> ['ch-1']
- mockCpsDataService.getDataNodesForMultipleXpaths(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, ["/dmi-registry/cm-handles[@id='ch-1']"], INCLUDE_ALL_DESCENDANTS) >> dataNodes
- and: 'the method returns a result'
+ and: 'cps data service returns some data nodes for the given CM-handle ID'
+ def dataNodes = [new DataNode(xpath: "/dmi-registry/cm-handles[@id='ch-1']", leaves: ['id': 'ch-1', 'alternate-id': 'alt-1'])]
+ mockCpsDataService.getDataNodesForMultipleXpaths(NCMP_DATASPACE_NAME, NCMP_DMI_REGISTRY_ANCHOR, ["/dmi-registry/cm-handles[@id='ch-1']"], OMIT_DESCENDANTS) >> dataNodes
+ when: 'the method to get cm-handle references by modules is called (outputting alternate IDs)'
def result = objectUnderTest.getCmHandleReferencesWithGivenModules(['sample-module-name'], true)
then: 'the result contains the correct alternate Id'
- assert result == ['alt-1'] as HashSet
+ assert result == ['alt-1'] as Set
}
def 'Replace list content'() {
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy
index d38d5442f2..ab6c3fddbf 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/utils/events/MessagingBaseSpec.groovy
@@ -44,7 +44,7 @@ class MessagingBaseSpec extends Specification {
static kafkaTestContainer = new ConfluentKafkaContainer("confluentinc/cp-kafka:7.8.0")
- def legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, String>(eventProducerConfigProperties(JsonSerializer)))
+ def legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, ?>(eventProducerConfigProperties(JsonSerializer)))
def cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, CloudEvent>(eventProducerConfigProperties(CloudEventSerializer)))
diff --git a/cps-parent/pom.xml b/cps-parent/pom.xml
index 6ae7c3cc55..c39336df96 100644
--- a/cps-parent/pom.xml
+++ b/cps-parent/pom.xml
@@ -60,7 +60,7 @@
<sonar.version>4.0.0.4121</sonar.version>
<spotbugs.plugin.version>4.8.6.4</spotbugs.plugin.version>
<spotbugs.version>4.8.6</spotbugs.version>
- <spring.boot.maven.plugin.version>3.4.1</spring.boot.maven.plugin.version>
+ <spring.boot.maven.plugin.version>3.4.4</spring.boot.maven.plugin.version>
<swagger.codegen.version>1.2.1</swagger.codegen.version>
<!-- Reporting paths and coverage -->
diff --git a/docs/deployment.rst b/docs/deployment.rst
index 840ab8e116..b3a279f92b 100644
--- a/docs/deployment.rst
+++ b/docs/deployment.rst
@@ -192,100 +192,97 @@ To get a listing of the cps-core Pods, run the following command:
Additional CPS-Core Customizations
==================================
-The following table lists some properties that can be specified as Helm chart
-values to configure the application to be deployed. This list is not exhaustive.
-
-Any spring supported property can be configured by providing in ``config.additional.<spring-supported-property-name>: value`` Example: config.additional.spring.datasource.hikari.maximumPoolSize: 30
+The following table lists some properties that can be configured in the deployment. This list is not exhaustive.
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
| Property | Description | Default Value |
+===========================================+=========================================================================================================+===============================+
-| config.appUserName | User name used by cps-core service to configure the authentication for REST API it exposes. | ``cpsuser`` |
+| appUserName | User name used by cps-core service to configure the authentication for REST API it exposes. | ``cpsuser`` |
| | This is the user name to be used by cps-core REST clients to authenticate themselves. | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.appUserPassword | Password used by cps-core service to configure the authentication for REST API it exposes. | Not defined |
+| appUserPassword | Password used by cps-core service to configure the authentication for REST API it exposes. | Not defined |
| | If not defined, the password is generated when deploying the application. | |
| | See also :ref:`cps_common_credentials_retrieval`. | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| postgres.config.pgUserName | Internal user name used by cps-core to connect to its own database. | ``cps`` |
+| postgres.pgUserName | Internal user name used by cps-core to connect to its own database. | ``cps`` |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| postgres.config.pgUserPassword | Internal password used by cps-core to connect to its own database. | Not defined |
+| postgres.pgUserPassword | Internal password used by cps-core to connect to its own database. | Not defined |
| | If not defined, the password is generated when deploying the application. | |
| | See also :ref:`cps_common_credentials_retrieval`. | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| postgres.config.pgDatabase | Database name used by cps-core | ``cpsdb`` |
+| postgres.pgDatabase | Database name used by cps-core | ``cpsdb`` |
| | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
| logging.level | Logging level set in cps-core | info |
| | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.useStrimziKafka | If targeting a custom kafka cluster, i.e. useStrimziKafka: false, the | true |
-| | config.eventPublisher.spring.kafka values below must be set. | |
+| useStrimziKafka | If targeting a custom kafka cluster, i.e. useStrimziKafka: false, the | true |
+| | eventPublisher.spring.kafka values below must be set. | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka hostname and port | ``<kafka-bootstrap>:9092`` |
+| eventPublisher. | Kafka hostname and port | ``<kafka-bootstrap>:9092`` |
| spring.kafka.bootstrap-servers | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka consumer client id | ``cps-core`` |
+| eventPublisher. | Kafka consumer client id | ``cps-core`` |
| spring.kafka.consumer.client-id | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka security protocol. | ``SASL_PLAINTEXT`` |
+| eventPublisher. | Kafka security protocol. | ``SASL_PLAINTEXT`` |
| spring.kafka.security.protocol | Some possible values are: | |
| | * ``PLAINTEXT`` | |
| | * ``SASL_PLAINTEXT``, for authentication | |
| | * ``SASL_SSL``, for authentication and encryption | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka security SASL mechanism. Required for SASL_PLAINTEXT and SASL_SSL protocols. | Not defined |
+| eventPublisher. | Kafka security SASL mechanism. Required for SASL_PLAINTEXT and SASL_SSL protocols. | Not defined |
| spring.kafka.properties. | Some possible values are: | |
| sasl.mechanism | * ``PLAIN``, for PLAINTEXT | |
| | * ``SCRAM-SHA-512``, for SSL | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka security SASL JAAS configuration. Required for SASL_PLAINTEXT and SASL_SSL protocols. | Not defined |
+| eventPublisher. | Kafka security SASL JAAS configuration. Required for SASL_PLAINTEXT and SASL_SSL protocols. | Not defined |
| spring.kafka.properties. | Some possible values are: | |
| sasl.jaas.config | * ``org.apache.kafka.common.security.plain.PlainLoginModule required username="..." password="...";``, | |
| | for PLAINTEXT | |
| | * ``org.apache.kafka.common.security.scram.ScramLoginModule required username="..." password="...";``, | |
| | for SSL | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka security SASL SSL store type. Required for SASL_SSL protocol. | Not defined |
+| eventPublisher. | Kafka security SASL SSL store type. Required for SASL_SSL protocol. | Not defined |
| spring.kafka.ssl.trust-store-type | Some possible values are: | |
| | * ``JKS`` | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka security SASL SSL store file location. Required for SASL_SSL protocol. | Not defined |
+| eventPublisher. | Kafka security SASL SSL store file location. Required for SASL_SSL protocol. | Not defined |
| spring.kafka.ssl.trust-store-location | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka security SASL SSL store password. Required for SASL_SSL protocol. | Not defined |
+| eventPublisher. | Kafka security SASL SSL store password. Required for SASL_SSL protocol. | Not defined |
| spring.kafka.ssl.trust-store-password | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.eventPublisher. | Kafka security SASL SSL broker hostname identification verification. Required for SASL_SSL protocol. | Not defined |
+| eventPublisher. | Kafka security SASL SSL broker hostname identification verification. Required for SASL_SSL protocol. | Not defined |
| spring.kafka.properties. | Possible value is: | |
| ssl.endpoint.identification.algorithm | | |
| | * ``""``, empty string to disable | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.additional. | Core pool size in asynchronous execution of notification. | ``2`` |
+| additional. | Core pool size in asynchronous execution of notification. | ``2`` |
| notification.async.executor. | | |
| core-pool-size | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.additional. | Max pool size in asynchronous execution of notification. | ``1`` |
+| additional. | Max pool size in asynchronous execution of notification. | ``1`` |
| notification.async.executor. | | |
| max-pool-size | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.additional. | Queue Capacity in asynchronous execution of notification. | ``500`` |
+| additional. | Queue Capacity in asynchronous execution of notification. | ``500`` |
| notification.async.executor. | | |
| queue-capacity | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.additional. | If the executor should wait for the tasks to be completed on shutdown | ``true`` |
+| additional. | If the executor should wait for the tasks to be completed on shutdown | ``true`` |
| notification.async.executor. | | |
| wait-for-tasks-to-complete-on-shutdown | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.additional. | Prefix to be added to the thread name in asynchronous execution of notifications. | ``Async-`` |
+| additional. | Prefix to be added to the thread name in asynchronous execution of notifications. | ``Async-`` |
| notification.async.executor. | | |
| thread-name-prefix | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.additional. | Maximum time allowed by the thread pool executor for execution of one of the threads in milliseconds. | ``60000`` |
+| additional. | Maximum time allowed by the thread pool executor for execution of one of the threads in milliseconds. | ``60000`` |
| notification.async.executor. | | |
| time-out-value-in-ms | | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
-| config.additional. | Specifies number of database connections between database and application. | ``10`` |
+| additional. | Specifies number of database connections between database and application. | ``10`` |
| spring.datasource.hikari. | This property controls the maximum size that the pool is allowed to reach, | |
| maximumPoolSize | including both idle and in-use connections. | |
+-------------------------------------------+---------------------------------------------------------------------------------------------------------+-------------------------------+
@@ -298,72 +295,79 @@ Additional CPS-NCMP Customizations
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
| Property | Description | Default Value |
+=================================================+=======================================================================================+=================================+
-| config.dmiPluginUserName | User name used by cps-core to authenticate themselves for using ncmp-dmi-plugin | ``dmiuser`` |
+| dmiPluginUserName | User name used by cps-core to authenticate themselves for using ncmp-dmi-plugin | ``dmiuser`` |
| | service. | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.dmiPluginUserPassword | Internal password used by cps-core to connect to ncmp-dmi-plugin service. | Not defined |
+| dmiPluginUserPassword | Internal password used by cps-core to connect to ncmp-dmi-plugin service. | Not defined |
| | If not defined, the password is generated when deploying the application. | |
| | See also :ref:`cps_common_credentials_retrieval`. | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.ncmp.timers | Specifies the delay in milliseconds in which the module sync watch dog will wake again| ``5000`` |
+| ncmp.timers | Specifies the delay in milliseconds in which the module sync watch dog will wake again| ``5000`` |
| .advised-modules-sync.sleep-time-ms | after finishing. | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.ncmp.timers | Specifies the delay in milliseconds in which the data sync watch dog will wake again | ``30000`` |
+| ncmp.timers | Specifies the delay in milliseconds in which the module sync watch dog will wake up | ``40000`` |
+| .advised-modules-sync.initial-delay-ms | for the first time. | |
++-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
+| ncmp.timers | Specifies the delay in milliseconds in which the data sync watch dog will wake again | ``30000`` |
| .cm-handle-data-sync.sleep-time-ms | after finishing. | |
| | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp | Maximum size (in MB) of the in-memory buffer for HTTP response data. | ``16`` |
+| ncmp.timers | Specifies the delay in milliseconds in which the data sync watch dog will wake up | ``40000`` |
+| .cm-handle-data-sync.initial-delay-ms | for the first time. | |
+| | | |
++-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
+| additional.ncmp | Maximum size (in MB) of the in-memory buffer for HTTP response data. | ``16`` |
| .[app] | | |
| .httpclient | | |
| .[services] | | |
| .maximumInMemorySizeInMegabytes | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp | Maximum number of simultaneous connections allowed in the connection pool. | ``100`` |
+| additional.ncmp | Maximum number of simultaneous connections allowed in the connection pool. | ``100`` |
| .[app] | | |
| .httpclient | | |
| .[services] | | |
| .maximumConnectionsTotal | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp | Maximum number of pending requests when the connection pool is full. | ``50`` |
+| additional.ncmp | Maximum number of pending requests when the connection pool is full. | ``50`` |
| .[app] | | |
| .httpclient | | |
| .[services] | | |
| .pendingAcquireMaxCount | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp | Specifies the maximum time in seconds, to wait for establishing a connection for the | ``30`` |
+| additional.ncmp | Specifies the maximum time in seconds, to wait for establishing a connection for the | ``30`` |
| .[app] | HTTP Client. | |
| .httpclient | | |
| .[services] | | |
| .connectionTimeoutInSeconds | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp | Timeout (in seconds) for reading data from the server after the connection is | ``30`` |
+| additional.ncmp | Timeout (in seconds) for reading data from the server after the connection is | ``30`` |
| .[app] | established. | |
| .httpclient | | |
| .[services] | | |
| .readTimeoutInSeconds | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp | Timeout (in seconds) for writing data to the server. | ``30`` |
+| additional.ncmp | Timeout (in seconds) for writing data to the server. | ``30`` |
| .[app] | | |
| .httpclient | | |
| .[services] | | |
| .writeTimeoutInSeconds | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp | Total timeout (in seconds) for receiving a complete response, including all processing| ``60`` |
+| additional.ncmp | Total timeout (in seconds) for receiving a complete response, including all processing| ``60`` |
| .[app] | stages. | |
| .httpclient | | |
| .[services] | | |
| .responseTimeoutInSeconds | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp.policy-executor | Enables or disables the policy-executor feature. | ``false`` |
+| additional.ncmp.policy-executor | Enables or disables the policy-executor feature. | ``false`` |
| .enabled | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp.policy-executor | The default (fallback) decision in case a problem with the external service occurs. | ``allow`` |
+| additional.ncmp.policy-executor | The default (fallback) decision in case a problem with the external service occurs. | ``allow`` |
| .defaultDecision | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp.policy-executor | The server address for the external policy executor service. | ``http://policy-executor-stub`` |
+| additional.ncmp.policy-executor | The server address for the external policy executor service. | ``http://policy-executor-stub`` |
| .server.address | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
-| config.additional.ncmp.policy-executor | The port used for the external policy executor service. | ``8093`` |
+| additional.ncmp.policy-executor | The port used for the external policy executor service. | ``8093`` |
| .server.port | | |
+-------------------------------------------------+---------------------------------------------------------------------------------------+---------------------------------+
diff --git a/docs/release-notes.rst b/docs/release-notes.rst
index 376009d06a..29b8844e64 100644
--- a/docs/release-notes.rst
+++ b/docs/release-notes.rst
@@ -38,6 +38,7 @@ Release Data
Features
--------
- `CPS-2416 <https://lf-onap.atlassian.net/browse/CPS-2416>`_ Implement XPath Attribute Axis in CPS
+ - `CPS-2712 <https://lf-onap.atlassian.net/browse/CPS-2712>`_ Use Flux streaming/buffering for CM-handle searches
Version: 3.6.1
==============
diff --git a/integration-test/src/test/resources/application-module-sync-delayed.yml b/integration-test/src/test/resources/application-module-sync-delayed.yml
index 7b9c6aea4f..27c99e93b2 100644
--- a/integration-test/src/test/resources/application-module-sync-delayed.yml
+++ b/integration-test/src/test/resources/application-module-sync-delayed.yml
@@ -1,3 +1,4 @@
+
# ============LICENSE_START=======================================================
# Copyright (C) 2024 Nordix Foundation.
# ================================================================================
@@ -14,7 +15,6 @@
#
# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=========================================================
-
test:
ncmp:
timers:
diff --git a/integration-test/src/test/resources/application.yml b/integration-test/src/test/resources/application.yml
index e213a70a59..024ff5b33f 100644
--- a/integration-test/src/test/resources/application.yml
+++ b/integration-test/src/test/resources/application.yml
@@ -180,6 +180,7 @@ ncmp:
timers:
advised-modules-sync:
+ initial-delay-ms: 0
sleep-time-ms: 1000000
cm-handle-data-sync:
sleep-time-ms: 30000
diff --git a/k6-tests/ncmp/common/passthrough-crud.js b/k6-tests/ncmp/common/passthrough-crud.js
index eed1ab5190..c6732571ba 100644
--- a/k6-tests/ncmp/common/passthrough-crud.js
+++ b/k6-tests/ncmp/common/passthrough-crud.js
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2024 Nordix Foundation
+ * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,13 +18,12 @@
* ============LICENSE_END=========================================================
*/
-import { randomIntBetween } from 'https://jslib.k6.io/k6-utils/1.2.0/index.js';
import {
performPostRequest,
performGetRequest,
NCMP_BASE_URL,
LEGACY_BATCH_TOPIC_NAME,
- TOTAL_CM_HANDLES,
+ getRandomCmHandleReference,
} from './utils.js';
export function passthroughRead(useAlternateId) {
@@ -66,11 +65,6 @@ export function legacyBatchRead(cmHandleIds) {
return performPostRequest(url, payload, 'batchRead');
}
-function getRandomCmHandleReference(useAlternateId) {
- const prefix = useAlternateId ? 'Region=NorthAmerica,Segment=' : 'ch-';
- return `${prefix}${randomIntBetween(1, TOTAL_CM_HANDLES)}`;
-}
-
function generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants) {
const descendantsParam = includeDescendants ? `&include-descendants=${includeDescendants}` : '';
return `${NCMP_BASE_URL}/ncmp/v1/ch/${cmHandleReference}/data/ds/${datastoreName}?resourceIdentifier=${resourceIdentifier}${descendantsParam}`;
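For illustration, a minimal sketch of how a passthrough read can now build its URL from the shared helper in utils.js; the datastore name and resource identifier below are assumed example values, not taken from this patch, and the real scenarios go through the performGetRequest/performPostRequest helpers shown in the import list above.

    import http from 'k6/http';
    import { NCMP_BASE_URL, getRandomCmHandleReference } from './utils.js';

    // Sketch only: datastore name and resource identifier are assumed example values.
    export function passthroughReadSketch(useAlternateId) {
        // 'ch-<n>' when false, 'Region=NorthAmerica,Segment=<n>' when true
        const cmHandleReference = getRandomCmHandleReference(useAlternateId);
        const datastoreName = 'ncmp-datastore:passthrough-operational';
        const url = `${NCMP_BASE_URL}/ncmp/v1/ch/${cmHandleReference}/data/ds/${datastoreName}`
            + '?resourceIdentifier=my-resource-id&include-descendants=true';
        return http.get(url);
    }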
diff --git a/k6-tests/ncmp/common/search-base.js b/k6-tests/ncmp/common/search-base.js
index af7d153416..91369e818f 100644
--- a/k6-tests/ncmp/common/search-base.js
+++ b/k6-tests/ncmp/common/search-base.js
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2024 Nordix Foundation
+ * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -31,7 +31,7 @@ export function executeCmHandleIdSearch(scenario) {
function executeSearchRequest(searchType, scenario) {
const searchParameters = SEARCH_PARAMETERS_PER_SCENARIO[scenario];
const payload = JSON.stringify(searchParameters);
- const url = `${NCMP_BASE_URL}/ncmp/v1/ch/${searchType}`;
+ const url = `${NCMP_BASE_URL}/ncmp/v1/ch/${searchType}?outputAlternateId=true`;
return performPostRequest(url, payload, searchType);
}
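Since every search URL now carries outputAlternateId=true, callers should expect alternate IDs rather than CM-handle IDs in the response body, while the element count stays the same. A minimal usage sketch, assuming the 'module' scenario key used by the test runner and this relative import path:

    import { executeCmHandleIdSearch } from './common/search-base.js';

    export default function () {
        // The response body is expected to list alternate IDs, e.g.
        // ['Region=NorthAmerica,Segment=1', ...]; the k6 checks only assert
        // the number of returned references, which is unchanged.
        const response = executeCmHandleIdSearch('module');
        console.log(`returned ${response.json('#')} cm handle references`);
    }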
diff --git a/k6-tests/ncmp/common/utils.js b/k6-tests/ncmp/common/utils.js
index 36ce6b48f3..ea77aae176 100644
--- a/k6-tests/ncmp/common/utils.js
+++ b/k6-tests/ncmp/common/utils.js
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2024-2025 Nordix Foundation
+ * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,6 +18,7 @@
* ============LICENSE_END=========================================================
*/
+import { randomIntBetween } from 'https://jslib.k6.io/k6-utils/1.2.0/index.js';
import http from 'k6/http';
export const testConfig = JSON.parse(open(`../config/${__ENV.TEST_PROFILE}.json`));
@@ -48,16 +49,29 @@ export function makeBatchOfCmHandleIds(batchSize, batchNumber) {
}
/**
- * Generates an unordered batch of CM-handle IDs based on batch size.
- * @returns {string[]} Array of CM-handle IDs, for example ['ch-8', 'ch-2' ... 'ch-32432']
+ * Generates an unordered batch of Alternate IDs.
+ * The batch size is determined by `LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE`,
+ * and the IDs are generated within the range of `TOTAL_CM_HANDLES`.
+ *
+ * @returns {string[]} Array of Alternate IDs, for example,
+ * ['Region=NorthAmerica,Segment=8', 'Region=NorthAmerica,Segment=2' ... 'Region=NorthAmerica,Segment=32432']
*/
-export function makeRandomBatchOfCmHandleIds() {
- const cmHandleIds = new Set();
- while (cmHandleIds.size < LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE) {
- const randomNum = Math.floor(Math.random() * TOTAL_CM_HANDLES) + 1;
- cmHandleIds.add('ch-' + randomNum);
+export function makeRandomBatchOfAlternateIds() {
+ const alternateIds = new Set();
+ while (alternateIds.size < LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE) {
+ alternateIds.add(getRandomCmHandleReference(true));
}
- return Array.from(cmHandleIds)
+ return Array.from(alternateIds)
+}
+
+/**
+ * Generates a random CM Handle reference based on the provided flag.
+ * @param {boolean} useAlternateId - if true, an alternate ID ('Region=NorthAmerica,Segment=<n>') is generated; otherwise a CM handle ID ('ch-<n>').
+ * @returns {string} CM Handle reference representing a CM handle ID or an alternate ID.
+ */
+export function getRandomCmHandleReference(useAlternateId) {
+ const prefix = useAlternateId ? 'Region=NorthAmerica,Segment=' : 'ch-';
+ return `${prefix}${randomIntBetween(1, TOTAL_CM_HANDLES)}`;
}
/**
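A quick sketch of how the two new helpers behave, based only on the implementations above; the generated numbers are random, so the values in the comments are illustrative:

    import {
        getRandomCmHandleReference,
        makeRandomBatchOfAlternateIds,
        LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE,
    } from './utils.js';

    // e.g. 'ch-17' - a CM-handle ID
    const cmHandleId = getRandomCmHandleReference(false);
    // e.g. 'Region=NorthAmerica,Segment=17' - an alternate ID
    const alternateId = getRandomCmHandleReference(true);
    // De-duplicated array of alternate IDs, sized by the batch constant.
    const batch = makeRandomBatchOfAlternateIds();
    console.log(batch.length === LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE); // true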
diff --git a/k6-tests/ncmp/ncmp-test-runner.js b/k6-tests/ncmp/ncmp-test-runner.js
index b8fccdd69c..1c53139991 100644
--- a/k6-tests/ncmp/ncmp-test-runner.js
+++ b/k6-tests/ncmp/ncmp-test-runner.js
@@ -1,6 +1,6 @@
/*
* ============LICENSE_START=======================================================
- * Copyright (C) 2024-2025 Nordix Foundation
+ * Copyright (C) 2024-2025 OpenInfra Foundation Europe. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,7 +23,7 @@ import { Trend } from 'k6/metrics';
import { Reader } from 'k6/x/kafka';
import {
TOTAL_CM_HANDLES, READ_DATA_FOR_CM_HANDLE_DELAY_MS, WRITE_DATA_FOR_CM_HANDLE_DELAY_MS,
- makeCustomSummaryReport, makeBatchOfCmHandleIds, makeRandomBatchOfCmHandleIds,
+ makeCustomSummaryReport, makeBatchOfCmHandleIds, makeRandomBatchOfAlternateIds,
LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE, REGISTRATION_BATCH_SIZE, LEGACY_BATCH_THROUGHPUT_TEST_NUMBER_OF_REQUESTS,
KAFKA_BOOTSTRAP_SERVERS, LEGACY_BATCH_TOPIC_NAME, CONTAINER_UP_TIME_IN_SECONDS, testConfig
} from './common/utils.js';
@@ -119,7 +119,7 @@ export function passthroughWriteAltIdScenario() {
export function cmHandleIdSearchNoFilterScenario() {
const response = executeCmHandleIdSearch('no-filter');
if (check(response, { 'CM handle ID no-filter search status equals 200': (r) => r.status === 200 })
- && check(response, { 'CM handle ID no-filter search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
+ && check(response, { 'CM handle ID no-filter search returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
idSearchNoFilterDurationTrend.add(response.timings.duration);
}
}
@@ -135,7 +135,7 @@ export function cmHandleSearchNoFilterScenario() {
export function cmHandleIdSearchModuleScenario() {
const response = executeCmHandleIdSearch('module');
if (check(response, { 'CM handle ID module search status equals 200': (r) => r.status === 200 })
- && check(response, { 'CM handle ID module search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
+ && check(response, { 'CM handle ID module search returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
idSearchModuleDurationTrend.add(response.timings.duration);
}
}
@@ -151,7 +151,7 @@ export function cmHandleSearchModuleScenario() {
export function cmHandleIdSearchPropertyScenario() {
const response = executeCmHandleIdSearch('property');
if (check(response, { 'CM handle ID property search status equals 200': (r) => r.status === 200 })
- && check(response, { 'CM handle ID property search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
+ && check(response, { 'CM handle ID property search returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
idSearchPropertyDurationTrend.add(response.timings.duration);
}
}
@@ -167,7 +167,7 @@ export function cmHandleSearchPropertyScenario() {
export function cmHandleIdSearchCpsPathScenario() {
const response = executeCmHandleIdSearch('cps-path-for-ready-cm-handles');
if (check(response, { 'CM handle ID cps path search status equals 200': (r) => r.status === 200 })
- && check(response, { 'CM handle ID cps path search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
+ && check(response, { 'CM handle ID cps path search returned the correct number of ids': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
idSearchCpsPathDurationTrend.add(response.timings.duration);
}
}
@@ -183,7 +183,7 @@ export function cmHandleSearchCpsPathScenario() {
export function cmHandleIdSearchTrustLevelScenario() {
const response = executeCmHandleIdSearch('trust-level');
if (check(response, { 'CM handle ID trust level search status equals 200': (r) => r.status === 200 })
- && check(response, { 'CM handle ID trust level search returned expected CM-handles': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
+ && check(response, { 'CM handle ID trust level search returned the correct number of cm handle references': (r) => r.json('#') === TOTAL_CM_HANDLES })) {
idSearchTrustLevelDurationTrend.add(response.timings.duration);
}
}
@@ -197,8 +197,8 @@ export function cmHandleSearchTrustLevelScenario() {
}
export function legacyBatchProduceScenario() {
- const nextBatchOfCmHandleIds = makeRandomBatchOfCmHandleIds();
- const response = legacyBatchRead(nextBatchOfCmHandleIds);
+ const nextBatchOfAlternateIds = makeRandomBatchOfAlternateIds();
+ const response = legacyBatchRead(nextBatchOfAlternateIds);
check(response, { 'data operation batch read status equals 200': (r) => r.status === 200 });
}