-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java | 6
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java (renamed from cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java) | 8
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java (renamed from cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java) | 8
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java | 55
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java | 77
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java | 39
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java | 33
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy | 6
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy (renamed from cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy) | 6
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy (renamed from cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy) | 56
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy | 2
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy | 33
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy | 7
-rw-r--r--  cps-ncmp-service/src/test/resources/application.yml | 3
-rw-r--r--  cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy | 37
-rwxr-xr-x  cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java | 6
-rw-r--r--  cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy | 74
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy | 158
-rw-r--r--  integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy | 8
19 files changed, 379 insertions, 243 deletions
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
index 9f7ef1e88..42d813505 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
@@ -25,8 +25,10 @@ import lombok.Getter;
@Getter
public enum NcmpEventResponseCode {
- CODE_100("100", "cm handle id(s) not found"),
- CODE_101("101", "cm handle(s) not ready");
+ CM_HANDLES_NOT_FOUND("100", "cm handle id(s) not found"),
+ CM_HANDLES_NOT_READY("101", "cm handle(s) not ready"),
+ DMI_SERVICE_NOT_RESPONDING("102", "dmi plugin service is not responding"),
+ UNABLE_TO_READ_RESOURCE_DATA("103", "dmi plugin service is not able to read resource data");
private final String statusCode;
private final String statusMessage;
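
Note: a hypothetical lookup helper (not part of this change), illustrating how the enum's status code/message pairs can be resolved; it assumes only the Lombok-generated getters for the fields shown above.

```java
import java.util.Arrays;
import java.util.Optional;
import org.onap.cps.ncmp.api.NcmpEventResponseCode;

final class ResponseCodeLookupSketch {
    // Hypothetical helper: find the response code enum entry matching a numeric status code.
    static Optional<NcmpEventResponseCode> fromStatusCode(final String statusCode) {
        return Arrays.stream(NcmpEventResponseCode.values())
                .filter(responseCode -> responseCode.getStatusCode().equals(statusCode))
                .findFirst();
    }
}
```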
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java
index 0ac0fb92d..0044182dd 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java
@@ -36,7 +36,7 @@ import org.springframework.stereotype.Component;
@Slf4j
@RequiredArgsConstructor
@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true)
-public class NcmpAsyncRequestResponseEventConsumer {
+public class AsyncRestRequestResponseEventConsumer {
private final EventsPublisher<NcmpAsyncRequestResponseEvent> eventsPublisher;
private final NcmpAsyncRequestResponseEventMapper ncmpAsyncRequestResponseEventMapper;
@@ -48,13 +48,15 @@ public class NcmpAsyncRequestResponseEventConsumer {
*/
@KafkaListener(
topics = "${app.ncmp.async-m2m.topic}",
+ filter = "includeNonCloudEventsOnly",
+ groupId = "ncmp-async-rest-request-event-group",
properties = {"spring.json.value.default.type=org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent"})
public void consumeAndForward(final DmiAsyncRequestResponseEvent dmiAsyncRequestResponseEvent) {
log.debug("Consuming event {} ...", dmiAsyncRequestResponseEvent);
-
final NcmpAsyncRequestResponseEvent ncmpAsyncRequestResponseEvent =
ncmpAsyncRequestResponseEventMapper.toNcmpAsyncEvent(dmiAsyncRequestResponseEvent);
eventsPublisher.publishEvent(ncmpAsyncRequestResponseEvent.getEventTarget(),
- ncmpAsyncRequestResponseEvent.getEventId(), ncmpAsyncRequestResponseEvent);
+ ncmpAsyncRequestResponseEvent.getEventId(),
+ ncmpAsyncRequestResponseEvent);
}
}
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java
index 4a0ec5c49..9649b0109 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java
@@ -37,13 +37,13 @@ import org.springframework.stereotype.Component;
@Slf4j
@RequiredArgsConstructor
@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true)
-public class NcmpAsyncDataOperationEventConsumer {
+public class DataOperationEventConsumer {
private final EventsPublisher<CloudEvent> eventsPublisher;
/**
- * Consume the DataOperationResponseEvent published by producer to topic 'async-m2m.topic'
- * and publish the same to the client specified topic.
+ * Consume the DataOperation cloud event published by producer to topic 'async-m2m.topic'
+ * and publish the same to client specified topic.
*
* @param dataOperationEventConsumerRecord consuming event as a ConsumerRecord.
*/
@@ -51,7 +51,7 @@ public class NcmpAsyncDataOperationEventConsumer {
topics = "${app.ncmp.async-m2m.topic}",
filter = "includeDataOperationEventsOnly",
groupId = "ncmp-data-operation-event-group",
- properties = {"spring.json.value.default.type=org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent"})
+ containerFactory = "cloudEventConcurrentKafkaListenerContainerFactory")
public void consumeAndPublish(final ConsumerRecord<String, CloudEvent> dataOperationEventConsumerRecord) {
log.info("Consuming event payload {} ...", dataOperationEventConsumerRecord.value());
final String eventTarget = KafkaHeaders.getParsedKafkaHeader(
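
Note: the listener now uses the cloud-event container factory, and routing information is read from cloud-event Kafka headers (the hunk is truncated mid-statement, so the exact destination header name is not shown). A minimal sketch of that header parsing, using the ce_type header as the example:

```java
import io.cloudevents.kafka.impl.KafkaHeaders;
import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;

final class HeaderParsingSketch {
    public static void main(final String[] args) {
        // Simulate cloud-event headers as written by the CloudEventSerializer.
        final Headers headers = new RecordHeaders();
        headers.add("ce_type", "DataOperationEvent".getBytes(StandardCharsets.UTF_8));
        // getParsedKafkaHeader returns the header value as a String, or null when absent.
        System.out.println(KafkaHeaders.getParsedKafkaHeader(headers, "ce_type")); // DataOperationEvent
    }
}
```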
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java
deleted file mode 100644
index 76cc0c4b7..000000000
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2023 Nordix Foundation
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.ncmp.api.impl.async;
-
-import io.cloudevents.CloudEvent;
-import io.cloudevents.kafka.impl.KafkaHeaders;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.listener.adapter.RecordFilterStrategy;
-
-/**
- * Data operation record filter strategy, which helps to filter the consumer records.
- *
- */
-@Configuration
-@Slf4j
-public class DataOperationRecordFilterStrategy {
-
- /**
- * Filtering the consumer records based on the eventType header, It
- * returns boolean, true means filter the consumer record and false
- * means not filter the consumer record.
- * @return boolean value.
- */
- @Bean
- public RecordFilterStrategy<String, CloudEvent> includeDataOperationEventsOnly() {
- return consumedRecord -> {
- final String eventTypeHeaderValue = KafkaHeaders.getParsedKafkaHeader(consumedRecord.headers(), "ce_type");
- if (eventTypeHeaderValue == null) {
- log.trace("No ce_type header found, possibly a legacy event (ignored)");
- return true;
- }
- return !(eventTypeHeaderValue.contains("DataOperationEvent"));
- };
- }
-}
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java
new file mode 100644
index 000000000..040479040
--- /dev/null
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java
@@ -0,0 +1,77 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.ncmp.api.impl.async;
+
+import io.cloudevents.CloudEvent;
+import io.cloudevents.kafka.impl.KafkaHeaders;
+import java.io.Serializable;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.common.header.Headers;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.listener.adapter.RecordFilterStrategy;
+
+/**
+ * Record filter strategies, which help to filter the consumer records based on various conditions.
+ *
+ */
+@Configuration
+@Slf4j
+public class RecordFilterStrategies {
+
+ private static final boolean EXCLUDE_EVENT = true;
+
+ /**
+ * Includes only DataOperation events, based on the cloud event type header.
+ * The strategy returns true to exclude a consumer record and false to include it.
+ *
+ * @return the record filter strategy for DataOperation cloud events
+ */
+ @Bean
+ public RecordFilterStrategy<String, CloudEvent> includeDataOperationEventsOnly() {
+ return consumerRecord ->
+ isNotCloudEventOfType(consumerRecord.headers(), "DataOperationEvent");
+ }
+
+ /**
+ * Includes only legacy (non-cloud) events, based on the cloud event type header.
+ * The strategy returns true to exclude a consumer record and false to include it.
+ *
+ * @return the record filter strategy for non-cloud events
+ */
+ @Bean
+ public RecordFilterStrategy<String, Serializable> includeNonCloudEventsOnly() {
+ return consumerRecord -> isCloudEvent(consumerRecord.headers());
+ }
+
+ private boolean isCloudEvent(final Headers headers) {
+ return headers.lastHeader("ce_type") != null;
+ }
+
+ private boolean isNotCloudEventOfType(final Headers headers, final String requiredEventType) {
+ final String eventTypeHeaderValue = KafkaHeaders.getParsedKafkaHeader(headers, "ce_type");
+ if (eventTypeHeaderValue == null) {
+ log.trace("No ce_type header found, possibly a legacy event (ignored)");
+ return EXCLUDE_EVENT;
+ }
+ return !(eventTypeHeaderValue.contains(requiredEventType));
+ }
+}
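
Note: a sketch of the Spring Kafka contract these beans rely on: RecordFilterStrategy.filter(...) returning true means the record is discarded, false means it is delivered to the listener.

```java
import io.cloudevents.CloudEvent;
import java.nio.charset.StandardCharsets;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.adapter.RecordFilterStrategy;

final class FilterSemanticsSketch {
    static void demo(final RecordFilterStrategy<String, CloudEvent> includeDataOperationEventsOnly) {
        final ConsumerRecord<String, CloudEvent> dataOperationRecord =
                new ConsumerRecord<>("some-topic", 0, 0L, null, null);
        dataOperationRecord.headers().add("ce_type", "DataOperationEvent".getBytes(StandardCharsets.UTF_8));
        // ce_type contains 'DataOperationEvent': record is included (filter returns false)
        System.out.println(includeDataOperationEventsOnly.filter(dataOperationRecord)); // false

        final ConsumerRecord<String, CloudEvent> legacyRecord =
                new ConsumerRecord<>("some-topic", 0, 1L, null, null);
        // no ce_type header: treated as a legacy event and excluded (filter returns true)
        System.out.println(includeDataOperationEventsOnly.filter(legacyRecord)); // true
    }
}
```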
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
index b4784f418..8f0975f17 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
@@ -30,8 +30,10 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
+import org.onap.cps.ncmp.api.NcmpEventResponseCode;
import org.onap.cps.ncmp.api.impl.client.DmiRestClient;
import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration;
+import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException;
import org.onap.cps.ncmp.api.impl.executor.TaskExecutor;
import org.onap.cps.ncmp.api.impl.utils.DmiServiceUrlBuilder;
import org.onap.cps.ncmp.api.impl.utils.data.operation.ResourceDataOperationRequestUtils;
@@ -43,7 +45,9 @@ import org.onap.cps.spi.exceptions.CpsException;
import org.onap.cps.utils.JsonObjectMapper;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
+import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
+import org.springframework.web.util.UriComponentsBuilder;
/**
* Operations class for DMI data.
@@ -240,14 +244,37 @@ public class DmiDataOperations extends DmiOperations {
final String dataOperationRequestBodiesAsJsonString =
jsonObjectMapper.asJsonString(dmiDataOperationRequestBodies);
TaskExecutor.executeTask(() -> dmiRestClient.postOperationWithJsonData(dataOperationResourceUrl,
- dataOperationRequestBodiesAsJsonString, READ),
+ dataOperationRequestBodiesAsJsonString, READ),
DEFAULT_ASYNC_TASK_EXECUTOR_TIMEOUT_IN_MILLISECONDS)
- .whenCompleteAsync(this::handleTaskCompletion);
+ .whenCompleteAsync((response, throwable) -> handleTaskCompletionException(throwable,
+ dataOperationResourceUrl, dmiDataOperationRequestBodies));
}
- private void handleTaskCompletion(final Object response, final Throwable throwable) {
- // TODO Need to publish an error response to client given topic.
- // Code should be implemented into https://jira.onap.org/browse/CPS-1558 (
- // NCMP : Handle non responding DMI-Plugin)
+ private void handleTaskCompletionException(final Throwable throwable,
+ final String dataOperationResourceUrl,
+ final List<DmiDataOperation> dmiDataOperationRequestBodies) {
+ if (throwable != null) {
+ final MultiValueMap<String, String> dataOperationResourceUrlParameters =
+ UriComponentsBuilder.fromUriString(dataOperationResourceUrl).build().getQueryParams();
+ final String topicName = dataOperationResourceUrlParameters.get("topic").get(0);
+ final String requestId = dataOperationResourceUrlParameters.get("requestId").get(0);
+
+ final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>>
+ cmHandleIdsPerResponseCodesPerOperationId = new LinkedMultiValueMap<>();
+
+ dmiDataOperationRequestBodies.forEach(dmiDataOperationRequestBody -> {
+ final List<String> cmHandleIds = dmiDataOperationRequestBody.getCmHandles().stream()
+ .map(CmHandle::getId).collect(Collectors.toList());
+ if (throwable.getCause() instanceof HttpClientRequestException) {
+ cmHandleIdsPerResponseCodesPerOperationId.add(dmiDataOperationRequestBody.getOperationId(),
+ Map.of(NcmpEventResponseCode.UNABLE_TO_READ_RESOURCE_DATA, cmHandleIds));
+ } else {
+ cmHandleIdsPerResponseCodesPerOperationId.add(dmiDataOperationRequestBody.getOperationId(),
+ Map.of(NcmpEventResponseCode.DMI_SERVICE_NOT_RESPONDING, cmHandleIds));
+ }
+ });
+ ResourceDataOperationRequestUtils.publishErrorMessageToClientTopic(topicName, requestId,
+ cmHandleIdsPerResponseCodesPerOperationId);
+ }
}
}
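
Note: a minimal sketch, with a hypothetical URL, of the query-parameter extraction used in handleTaskCompletionException above.

```java
import org.springframework.util.MultiValueMap;
import org.springframework.web.util.UriComponentsBuilder;

final class UrlParameterSketch {
    public static void main(final String[] args) {
        // Hypothetical data operation resource URL carrying topic and requestId.
        final String dataOperationResourceUrl =
                "http://dmi-service-name:8080/dmi/v1/data?topic=my-topic-name&requestId=some-request-id";
        final MultiValueMap<String, String> queryParams =
                UriComponentsBuilder.fromUriString(dataOperationResourceUrl).build().getQueryParams();
        System.out.println(queryParams.get("topic").get(0));     // my-topic-name
        System.out.println(queryParams.get("requestId").get(0)); // some-request-id
    }
}
```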
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
index 957f48a86..d8fb904f2 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
@@ -68,7 +68,7 @@ public class ResourceDataOperationRequestUtils {
final Collection<YangModelCmHandle> yangModelCmHandles) {
final Map<String, List<DmiDataOperation>> dmiDataOperationsOutPerDmiServiceName = new HashMap<>();
- final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerOperationIdPerResponseCode
+ final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerResponseCodesPerOperationId
= new LinkedMultiValueMap<>();
final Set<String> nonReadyCmHandleIdsLookup = filterAndGetNonReadyCmHandleIds(yangModelCmHandles);
@@ -100,25 +100,34 @@ public class ResourceDataOperationRequestUtils {
}
}
}
- populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerOperationIdPerResponseCode,
- dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CODE_100, nonExistingCmHandleIds);
- populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerOperationIdPerResponseCode,
- dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CODE_101, nonReadyCmHandleIds);
+ populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerResponseCodesPerOperationId,
+ dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CM_HANDLES_NOT_FOUND,
+ nonExistingCmHandleIds);
+ populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerResponseCodesPerOperationId,
+ dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CM_HANDLES_NOT_READY,
+ nonReadyCmHandleIds);
}
- if (!cmHandleIdsPerOperationIdPerResponseCode.isEmpty()) {
- publishErrorMessageToClientTopic(topicParamInQuery, requestId, cmHandleIdsPerOperationIdPerResponseCode);
+ if (!cmHandleIdsPerResponseCodesPerOperationId.isEmpty()) {
+ publishErrorMessageToClientTopic(topicParamInQuery, requestId, cmHandleIdsPerResponseCodesPerOperationId);
}
return dmiDataOperationsOutPerDmiServiceName;
}
+ /**
+ * Creates a data operation cloud event and publishes it to the client topic.
+ *
+ * @param clientTopic client given topic
+ * @param requestId unique identifier per request
+ * @param cmHandleIdsPerResponseCodesPerOperationId cm handle ids per response code, grouped per operation id
+ */
@Async
- private static void publishErrorMessageToClientTopic(final String clientTopic,
+ public static void publishErrorMessageToClientTopic(final String clientTopic,
final String requestId,
final MultiValueMap<String,
Map<NcmpEventResponseCode, List<String>>>
- cmHandleIdsPerOperationIdPerResponseCode) {
+ cmHandleIdsPerResponseCodesPerOperationId) {
final CloudEvent dataOperationCloudEvent = DataOperationEventCreator.createDataOperationEvent(clientTopic,
- requestId, cmHandleIdsPerOperationIdPerResponseCode);
+ requestId, cmHandleIdsPerResponseCodesPerOperationId);
final EventsPublisher<CloudEvent> eventsPublisher = CpsApplicationContext.getCpsBean(EventsPublisher.class);
eventsPublisher.publishCloudEvent(clientTopic, requestId, dataOperationCloudEvent);
}
@@ -166,13 +175,13 @@ public class ResourceDataOperationRequestUtils {
}
private static void populateCmHandleIdsPerOperationIdPerResponseCode(final MultiValueMap<String,
- Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerOperationIdByResponseCode,
+ Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerResponseCodesPerOperationId,
final String operationId,
final NcmpEventResponseCode
ncmpEventResponseCode,
final List<String> cmHandleIds) {
if (!cmHandleIds.isEmpty()) {
- cmHandleIdsPerOperationIdByResponseCode.add(operationId, Map.of(ncmpEventResponseCode, cmHandleIds));
+ cmHandleIdsPerResponseCodesPerOperationId.add(operationId, Map.of(ncmpEventResponseCode, cmHandleIds));
}
}
}
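
Note: a sketch of the renamed cmHandleIdsPerResponseCodesPerOperationId structure: for each operation id, the multi-value map accumulates one map per response code, each listing the affected cm handle ids.

```java
import java.util.List;
import java.util.Map;
import org.onap.cps.ncmp.api.NcmpEventResponseCode;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

final class ErrorGroupingSketch {
    public static void main(final String[] args) {
        final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>>
                cmHandleIdsPerResponseCodesPerOperationId = new LinkedMultiValueMap<>();
        cmHandleIdsPerResponseCodesPerOperationId.add("operation-1",
                Map.of(NcmpEventResponseCode.CM_HANDLES_NOT_FOUND, List.of("ch-1", "ch-2")));
        cmHandleIdsPerResponseCodesPerOperationId.add("operation-1",
                Map.of(NcmpEventResponseCode.CM_HANDLES_NOT_READY, List.of("ch-3")));
        // 'operation-1' now has two entries, one per response code
        System.out.println(cmHandleIdsPerResponseCodesPerOperationId.get("operation-1").size()); // 2
    }
}
```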
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
index fe7b3f11c..17df7b0ba 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
@@ -37,7 +37,7 @@ import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
import java.time.Duration
-@SpringBootTest(classes = [EventsPublisher, NcmpAsyncRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper])
+@SpringBootTest(classes = [EventsPublisher, AsyncRestRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper])
@Testcontainers
@DirtiesContext
class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBaseSpec {
@@ -52,8 +52,8 @@ class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBase
Mappers.getMapper(NcmpAsyncRequestResponseEventMapper.class)
@SpringBean
- NcmpAsyncRequestResponseEventConsumer ncmpAsyncRequestResponseEventConsumer =
- new NcmpAsyncRequestResponseEventConsumer(cpsAsyncRequestResponseEventPublisher,
+ AsyncRestRequestResponseEventConsumer ncmpAsyncRequestResponseEventConsumer =
+ new AsyncRestRequestResponseEventConsumer(cpsAsyncRequestResponseEventPublisher,
ncmpAsyncRequestResponseEventMapper)
@Autowired
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy
index 635328871..7b54f591a 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy
@@ -44,16 +44,16 @@ import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
import java.time.Duration
-@SpringBootTest(classes = [EventsPublisher, NcmpAsyncDataOperationEventConsumer, DataOperationRecordFilterStrategy,JsonObjectMapper, ObjectMapper])
+@SpringBootTest(classes = [EventsPublisher, DataOperationEventConsumer, RecordFilterStrategies, JsonObjectMapper, ObjectMapper])
@Testcontainers
@DirtiesContext
-class NcmpAsyncDataOperationEventConsumerSpec extends MessagingBaseSpec {
+class DataOperationEventConsumerSpec extends MessagingBaseSpec {
@SpringBean
EventsPublisher asyncDataOperationEventPublisher = new EventsPublisher<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate)
@SpringBean
- NcmpAsyncDataOperationEventConsumer objectUnderTest = new NcmpAsyncDataOperationEventConsumer(asyncDataOperationEventPublisher)
+ DataOperationEventConsumer objectUnderTest = new DataOperationEventConsumer(asyncDataOperationEventPublisher)
@Autowired
JsonObjectMapper jsonObjectMapper
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy
index f577f55ba..43d06483a 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy
@@ -20,14 +20,11 @@
package org.onap.cps.ncmp.api.impl.async
-import io.cloudevents.CloudEvent
import io.cloudevents.core.builder.CloudEventBuilder
-import io.cloudevents.kafka.CloudEventSerializer
-import org.apache.kafka.clients.producer.KafkaProducer
-import org.apache.kafka.clients.producer.ProducerRecord
-import org.apache.kafka.common.serialization.StringSerializer
+import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig
import org.onap.cps.ncmp.api.impl.events.EventsPublisher
import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec
+import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent
import org.spockframework.spring.SpringBean
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.annotation.Value
@@ -39,15 +36,18 @@ import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
import java.util.concurrent.TimeUnit
-@SpringBootTest(classes =[NcmpAsyncDataOperationEventConsumer, DataOperationRecordFilterStrategy])
+@SpringBootTest(classes = [DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig])
@DirtiesContext
@Testcontainers
@EnableAutoConfiguration
-class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSpec {
+class FilterStrategiesIntegrationSpec extends MessagingBaseSpec {
@SpringBean
EventsPublisher mockEventsPublisher = Mock()
+ @SpringBean
+ NcmpAsyncRequestResponseEventMapper mapper = Stub()
+
@Autowired
private KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry
@@ -58,16 +58,39 @@ class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSp
activateListeners()
}
+ def 'Legacy event consumer with cloud event.'() {
+ given: 'a cloud event of type DataOperationEvent'
+ def cloudEvent = CloudEventBuilder.v1().withId('some id')
+ .withType('DataOperationEvent')
+ .withSource(URI.create('some-source'))
+ .build()
+ when: 'send the cloud event'
+ cloudEventKafkaTemplate.send(topic, cloudEvent)
+ and: 'wait a little for async processing of message'
+ TimeUnit.MILLISECONDS.sleep(300)
+ then: 'the event is not consumed'
+ 0 * mockEventsPublisher.publishEvent(*_)
+ }
+
+ def 'Legacy event consumer with valid legacy event.'() {
+ given: 'a valid legacy event'
+ DmiAsyncRequestResponseEvent legacyEvent = new DmiAsyncRequestResponseEvent(eventId:'legacyEventId', eventTarget:'legacyEventTarget')
+ when: 'send the legacy event'
+ legacyEventKafkaTemplate.send(topic, legacyEvent)
+ and: 'wait a little for async processing of message'
+ TimeUnit.MILLISECONDS.sleep(300)
+ then: 'the event is consumed by the (legacy) AsyncRestRequest consumer'
+ 1 * mockEventsPublisher.publishEvent(*_)
+ }
+
def 'Filtering Cloud Events on Type.'() {
given: 'a cloud event of type: #eventType'
def cloudEvent = CloudEventBuilder.v1().withId('some id')
- .withType(eventType)
- .withSource(URI.create('some-source'))
- .build()
+ .withType(eventType)
+ .withSource(URI.create('some-source'))
+ .build()
when: 'send the cloud event'
- ProducerRecord<String, CloudEvent> record = new ProducerRecord<>(topic, cloudEvent)
- KafkaProducer<String, CloudEvent> producer = new KafkaProducer<>(eventProducerConfigProperties(CloudEventSerializer))
- producer.send(record)
+ cloudEventKafkaTemplate.send(topic, cloudEvent)
and: 'wait a little for async processing of message'
TimeUnit.MILLISECONDS.sleep(300)
then: 'the event has only been forwarded for the correct type'
@@ -79,11 +102,11 @@ class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSp
'any type contain the word "DataOperationEvent"' || 1
}
+ //TODO Toine, add positive test with data to prove event is converted correctly (using correct factory)
+
def 'Non cloud events on same Topic.'() {
when: 'sending a non-cloud event on the same topic'
- ProducerRecord<String, String> record = new ProducerRecord<>(topic, 'simple string event')
- KafkaProducer<String, String> producer = new KafkaProducer<>(eventProducerConfigProperties(StringSerializer))
- producer.send(record)
+ legacyEventKafkaTemplate.send(topic, 'simple string event')
and: 'wait a little for async processing of message'
TimeUnit.MILLISECONDS.sleep(300)
then: 'the event is not processed by this consumer'
@@ -95,5 +118,4 @@ class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSp
messageListenerContainer -> { ContainerTestUtils.waitForAssignment(messageListenerContainer, 1) }
)
}
-
}
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
index 4c6880421..223c92f37 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
@@ -38,7 +38,7 @@ import org.testcontainers.spock.Testcontainers
import java.time.Duration
-@SpringBootTest(classes = [EventsPublisher, ObjectMapper, JsonObjectMapper])
+@SpringBootTest(classes = [ObjectMapper, JsonObjectMapper])
@Testcontainers
@DirtiesContext
class LcmEventsPublisherSpec extends MessagingBaseSpec {
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
index 59e62e34d..3f40f430f 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
@@ -22,12 +22,16 @@
package org.onap.cps.ncmp.api.impl.operations
import com.fasterxml.jackson.databind.ObjectMapper
+import io.cloudevents.core.CloudEventUtils
+import io.cloudevents.jackson.PojoCloudEventDataMapper
+import org.onap.cps.ncmp.api.NcmpEventResponseCode
import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration
import org.onap.cps.ncmp.api.impl.events.EventsPublisher
+import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException
import org.onap.cps.ncmp.api.impl.utils.DmiServiceUrlBuilder
import org.onap.cps.ncmp.api.impl.utils.context.CpsApplicationContext
import org.onap.cps.ncmp.api.models.DataOperationRequest
-import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent
+import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent
import org.onap.cps.ncmp.utils.TestUtils
import org.onap.cps.utils.JsonObjectMapper
import org.spockframework.spring.SpringBean
@@ -37,6 +41,7 @@ import org.springframework.http.ResponseEntity
import org.springframework.test.context.ContextConfiguration
import org.springframework.http.HttpStatus
import spock.lang.Shared
+import java.util.concurrent.TimeoutException
import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL
import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING
@@ -110,6 +115,28 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
assert requestBodyAsJsonStringArg == expectedBatchRequestAsJson
}
+ def 'Execute (async) data operation from DMI service for #scenario.'() {
+ given: 'data operation request body and dmi resource url'
+ def dmiDataOperation = DmiDataOperation.builder().operationId('some-operation-id').build()
+ dmiDataOperation.getCmHandles().add(CmHandle.builder().id('some-cm-handle-id').build())
+ def dmiDataOperationResourceDataUrl = "http://dmi-service-name:dmi-port/dmi/v1/data?topic=my-topic-name&requestId=some-request-id"
+ def actualDataOperationCloudEvent = null
+ when: 'an exception occurs after sending the request to the dmi service'
+ objectUnderTest.handleTaskCompletionException(new Throwable(exception), dmiDataOperationResourceDataUrl, List.of(dmiDataOperation))
+ then: 'a cloud event is published'
+ eventsPublisher.publishCloudEvent('my-topic-name', 'some-request-id', _) >> { args -> actualDataOperationCloudEvent = args[2] }
+ and: 'the event contains the expected error details'
+ def eventDataValue = extractDataValue(actualDataOperationCloudEvent)
+ assert eventDataValue.operationId == dmiDataOperation.operationId
+ assert eventDataValue.ids == dmiDataOperation.cmHandles.id
+ assert eventDataValue.statusCode == responseCode.statusCode
+ assert eventDataValue.statusMessage == responseCode.statusMessage
+ where: 'the following exceptions occur'
+ scenario | exception || responseCode
+ 'http client request exception' | new HttpClientRequestException('error-message', 'error-details', HttpStatus.SERVICE_UNAVAILABLE.value()) || NcmpEventResponseCode.UNABLE_TO_READ_RESOURCE_DATA
+ 'timeout exception' | new TimeoutException() || NcmpEventResponseCode.DMI_SERVICE_NOT_RESPONDING
+ }
+
def 'call get all resource data.'() {
given: 'the system returns a cm handle with a sample property'
mockYangModelCmHandleRetrieval([yangModelCmHandleProperty])
@@ -142,4 +169,8 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
CREATE || 'create'
UPDATE || 'update'
}
+
+ def extractDataValue(actualDataOperationCloudEvent) {
+ return CloudEventUtils.mapData(actualDataOperationCloudEvent, PojoCloudEventDataMapper.from(new ObjectMapper(), DataOperationEvent.class)).getValue().data.responses[0]
+ }
}
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
index 603b8cdda..0356c3fcd 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
@@ -24,7 +24,6 @@ import io.cloudevents.CloudEvent
import io.cloudevents.kafka.CloudEventSerializer
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.serialization.StringSerializer
-import org.spockframework.spring.SpringBean
import org.springframework.kafka.core.DefaultKafkaProducerFactory
import org.springframework.kafka.core.KafkaTemplate
import org.springframework.kafka.support.serializer.JsonSerializer
@@ -46,11 +45,9 @@ class MessagingBaseSpec extends Specification {
static kafkaTestContainer = new KafkaContainer(DockerImageName.parse('registry.nordix.org/onaptest/confluentinc/cp-kafka:6.2.1').asCompatibleSubstituteFor('confluentinc/cp-kafka'))
- @SpringBean
- KafkaTemplate legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<Integer, String>(eventProducerConfigProperties(JsonSerializer)))
+ def legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, String>(eventProducerConfigProperties(JsonSerializer)))
- @SpringBean
- KafkaTemplate cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, CloudEvent>(eventProducerConfigProperties(CloudEventSerializer)))
+ def cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, CloudEvent>(eventProducerConfigProperties(CloudEventSerializer)))
@DynamicPropertySource
static void registerKafkaProperties(DynamicPropertyRegistry dynamicPropertyRegistry) {
diff --git a/cps-ncmp-service/src/test/resources/application.yml b/cps-ncmp-service/src/test/resources/application.yml
index df34f844d..edbd7022f 100644
--- a/cps-ncmp-service/src/test/resources/application.yml
+++ b/cps-ncmp-service/src/test/resources/application.yml
@@ -21,6 +21,7 @@ spring:
producer:
value-serializer: io.cloudevents.kafka.CloudEventSerializer
consumer:
+ value-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer
properties:
spring.deserializer.value.delegate.class: io.cloudevents.kafka.CloudEventDeserializer
@@ -54,4 +55,4 @@ hazelcast:
mode:
kubernetes:
enabled: false
- service-name: "cps-and-ncmp-service"
\ No newline at end of file
+ service-name: "cps-and-ncmp-service"
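
Note: the yaml change above wraps the cloud-event value deserializer in Spring Kafka's ErrorHandlingDeserializer, so a record that fails deserialization yields a null value (with failure details in headers) instead of the container retrying the same record forever. A sketch of the equivalent programmatic wiring:

```java
import io.cloudevents.CloudEvent;
import io.cloudevents.kafka.CloudEventDeserializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;

final class DeserializerSketch {
    // Equivalent of value-deserializer: ErrorHandlingDeserializer with
    // spring.deserializer.value.delegate.class: CloudEventDeserializer
    static Deserializer<CloudEvent> cloudEventValueDeserializer() {
        return new ErrorHandlingDeserializer<>(new CloudEventDeserializer());
    }
}
```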
diff --git a/cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy b/cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy
index 572db005b..e9d559c31 100644
--- a/cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy
+++ b/cps-rest/src/test/groovy/org/onap/cps/rest/utils/MultipartFileUtilSpec.groovy
@@ -22,23 +22,12 @@ package org.onap.cps.rest.utils
import org.onap.cps.spi.exceptions.CpsException
import org.onap.cps.spi.exceptions.ModelValidationException
-import org.onap.cps.spi.model.DataNodeBuilder
-import org.onap.cps.utils.DataMapUtils
import org.springframework.mock.web.MockMultipartFile
import org.springframework.web.multipart.MultipartFile
import spock.lang.Specification
class MultipartFileUtilSpec extends Specification {
- def 'Data node without leaves and without children.'() {
- given: 'a datanode with no leaves and no children'
- def dataNodeWithoutData = new DataNodeBuilder().withXpath('some xpath').build()
- when: 'it is converted to a map'
- def result = DataMapUtils.toDataMap(dataNodeWithoutData)
- then: 'an empty object map is returned'
- result.isEmpty()
- }
-
def 'Extract yang resource from yang file.'() {
given: 'uploaded yang file'
def multipartFile = new MockMultipartFile("file", "filename.yang", "text/plain", "content".getBytes())
@@ -116,6 +105,32 @@ class MultipartFileUtilSpec extends Specification {
fileType << ['YANG', 'ZIP']
}
+ def 'Resource name extension checks, with #scenario.'() {
+ expect: 'extension check returns expected result'
+ assert MultipartFileUtil.resourceNameEndsWithExtension(resourceName, '.test') == expectedResult
+ where: 'following resource names are tested'
+ scenario | resourceName || expectedResult
+ 'correct extension'| 'file.test' || true
+ 'mixed case' | 'file.TesT' || true
+ 'other extension' | 'file.other' || false
+ 'no extension' | 'file' || false
+ 'null' | null || false
+ }
+
+ def 'Extract resource name, with #scenario.'() {
+ expect: 'resource name extraction returns expected result'
+ assert MultipartFileUtil.extractResourceNameFromPath(path) == expectedResourceName
+ where: 'following paths are tested'
+ scenario | path || expectedResourceName
+ 'no folder' | 'file.test' || 'file.test'
+ 'single folder' | 'folder/file.test' || 'file.test'
+ 'multiple folders' | 'f1/f2/file.test' || 'file.test'
+ 'with root' | '/f1/f2/file.test' || 'file.test'
+ 'windows notation' | 'c:\\f2\\file.test' || 'file.test'
+ 'empty path' | '' || ''
+ 'null path' | null || ''
+ }
+
def multipartZipFileFromResource(resourcePath) {
return new MockMultipartFile("file", "TEST.ZIP", "application/zip",
getClass().getResource(resourcePath).getBytes())
diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
index 99cda229d..0a7afc8f6 100755
--- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
+++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
@@ -39,6 +39,7 @@ import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.onap.cps.api.CpsAdminService;
import org.onap.cps.api.CpsDataService;
+import org.onap.cps.cpspath.parser.CpsPathUtil;
import org.onap.cps.notification.NotificationService;
import org.onap.cps.notification.Operation;
import org.onap.cps.spi.CpsDataPersistenceService;
@@ -354,10 +355,11 @@ public class CpsDataServiceImpl implements CpsDataService {
}
return dataNodes;
}
+ final String normalizedParentNodeXpath = CpsPathUtil.getNormalizedXpath(parentNodeXpath);
final ContainerNode containerNode =
- timedYangParser.parseData(contentType, nodeData, schemaContext, parentNodeXpath);
+ timedYangParser.parseData(contentType, nodeData, schemaContext, normalizedParentNodeXpath);
final Collection<DataNode> dataNodes = new DataNodeBuilder()
- .withParentNodeXpath(parentNodeXpath)
+ .withParentNodeXpath(normalizedParentNodeXpath)
.withContainerNode(containerNode)
.buildCollection();
if (dataNodes.isEmpty()) {
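
Note: a minimal sketch of the normalization step, using the non-normalized xpath exercised by the integration test further below; whitespace is stripped and attribute quoting is normalized.

```java
import org.onap.cps.cpspath.parser.CpsPathUtil;

final class XpathNormalizationSketch {
    public static void main(final String[] args) {
        final String parentNodeXpath = "/bookstore/categories[ @code=\"1\"]";
        // Expected, per the integration test: /bookstore/categories[@code='1']
        System.out.println(CpsPathUtil.getNormalizedXpath(parentNodeXpath));
    }
}
```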
diff --git a/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy
index e27b43763..c636f4b5f 100644
--- a/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy
+++ b/cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy
@@ -1,7 +1,7 @@
/*
* ============LICENSE_START=======================================================
* Copyright (C) 2021 Pantheon.tech
- * Modifications Copyright (C) 2020-2022 Nordix Foundation
+ * Modifications Copyright (C) 2020-2023 Nordix Foundation
* Modifications Copyright (C) 2022 Bell Canada.
* Modifications Copyright (C) 2023 TechMahindra Ltd.
* ================================================================================
@@ -29,50 +29,19 @@ class DataMapUtilsSpec extends Specification {
def noChildren = []
- def dataNode = buildDataNode(
- "/parent",[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
- buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
- buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
- buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
- [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
- ),
- ])
-
- def dataNodeWithAnchor = buildDataNodeWithAnchor(
- "/parent", 'anchor01',[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
- buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
- buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
- buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
- [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
- ),
- ])
-
- static def buildDataNode(xpath, leaves, children) {
- return new DataNodeBuilder().withXpath(xpath).withLeaves(leaves).withChildDataNodes(children).build()
- }
-
- static def buildDataNodeWithAnchor(xpath, anchorName, leaves, children) {
- return new DataNodeBuilder().withXpath(xpath).withAnchor(anchorName).withLeaves(leaves).withChildDataNodes(children).build()
- }
-
def 'Data node structure conversion to map.'() {
when: 'data node structure is converted to a map'
def result = DataMapUtils.toDataMap(dataNode)
-
then: 'root node identifier is null'
result.parent == null
-
then: 'root node leaves are top level elements'
result.parentLeaf == 'parentLeafValue'
result.parentLeafList == ['parentLeafListEntry1','parentLeafListEntry2']
-
and: 'leaves of child list element are listed as structures under common identifier'
result.'child-list'.collect().containsAll(['listElementLeaf': 'listElement1leafValue'],
['listElementLeaf': 'listElement2leafValue'])
-
and: 'leaves for child element is populated under its node identifier'
result.'child-object'.childLeaf == 'childLeafValue'
-
and: 'leaves for grandchild element is populated under its node identifier'
result.'child-object'.'grand-child-object'.grandChildLeaf == 'grandChildLeafValue'
}
@@ -84,10 +53,8 @@ class DataMapUtilsSpec extends Specification {
def parentNode = result.parent
parentNode.parentLeaf == 'parentLeafValue'
parentNode.parentLeafList == ['parentLeafListEntry1','parentLeafListEntry2']
-
and: 'leaves for child element is populated under its node identifier'
parentNode.'child-object'.childLeaf == 'childLeafValue'
-
and: 'leaves for grandchild element is populated under its node identifier'
parentNode.'child-object'.'grand-child-object'.grandChildLeaf == 'grandChildLeafValue'
}
@@ -112,15 +79,48 @@ class DataMapUtilsSpec extends Specification {
def parentNode = result.get("dataNode").parent
parentNode.parentLeaf == 'parentLeafValue'
parentNode.parentLeafList == ['parentLeafListEntry1','parentLeafListEntry2']
-
and: 'leaves for child element is populated under its node identifier'
assert parentNode.'child-object'.childLeaf == 'childLeafValue'
-
and: 'leaves for grandchild element is populated under its node identifier'
assert parentNode.'child-object'.'grand-child-object'.grandChildLeaf == 'grandChildLeafValue'
-
and: 'data node is associated with anchor name'
assert result.get('anchorName') == 'anchor01'
}
+
+ def 'Data node without leaves and without children.'() {
+ given: 'a datanode with no leaves and no children'
+ def dataNodeWithoutData = new DataNodeBuilder().withXpath('some xpath').build()
+ when: 'it is converted to a map'
+ def result = DataMapUtils.toDataMap(dataNodeWithoutData)
+ then: 'an empty object map is returned'
+ result.isEmpty()
+ }
+
+ def dataNode = buildDataNode(
+ "/parent",[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
+ buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
+ buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
+ buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
+ [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
+ ),
+ ])
+
+ def dataNodeWithAnchor = buildDataNodeWithAnchor(
+ "/parent", 'anchor01',[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
+ buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
+ buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
+ buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
+ [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
+ ),
+ ])
+
+ def buildDataNode(xpath, leaves, children) {
+ return new DataNodeBuilder().withXpath(xpath).withLeaves(leaves).withChildDataNodes(children).build()
+ }
+
+ def buildDataNodeWithAnchor(xpath, anchorName, leaves, children) {
+ return new DataNodeBuilder().withXpath(xpath).withAnchor(anchorName).withLeaves(leaves).withChildDataNodes(children).build()
+ }
+
}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
index 2efbcb2af..5c9ced34e 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
@@ -47,85 +47,87 @@ class CpsDataServiceIntegrationSpec extends FunctionalSpecBase {
def setup() {
objectUnderTest = cpsDataService
- originalCountBookstoreChildNodes = countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
-}
+ originalCountBookstoreChildNodes = countDataNodesInBookstore()
+ }
-def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() {
- when: 'get data nodes for bookstore container'
- def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', fetchDescendantsOption)
- then: 'the tree consist ouf of #expectNumberOfDataNodes data nodes'
- assert countDataNodesInTree(result) == expectNumberOfDataNodes
- and: 'the top level data node has the expected attribute and value'
- assert result.leaves['bookstore-name'] == ['Easons']
- and: 'they are from the correct dataspace'
- assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1]
- and: 'they are from the correct anchor'
- assert result.anchorName == [BOOKSTORE_ANCHOR_1]
- where: 'the following option is used'
- fetchDescendantsOption || expectNumberOfDataNodes
- OMIT_DESCENDANTS || 1
- DIRECT_CHILDREN_ONLY || 6
- INCLUDE_ALL_DESCENDANTS || 17
- new FetchDescendantsOption(2) || 17
-}
+ def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() {
+ when: 'get data nodes for bookstore container'
+ def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', fetchDescendantsOption)
+ then: 'the tree consists of #expectNumberOfDataNodes data nodes'
+ assert countDataNodesInTree(result) == expectNumberOfDataNodes
+ and: 'the top level data node has the expected attribute and value'
+ assert result.leaves['bookstore-name'] == ['Easons']
+ and: 'they are from the correct dataspace'
+ assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1]
+ and: 'they are from the correct anchor'
+ assert result.anchorName == [BOOKSTORE_ANCHOR_1]
+ where: 'the following option is used'
+ fetchDescendantsOption || expectNumberOfDataNodes
+ OMIT_DESCENDANTS || 1
+ DIRECT_CHILDREN_ONLY || 6
+ INCLUDE_ALL_DESCENDANTS || 17
+ new FetchDescendantsOption(2) || 17
+ }
-def 'Read bookstore top-level container(s) using "root" path variations.'() {
- when: 'get data nodes for bookstore container'
- def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, root, OMIT_DESCENDANTS)
- then: 'the tree consist ouf of one data node'
- assert countDataNodesInTree(result) == 1
- and: 'the top level data node has the expected attribute and value'
- assert result.leaves['bookstore-name'] == ['Easons']
- where: 'the following variations of "root" are used'
- root << [ '/', '' ]
-}
+ def 'Read bookstore top-level container(s) using "root" path variations.'() {
+ when: 'get data nodes for bookstore container'
+ def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, root, OMIT_DESCENDANTS)
+ then: 'the tree consists of one data node'
+ assert countDataNodesInTree(result) == 1
+ and: 'the top level data node has the expected attribute and value'
+ assert result.leaves['bookstore-name'] == ['Easons']
+ where: 'the following variations of "root" are used'
+ root << [ '/', '' ]
+ }
-def 'Read data nodes with error: #cpsPath'() {
- when: 'attempt to get data nodes using invalid path'
- objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, DIRECT_CHILDREN_ONLY)
- then: 'a #expectedException is thrown'
- thrown(expectedException)
- where:
- cpsPath || expectedException
- 'invalid path' || CpsPathException
- '/non-existing-path' || DataNodeNotFoundException
-}
+ def 'Read data nodes with error: #cpsPath'() {
+ when: 'attempt to get data nodes using invalid path'
+ objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, DIRECT_CHILDREN_ONLY)
+ then: 'a #expectedException is thrown'
+ thrown(expectedException)
+ where:
+ cpsPath || expectedException
+ 'invalid path' || CpsPathException
+ '/non-existing-path' || DataNodeNotFoundException
+ }
-def 'Read (multiple) data nodes (batch) with #cpsPath'() {
- when: 'attempt to get data nodes using invalid path'
- objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ cpsPath ], DIRECT_CHILDREN_ONLY)
- then: 'no exception is thrown'
- noExceptionThrown()
- where:
- cpsPath << [ 'invalid path', '/non-existing-path' ]
-}
+ def 'Read (multiple) data nodes (batch) with #cpsPath'() {
+ when: 'attempt to get data nodes using invalid path'
+ objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ cpsPath ], DIRECT_CHILDREN_ONLY)
+ then: 'no exception is thrown'
+ noExceptionThrown()
+ where:
+ cpsPath << [ 'invalid path', '/non-existing-path' ]
+ }
-def 'Delete root data node.'() {
- when: 'the "root" is deleted'
- objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ '/' ], now)
- and: 'attempt to get the top level data node'
- objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY)
- then: 'an datanode not found exception is thrown'
- thrown(DataNodeNotFoundException)
- cleanup:
- restoreBookstoreDataAnchor(1)
-}
+ def 'Delete root data node.'() {
+ when: 'the "root" is deleted'
+ objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ '/' ], now)
+ and: 'attempt to get the top level data node'
+ objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY)
+ then: 'a datanode not found exception is thrown'
+ thrown(DataNodeNotFoundException)
+ cleanup:
+ restoreBookstoreDataAnchor(1)
+ }
-def 'Add and Delete a (container) data node.'() {
- given: 'new (webinfo) datanode'
- def json = '{"webinfo": {"domain-name":"ourbookstore.com" ,"contact-email":"info@ourbookstore.com" }}'
- when: 'the new datanode is saved'
- objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now)
- then: 'it can be retrieved by its xpath'
- def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', DIRECT_CHILDREN_ONLY)
+ def 'Add and Delete a (container) data node using #scenario.'() {
+ when: 'the new datanode is saved'
+ objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , parentXpath, json, now)
+ then: 'it can be retrieved by its normalized xpath'
+ def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, normalizedXpathToNode, DIRECT_CHILDREN_ONLY)
assert result.size() == 1
- assert result[0].xpath == '/bookstore/webinfo'
+ assert result[0].xpath == normalizedXpathToNode
and: 'there is now one extra datanode'
- assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore()
when: 'the new datanode is deleted'
- objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', now)
+ objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, normalizedXpathToNode, now)
then: 'the original number of data nodes is restored'
- assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes == countDataNodesInBookstore()
+ where:
+ scenario | parentXpath | json || normalizedXpathToNode
+ 'normalized parent xpath' | '/bookstore' | '{"webinfo": {"domain-name":"ourbookstore.com", "contact-email":"info@ourbookstore.com" }}' || "/bookstore/webinfo"
+ 'non-normalized parent xpath' | '/bookstore/categories[ @code="1"]' | '{"books": {"title":"new" }}' || "/bookstore/categories[@code='1']/books[@title='new']"
}
def 'Attempt to create a top level data node using root.'() {
@@ -186,12 +188,12 @@ def 'Add and Delete a (container) data node.'() {
objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1
objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1
and: 'there are now two extra data nodes'
- assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes + 2 == countDataNodesInBookstore()
when: 'the new elements are deleted'
objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now)
then: 'the original number of data nodes is restored'
- assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes == countDataNodesInBookstore()
}
def 'Add list (element) data nodes that already exist.'() {
@@ -203,7 +205,7 @@ def 'Add and Delete a (container) data node.'() {
def exceptionThrown = thrown(AlreadyDefinedExceptionBatch)
exceptionThrown.alreadyDefinedXpaths == [ '/bookstore/categories[@code=\'1\']' ] as Set
and: 'there is now one extra data nodes'
- assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore()
cleanup:
restoreBookstoreDataAnchor(1)
}
@@ -216,7 +218,7 @@ def 'Add and Delete a (container) data node.'() {
when: 'the new element is deleted'
objectUnderTest.deleteListOrListElement(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
then: 'the original number of data nodes is restored'
- assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes == countDataNodesInBookstore()
}
def 'Add and Delete a batch of lists (element) data nodes.'() {
@@ -229,12 +231,12 @@ def 'Add and Delete a (container) data node.'() {
assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1
assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1
and: 'there are now two extra data nodes'
- assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes + 2 == countDataNodesInBookstore()
when: 'the new elements are deleted'
objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now)
then: 'the original number of data nodes is restored'
- assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes == countDataNodesInBookstore()
}
def 'Add and Delete a batch of lists (element) data nodes with partial success.'() {
@@ -247,7 +249,7 @@ def 'Add and Delete a (container) data node.'() {
def exceptionThrown = thrown(AlreadyDefinedExceptionBatch)
assert exceptionThrown.alreadyDefinedXpaths == [ '/bookstore/categories[@code=\'1\']' ] as Set
and: 'there is now one extra data node'
- assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+ assert originalCountBookstoreChildNodes + 1 == countDataNodesInBookstore()
cleanup:
restoreBookstoreDataAnchor(1)
}
@@ -362,4 +364,8 @@ def 'Add and Delete a (container) data node.'() {
cleanup:
restoreBookstoreDataAnchor(1)
}
+
+ def countDataNodesInBookstore() {
+ return countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', INCLUDE_ALL_DESCENDANTS))
+ }
}
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
index 6b1efe955..74070b1d8 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
@@ -64,7 +64,7 @@ class CpsPerfTestBase extends PerfTestBase {
addAnchorsWithData(5, CPS_PERFORMANCE_TEST_DATASPACE, BOOKSTORE_SCHEMA_SET, 'bookstore', data)
stopWatch.stop()
def durationInMillis = stopWatch.getTotalTimeMillis()
- recordAndAssertPerformance('Creating bookstore anchors with large data tree', 3_000, durationInMillis)
+ recordAndAssertPerformance('Creating bookstore anchors with large data tree', 1_500, durationInMillis)
}
def addOpenRoadModel() {
@@ -81,7 +81,7 @@ class CpsPerfTestBase extends PerfTestBase {
addAnchorsWithData(5, CPS_PERFORMANCE_TEST_DATASPACE, LARGE_SCHEMA_SET, 'openroadm', data)
stopWatch.stop()
def durationInMillis = stopWatch.getTotalTimeMillis()
- recordAndAssertPerformance('Creating openroadm anchors with large data tree', 30_000, durationInMillis)
+ recordAndAssertPerformance('Creating openroadm anchors with large data tree', 20_000, durationInMillis)
}
def generateOpenRoadData(numberOfNodes) {
@@ -98,8 +98,8 @@ class CpsPerfTestBase extends PerfTestBase {
assert countDataNodesInTree(result) == 1
stopWatch.stop()
def durationInMillis = stopWatch.getTotalTimeMillis()
- then: 'all data is read within 30 seconds (warm up not critical)'
- recordAndAssertPerformance("Warming database", 30_000, durationInMillis)
+ then: 'all data is read within 20 seconds'
+ recordAndAssertPerformance("Warming database", 20_000, durationInMillis)
}
}