Diffstat (limited to 'cps-ncmp-service/src')
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java | 6
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java (renamed from cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java) | 8
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java (renamed from cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java) | 8
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java | 55
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java | 77
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java | 39
-rw-r--r--  cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java | 33
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy | 6
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy (renamed from cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy) | 6
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy (renamed from cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy) | 56
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy | 2
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy | 33
-rw-r--r--  cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy | 7
-rw-r--r--  cps-ncmp-service/src/test/resources/application.yml | 3
14 files changed, 226 insertions(+), 113 deletions(-)
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
index 9f7ef1e882..42d8135057 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/NcmpEventResponseCode.java
@@ -25,8 +25,10 @@ import lombok.Getter;
@Getter
public enum NcmpEventResponseCode {
- CODE_100("100", "cm handle id(s) not found"),
- CODE_101("101", "cm handle(s) not ready");
+ CM_HANDLES_NOT_FOUND("100", "cm handle id(s) not found"),
+ CM_HANDLES_NOT_READY("101", "cm handle(s) not ready"),
+ DMI_SERVICE_NOT_RESPONDING("102", "dmi plugin service is not responding"),
+ UNABLE_TO_READ_RESOURCE_DATA("103", "dmi plugin service is not able to read resource data");
private final String statusCode;
private final String statusMessage;
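The two added response codes cover DMI-side failures: 102 when the DMI plugin service does not respond at all, and 103 when it responds but cannot read the requested resource data. Below is a minimal stand-alone sketch (illustration only, not part of the patch; the class and main method are hypothetical) of how the failure cause is mapped to one of these codes, mirroring the logic added to DmiDataOperations further down, and how the Lombok @Getter exposes the code/message pair.

import java.util.concurrent.TimeoutException;
import org.onap.cps.ncmp.api.NcmpEventResponseCode;
import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException;

public class ResponseCodeSelectionSketch {

    // Same rule as handleTaskCompletionException: an HttpClientRequestException cause means the
    // DMI plugin answered but could not read the data; any other failure is treated as "not responding".
    static NcmpEventResponseCode selectResponseCode(final Throwable throwable) {
        if (throwable.getCause() instanceof HttpClientRequestException) {
            return NcmpEventResponseCode.UNABLE_TO_READ_RESOURCE_DATA;
        }
        return NcmpEventResponseCode.DMI_SERVICE_NOT_RESPONDING;
    }

    public static void main(final String[] args) {
        final NcmpEventResponseCode responseCode =
                selectResponseCode(new RuntimeException(new TimeoutException()));
        // prints: 102 - dmi plugin service is not responding
        System.out.println(responseCode.getStatusCode() + " - " + responseCode.getStatusMessage());
    }
}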
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java
index 0ac0fb92de..0044182ddd 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java
@@ -36,7 +36,7 @@ import org.springframework.stereotype.Component;
@Slf4j
@RequiredArgsConstructor
@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true)
-public class NcmpAsyncRequestResponseEventConsumer {
+public class AsyncRestRequestResponseEventConsumer {
private final EventsPublisher<NcmpAsyncRequestResponseEvent> eventsPublisher;
private final NcmpAsyncRequestResponseEventMapper ncmpAsyncRequestResponseEventMapper;
@@ -48,13 +48,15 @@ public class NcmpAsyncRequestResponseEventConsumer {
*/
@KafkaListener(
topics = "${app.ncmp.async-m2m.topic}",
+ filter = "includeNonCloudEventsOnly",
+ groupId = "ncmp-async-rest-request-event-group",
properties = {"spring.json.value.default.type=org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent"})
public void consumeAndForward(final DmiAsyncRequestResponseEvent dmiAsyncRequestResponseEvent) {
log.debug("Consuming event {} ...", dmiAsyncRequestResponseEvent);
-
final NcmpAsyncRequestResponseEvent ncmpAsyncRequestResponseEvent =
ncmpAsyncRequestResponseEventMapper.toNcmpAsyncEvent(dmiAsyncRequestResponseEvent);
eventsPublisher.publishEvent(ncmpAsyncRequestResponseEvent.getEventTarget(),
- ncmpAsyncRequestResponseEvent.getEventId(), ncmpAsyncRequestResponseEvent);
+ ncmpAsyncRequestResponseEvent.getEventId(),
+ ncmpAsyncRequestResponseEvent);
}
}
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java
index 4a0ec5c493..9649b0109d 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java
@@ -37,13 +37,13 @@ import org.springframework.stereotype.Component;
@Slf4j
@RequiredArgsConstructor
@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true)
-public class NcmpAsyncDataOperationEventConsumer {
+public class DataOperationEventConsumer {
private final EventsPublisher<CloudEvent> eventsPublisher;
/**
- * Consume the DataOperationResponseEvent published by producer to topic 'async-m2m.topic'
- * and publish the same to the client specified topic.
+ * Consume the DataOperation cloud event published by producer to topic 'async-m2m.topic'
+ * and publish the same to client specified topic.
*
* @param dataOperationEventConsumerRecord consuming event as a ConsumerRecord.
*/
@@ -51,7 +51,7 @@ public class NcmpAsyncDataOperationEventConsumer {
topics = "${app.ncmp.async-m2m.topic}",
filter = "includeDataOperationEventsOnly",
groupId = "ncmp-data-operation-event-group",
- properties = {"spring.json.value.default.type=org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent"})
+ containerFactory = "cloudEventConcurrentKafkaListenerContainerFactory")
public void consumeAndPublish(final ConsumerRecord<String, CloudEvent> dataOperationEventConsumerRecord) {
log.info("Consuming event payload {} ...", dataOperationEventConsumerRecord.value());
final String eventTarget = KafkaHeaders.getParsedKafkaHeader(
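The listener now delegates deserialization to a dedicated cloud-event container factory instead of a spring.json default type. The factory bean itself is defined elsewhere (the tests import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig); the sketch below only illustrates the typical shape of such a bean, with an assumed bootstrap address and consumer properties.

import io.cloudevents.CloudEvent;
import io.cloudevents.kafka.CloudEventDeserializer;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
public class CloudEventListenerFactorySketch {

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, CloudEvent>
            cloudEventConcurrentKafkaListenerContainerFactory() {
        // Assumed consumer properties; the real values come from the application configuration.
        final Map<String, Object> consumerProperties = Map.of(
                ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092",
                ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class,
                ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, CloudEventDeserializer.class);
        final ConcurrentKafkaListenerContainerFactory<String, CloudEvent> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<String, CloudEvent>(consumerProperties));
        return factory;
    }
}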
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java
deleted file mode 100644
index 76cc0c4b7b..0000000000
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2023 Nordix Foundation
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.ncmp.api.impl.async;
-
-import io.cloudevents.CloudEvent;
-import io.cloudevents.kafka.impl.KafkaHeaders;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.kafka.listener.adapter.RecordFilterStrategy;
-
-/**
- * Data operation record filter strategy, which helps to filter the consumer records.
- *
- */
-@Configuration
-@Slf4j
-public class DataOperationRecordFilterStrategy {
-
- /**
- * Filtering the consumer records based on the eventType header, It
- * returns boolean, true means filter the consumer record and false
- * means not filter the consumer record.
- * @return boolean value.
- */
- @Bean
- public RecordFilterStrategy<String, CloudEvent> includeDataOperationEventsOnly() {
- return consumedRecord -> {
- final String eventTypeHeaderValue = KafkaHeaders.getParsedKafkaHeader(consumedRecord.headers(), "ce_type");
- if (eventTypeHeaderValue == null) {
- log.trace("No ce_type header found, possibly a legacy event (ignored)");
- return true;
- }
- return !(eventTypeHeaderValue.contains("DataOperationEvent"));
- };
- }
-}
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java
new file mode 100644
index 0000000000..0404790408
--- /dev/null
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java
@@ -0,0 +1,77 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.ncmp.api.impl.async;
+
+import io.cloudevents.CloudEvent;
+import io.cloudevents.kafka.impl.KafkaHeaders;
+import java.io.Serializable;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.common.header.Headers;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.listener.adapter.RecordFilterStrategy;
+
+/**
+ * Record filter strategies, which help to filter the consumer records based on the cloud event type header.
+ *
+ */
+@Configuration
+@Slf4j
+public class RecordFilterStrategies {
+
+ private static final boolean EXCLUDE_EVENT = true;
+
+ /**
+ * Includes only DataOperation events, based on the cloud event type header.
+ * Returning true means the consumer record is excluded and returning false
+ * means it is included.
+ * @return record filter strategy that includes only DataOperation cloud events.
+ */
+ @Bean
+ public RecordFilterStrategy<String, CloudEvent> includeDataOperationEventsOnly() {
+ return consumerRecord ->
+ isNotCloudEventOfType(consumerRecord.headers(), "DataOperationEvent");
+ }
+
+ /**
+ * Includes only the legacy events, i.e. non-cloud events, based on the cloud event type header.
+ * Returning true means the consumer record is excluded and returning false means
+ * it is included.
+ * @return record filter strategy that includes only legacy (non-cloud) events.
+ */
+ @Bean
+ public RecordFilterStrategy<String, Serializable> includeNonCloudEventsOnly() {
+ return consumerRecord -> isCloudEvent(consumerRecord.headers());
+ }
+
+ private boolean isCloudEvent(final Headers headers) {
+ return headers.lastHeader("ce_type") != null;
+ }
+
+ private boolean isNotCloudEventOfType(final Headers headers, final String requiredEventType) {
+ final String eventTypeHeaderValue = KafkaHeaders.getParsedKafkaHeader(headers, "ce_type");
+ if (eventTypeHeaderValue == null) {
+ log.trace("No ce_type header found, possibly a legacy event (ignored)");
+ return EXCLUDE_EVENT;
+ }
+ return !(eventTypeHeaderValue.contains(requiredEventType));
+ }
+}
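The two filter beans follow Spring Kafka's RecordFilterStrategy convention: returning true discards the record before it reaches the listener. A stand-alone sketch of that behaviour follows (illustration only; it re-implements the ce_type checks with plain Kafka headers rather than the cloud-events KafkaHeaders helper used above).

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class RecordFilterSemanticsSketch {

    // Mirrors includeDataOperationEventsOnly: exclude records without a ce_type header
    // and records whose ce_type does not name a DataOperationEvent.
    static boolean excludedByIncludeDataOperationEventsOnly(final Headers headers) {
        final Header ceType = headers.lastHeader("ce_type");
        return ceType == null
                || !new String(ceType.value(), StandardCharsets.UTF_8).contains("DataOperationEvent");
    }

    // Mirrors includeNonCloudEventsOnly: any ce_type header marks a cloud event, which is excluded.
    static boolean excludedByIncludeNonCloudEventsOnly(final Headers headers) {
        return headers.lastHeader("ce_type") != null;
    }

    public static void main(final String[] args) {
        final Headers legacyHeaders = new RecordHeaders();
        final Headers cloudEventHeaders = new RecordHeaders();
        cloudEventHeaders.add("ce_type", "DataOperationEvent".getBytes(StandardCharsets.UTF_8));

        System.out.println(excludedByIncludeDataOperationEventsOnly(legacyHeaders));      // true  (discarded)
        System.out.println(excludedByIncludeDataOperationEventsOnly(cloudEventHeaders));  // false (kept)
        System.out.println(excludedByIncludeNonCloudEventsOnly(legacyHeaders));           // false (kept)
        System.out.println(excludedByIncludeNonCloudEventsOnly(cloudEventHeaders));       // true  (discarded)
    }
}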
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
index b4784f418f..8f0975f177 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/operations/DmiDataOperations.java
@@ -30,8 +30,10 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
+import org.onap.cps.ncmp.api.NcmpEventResponseCode;
import org.onap.cps.ncmp.api.impl.client.DmiRestClient;
import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration;
+import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException;
import org.onap.cps.ncmp.api.impl.executor.TaskExecutor;
import org.onap.cps.ncmp.api.impl.utils.DmiServiceUrlBuilder;
import org.onap.cps.ncmp.api.impl.utils.data.operation.ResourceDataOperationRequestUtils;
@@ -43,7 +45,9 @@ import org.onap.cps.spi.exceptions.CpsException;
import org.onap.cps.utils.JsonObjectMapper;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
+import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
+import org.springframework.web.util.UriComponentsBuilder;
/**
* Operations class for DMI data.
@@ -240,14 +244,37 @@ public class DmiDataOperations extends DmiOperations {
final String dataOperationRequestBodiesAsJsonString =
jsonObjectMapper.asJsonString(dmiDataOperationRequestBodies);
TaskExecutor.executeTask(() -> dmiRestClient.postOperationWithJsonData(dataOperationResourceUrl,
- dataOperationRequestBodiesAsJsonString, READ),
+ dataOperationRequestBodiesAsJsonString, READ),
DEFAULT_ASYNC_TASK_EXECUTOR_TIMEOUT_IN_MILLISECONDS)
- .whenCompleteAsync(this::handleTaskCompletion);
+ .whenCompleteAsync((response, throwable) -> handleTaskCompletionException(throwable,
+ dataOperationResourceUrl, dmiDataOperationRequestBodies));
}
- private void handleTaskCompletion(final Object response, final Throwable throwable) {
- // TODO Need to publish an error response to client given topic.
- // Code should be implemented into https://jira.onap.org/browse/CPS-1558 (
- // NCMP : Handle non responding DMI-Plugin)
+ private void handleTaskCompletionException(final Throwable throwable,
+ final String dataOperationResourceUrl,
+ final List<DmiDataOperation> dmiDataOperationRequestBodies) {
+ if (throwable != null) {
+ final MultiValueMap<String, String> dataOperationResourceUrlParameters =
+ UriComponentsBuilder.fromUriString(dataOperationResourceUrl).build().getQueryParams();
+ final String topicName = dataOperationResourceUrlParameters.get("topic").get(0);
+ final String requestId = dataOperationResourceUrlParameters.get("requestId").get(0);
+
+ final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>>
+ cmHandleIdsPerResponseCodesPerOperationId = new LinkedMultiValueMap<>();
+
+ dmiDataOperationRequestBodies.forEach(dmiDataOperationRequestBody -> {
+ final List<String> cmHandleIds = dmiDataOperationRequestBody.getCmHandles().stream()
+ .map(CmHandle::getId).collect(Collectors.toList());
+ if (throwable.getCause() instanceof HttpClientRequestException) {
+ cmHandleIdsPerResponseCodesPerOperationId.add(dmiDataOperationRequestBody.getOperationId(),
+ Map.of(NcmpEventResponseCode.UNABLE_TO_READ_RESOURCE_DATA, cmHandleIds));
+ } else {
+ cmHandleIdsPerResponseCodesPerOperationId.add(dmiDataOperationRequestBody.getOperationId(),
+ Map.of(NcmpEventResponseCode.DMI_SERVICE_NOT_RESPONDING, cmHandleIds));
+ }
+ });
+ ResourceDataOperationRequestUtils.publishErrorMessageToClientTopic(topicName, requestId,
+ cmHandleIdsPerResponseCodesPerOperationId);
+ }
}
}
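handleTaskCompletionException recovers the client topic and request id from the DMI resource URL's query parameters. A small sketch of that extraction in isolation (the host and port are illustrative):

import org.springframework.util.MultiValueMap;
import org.springframework.web.util.UriComponentsBuilder;

public class TopicAndRequestIdExtractionSketch {

    public static void main(final String[] args) {
        final String dataOperationResourceUrl =
                "http://dmi-service-name:8080/dmi/v1/data?topic=my-topic-name&requestId=some-request-id";
        final MultiValueMap<String, String> queryParams =
                UriComponentsBuilder.fromUriString(dataOperationResourceUrl).build().getQueryParams();
        final String topicName = queryParams.getFirst("topic");      // my-topic-name
        final String requestId = queryParams.getFirst("requestId");  // some-request-id
        System.out.println(topicName + " / " + requestId);
    }
}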
diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
index 957f48a862..d8fb904f21 100644
--- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/utils/data/operation/ResourceDataOperationRequestUtils.java
@@ -68,7 +68,7 @@ public class ResourceDataOperationRequestUtils {
final Collection<YangModelCmHandle> yangModelCmHandles) {
final Map<String, List<DmiDataOperation>> dmiDataOperationsOutPerDmiServiceName = new HashMap<>();
- final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerOperationIdPerResponseCode
+ final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerResponseCodesPerOperationId
= new LinkedMultiValueMap<>();
final Set<String> nonReadyCmHandleIdsLookup = filterAndGetNonReadyCmHandleIds(yangModelCmHandles);
@@ -100,25 +100,34 @@ public class ResourceDataOperationRequestUtils {
}
}
}
- populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerOperationIdPerResponseCode,
- dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CODE_100, nonExistingCmHandleIds);
- populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerOperationIdPerResponseCode,
- dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CODE_101, nonReadyCmHandleIds);
+ populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerResponseCodesPerOperationId,
+ dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CM_HANDLES_NOT_FOUND,
+ nonExistingCmHandleIds);
+ populateCmHandleIdsPerOperationIdPerResponseCode(cmHandleIdsPerResponseCodesPerOperationId,
+ dataOperationDefinitionIn.getOperationId(), NcmpEventResponseCode.CM_HANDLES_NOT_READY,
+ nonReadyCmHandleIds);
}
- if (!cmHandleIdsPerOperationIdPerResponseCode.isEmpty()) {
- publishErrorMessageToClientTopic(topicParamInQuery, requestId, cmHandleIdsPerOperationIdPerResponseCode);
+ if (!cmHandleIdsPerResponseCodesPerOperationId.isEmpty()) {
+ publishErrorMessageToClientTopic(topicParamInQuery, requestId, cmHandleIdsPerResponseCodesPerOperationId);
}
return dmiDataOperationsOutPerDmiServiceName;
}
+ /**
+ * Creates a data operation cloud event and publishes it to the client topic.
+ *
+ * @param clientTopic client given topic
+ * @param requestId unique identifier per request
+ * @param cmHandleIdsPerResponseCodesPerOperationId cm handle ids per response code, per operation id
+ */
@Async
- private static void publishErrorMessageToClientTopic(final String clientTopic,
+ public static void publishErrorMessageToClientTopic(final String clientTopic,
final String requestId,
final MultiValueMap<String,
Map<NcmpEventResponseCode, List<String>>>
- cmHandleIdsPerOperationIdPerResponseCode) {
+ cmHandleIdsPerResponseCodesPerOperationId) {
final CloudEvent dataOperationCloudEvent = DataOperationEventCreator.createDataOperationEvent(clientTopic,
- requestId, cmHandleIdsPerOperationIdPerResponseCode);
+ requestId, cmHandleIdsPerResponseCodesPerOperationId);
final EventsPublisher<CloudEvent> eventsPublisher = CpsApplicationContext.getCpsBean(EventsPublisher.class);
eventsPublisher.publishCloudEvent(clientTopic, requestId, dataOperationCloudEvent);
}
@@ -166,13 +175,13 @@ public class ResourceDataOperationRequestUtils {
}
private static void populateCmHandleIdsPerOperationIdPerResponseCode(final MultiValueMap<String,
- Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerOperationIdByResponseCode,
+ Map<NcmpEventResponseCode, List<String>>> cmHandleIdsPerResponseCodesPerOperationId,
final String operationId,
final NcmpEventResponseCode
ncmpEventResponseCode,
final List<String> cmHandleIds) {
if (!cmHandleIds.isEmpty()) {
- cmHandleIdsPerOperationIdByResponseCode.add(operationId, Map.of(ncmpEventResponseCode, cmHandleIds));
+ cmHandleIdsPerResponseCodesPerOperationId.add(operationId, Map.of(ncmpEventResponseCode, cmHandleIds));
}
}
}
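Both DmiDataOperations and ResourceDataOperationRequestUtils now build the same structure before publishing: a MultiValueMap keyed by operation id, where each value is a map from response code to the affected cm handle ids. A sketch of that shape with made-up operation and cm handle ids:

import java.util.List;
import java.util.Map;
import org.onap.cps.ncmp.api.NcmpEventResponseCode;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

public class ErrorMapShapeSketch {

    public static void main(final String[] args) {
        final MultiValueMap<String, Map<NcmpEventResponseCode, List<String>>>
                cmHandleIdsPerResponseCodesPerOperationId = new LinkedMultiValueMap<>();

        // operation 'operation-1': two cm handles were not found, one is not ready yet
        cmHandleIdsPerResponseCodesPerOperationId.add("operation-1",
                Map.of(NcmpEventResponseCode.CM_HANDLES_NOT_FOUND, List.of("ch-1", "ch-2")));
        cmHandleIdsPerResponseCodesPerOperationId.add("operation-1",
                Map.of(NcmpEventResponseCode.CM_HANDLES_NOT_READY, List.of("ch-3")));

        // publishErrorMessageToClientTopic turns this map into one data operation cloud event
        // per client topic / request id and publishes it.
        System.out.println(cmHandleIdsPerResponseCodesPerOperationId);
    }
}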
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
index fe7b3f11cb..17df7b0ba6 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy
@@ -37,7 +37,7 @@ import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
import java.time.Duration
-@SpringBootTest(classes = [EventsPublisher, NcmpAsyncRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper])
+@SpringBootTest(classes = [EventsPublisher, AsyncRestRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper])
@Testcontainers
@DirtiesContext
class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBaseSpec {
@@ -52,8 +52,8 @@ class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBase
Mappers.getMapper(NcmpAsyncRequestResponseEventMapper.class)
@SpringBean
- NcmpAsyncRequestResponseEventConsumer ncmpAsyncRequestResponseEventConsumer =
- new NcmpAsyncRequestResponseEventConsumer(cpsAsyncRequestResponseEventPublisher,
+ AsyncRestRequestResponseEventConsumer ncmpAsyncRequestResponseEventConsumer =
+ new AsyncRestRequestResponseEventConsumer(cpsAsyncRequestResponseEventPublisher,
ncmpAsyncRequestResponseEventMapper)
@Autowired
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy
index 6353288713..7b54f591a1 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy
@@ -44,16 +44,16 @@ import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
import java.time.Duration
-@SpringBootTest(classes = [EventsPublisher, NcmpAsyncDataOperationEventConsumer, DataOperationRecordFilterStrategy,JsonObjectMapper, ObjectMapper])
+@SpringBootTest(classes = [EventsPublisher, DataOperationEventConsumer, RecordFilterStrategies,JsonObjectMapper, ObjectMapper])
@Testcontainers
@DirtiesContext
-class NcmpAsyncDataOperationEventConsumerSpec extends MessagingBaseSpec {
+class DataOperationEventConsumerSpec extends MessagingBaseSpec {
@SpringBean
EventsPublisher asyncDataOperationEventPublisher = new EventsPublisher<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate)
@SpringBean
- NcmpAsyncDataOperationEventConsumer objectUnderTest = new NcmpAsyncDataOperationEventConsumer(asyncDataOperationEventPublisher)
+ DataOperationEventConsumer objectUnderTest = new DataOperationEventConsumer(asyncDataOperationEventPublisher)
@Autowired
JsonObjectMapper jsonObjectMapper
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy
index f577f55ba2..43d06483aa 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy
@@ -20,14 +20,11 @@
package org.onap.cps.ncmp.api.impl.async
-import io.cloudevents.CloudEvent
import io.cloudevents.core.builder.CloudEventBuilder
-import io.cloudevents.kafka.CloudEventSerializer
-import org.apache.kafka.clients.producer.KafkaProducer
-import org.apache.kafka.clients.producer.ProducerRecord
-import org.apache.kafka.common.serialization.StringSerializer
+import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig
import org.onap.cps.ncmp.api.impl.events.EventsPublisher
import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec
+import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent
import org.spockframework.spring.SpringBean
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.annotation.Value
@@ -39,15 +36,18 @@ import org.springframework.test.annotation.DirtiesContext
import org.testcontainers.spock.Testcontainers
import java.util.concurrent.TimeUnit
-@SpringBootTest(classes =[NcmpAsyncDataOperationEventConsumer, DataOperationRecordFilterStrategy])
+@SpringBootTest(classes =[DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig])
@DirtiesContext
@Testcontainers
@EnableAutoConfiguration
-class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSpec {
+class FilterStrategiesIntegrationSpec extends MessagingBaseSpec {
@SpringBean
EventsPublisher mockEventsPublisher = Mock()
+ @SpringBean
+ NcmpAsyncRequestResponseEventMapper mapper = Stub()
+
@Autowired
private KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry
@@ -58,16 +58,39 @@ class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSp
activateListeners()
}
+ def 'Legacy event consumer with cloud event.'() {
+ given: 'a cloud event of type DataOperationEvent'
+ def cloudEvent = CloudEventBuilder.v1().withId('some id')
+ .withType('DataOperationEvent')
+ .withSource(URI.create('some-source'))
+ .build()
+ when: 'send the cloud event'
+ cloudEventKafkaTemplate.send(topic, cloudEvent)
+ and: 'wait a little for async processing of message'
+ TimeUnit.MILLISECONDS.sleep(300)
+ then: 'event is not consumed'
+ 0 * mockEventsPublisher.publishEvent(*_)
+ }
+
+ def 'Legacy event consumer with valid legacy event.'() {
+ given: 'a legacy (non-cloud) event'
+ DmiAsyncRequestResponseEvent legacyEvent = new DmiAsyncRequestResponseEvent(eventId:'legacyEventId', eventTarget:'legacyEventTarget')
+ when: 'send the legacy event'
+ legacyEventKafkaTemplate.send(topic, legacyEvent)
+ and: 'wait a little for async processing of message'
+ TimeUnit.MILLISECONDS.sleep(300)
+ then: 'the event is consumed by the (legacy) AsyncRestRequest consumer'
+ 1 * mockEventsPublisher.publishEvent(*_)
+ }
+
def 'Filtering Cloud Events on Type.'() {
given: 'a cloud event of type: #eventType'
def cloudEvent = CloudEventBuilder.v1().withId('some id')
- .withType(eventType)
- .withSource(URI.create('some-source'))
- .build()
+ .withType(eventType)
+ .withSource(URI.create('some-source'))
+ .build()
when: 'send the cloud event'
- ProducerRecord<String, CloudEvent> record = new ProducerRecord<>(topic, cloudEvent)
- KafkaProducer<String, CloudEvent> producer = new KafkaProducer<>(eventProducerConfigProperties(CloudEventSerializer))
- producer.send(record)
+ cloudEventKafkaTemplate.send(topic, cloudEvent)
and: 'wait a little for async processing of message'
TimeUnit.MILLISECONDS.sleep(300)
then: 'the event has only been forwarded for the correct type'
@@ -79,11 +102,11 @@ class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSp
'any type contain the word "DataOperationEvent"' || 1
}
+ //TODO Toine, add positive test with data to prove event is converted correctly (using correct factory)
+
def 'Non cloud events on same Topic.'() {
when: 'sending a non-cloud event on the same topic'
- ProducerRecord<String, String> record = new ProducerRecord<>(topic, 'simple string event')
- KafkaProducer<String, String> producer = new KafkaProducer<>(eventProducerConfigProperties(StringSerializer))
- producer.send(record)
+ legacyEventKafkaTemplate.send(topic, 'simple string event')
and: 'wait a little for async processing of message'
TimeUnit.MILLISECONDS.sleep(300)
then: 'the event is not processed by this consumer'
@@ -95,5 +118,4 @@ class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSp
messageListenerContainer -> { ContainerTestUtils.waitForAssignment(messageListenerContainer, 1) }
)
}
-
}
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
index 4c6880421b..223c92f373 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy
@@ -38,7 +38,7 @@ import org.testcontainers.spock.Testcontainers
import java.time.Duration
-@SpringBootTest(classes = [EventsPublisher, ObjectMapper, JsonObjectMapper])
+@SpringBootTest(classes = [ObjectMapper, JsonObjectMapper])
@Testcontainers
@DirtiesContext
class LcmEventsPublisherSpec extends MessagingBaseSpec {
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
index 59e62e34d0..3f40f430f3 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/operations/DmiDataOperationsSpec.groovy
@@ -22,12 +22,16 @@
package org.onap.cps.ncmp.api.impl.operations
import com.fasterxml.jackson.databind.ObjectMapper
+import io.cloudevents.core.CloudEventUtils
+import io.cloudevents.jackson.PojoCloudEventDataMapper
+import org.onap.cps.ncmp.api.NcmpEventResponseCode
import org.onap.cps.ncmp.api.impl.config.NcmpConfiguration
import org.onap.cps.ncmp.api.impl.events.EventsPublisher
+import org.onap.cps.ncmp.api.impl.exception.HttpClientRequestException
import org.onap.cps.ncmp.api.impl.utils.DmiServiceUrlBuilder
import org.onap.cps.ncmp.api.impl.utils.context.CpsApplicationContext
import org.onap.cps.ncmp.api.models.DataOperationRequest
-import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent
+import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent
import org.onap.cps.ncmp.utils.TestUtils
import org.onap.cps.utils.JsonObjectMapper
import org.spockframework.spring.SpringBean
@@ -37,6 +41,7 @@ import org.springframework.http.ResponseEntity
import org.springframework.test.context.ContextConfiguration
import org.springframework.http.HttpStatus
import spock.lang.Shared
+import java.util.concurrent.TimeoutException
import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_OPERATIONAL
import static org.onap.cps.ncmp.api.impl.operations.DatastoreType.PASSTHROUGH_RUNNING
@@ -110,6 +115,28 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
assert requestBodyAsJsonStringArg == expectedBatchRequestAsJson
}
+ def 'Execute (async) data operation from DMI service for #scenario.'() {
+ given: 'data operation request body and dmi resource url'
+ def dmiDataOperation = DmiDataOperation.builder().operationId('some-operation-id').build()
+ dmiDataOperation.getCmHandles().add(CmHandle.builder().id('some-cm-handle-id').build())
+ def dmiDataOperationResourceDataUrl = "http://dmi-service-name:dmi-port/dmi/v1/data?topic=my-topic-name&requestId=some-request-id"
+ def actualDataOperationCloudEvent = null
+ when: 'exception occurs after sending request to dmi service'
+ objectUnderTest.handleTaskCompletionException(new Throwable(exception), dmiDataOperationResourceDataUrl, List.of(dmiDataOperation))
+ then: 'a cloud event is published'
+ eventsPublisher.publishCloudEvent('my-topic-name', 'some-request-id', _) >> { args -> actualDataOperationCloudEvent = args[2] }
+ and: 'the event contains the expected error details'
+ def eventDataValue = extractDataValue(actualDataOperationCloudEvent)
+ assert eventDataValue.operationId == dmiDataOperation.operationId
+ assert eventDataValue.ids == dmiDataOperation.cmHandles.id
+ assert eventDataValue.statusCode == responseCode.statusCode
+ assert eventDataValue.statusMessage == responseCode.statusMessage
+ where: 'the following exceptions occur'
+ scenario | exception || responseCode
+ 'http client request exception' | new HttpClientRequestException('error-message', 'error-details', HttpStatus.SERVICE_UNAVAILABLE.value()) || NcmpEventResponseCode.UNABLE_TO_READ_RESOURCE_DATA
+ 'timeout exception' | new TimeoutException() || NcmpEventResponseCode.DMI_SERVICE_NOT_RESPONDING
+ }
+
def 'call get all resource data.'() {
given: 'the system returns a cm handle with a sample property'
mockYangModelCmHandleRetrieval([yangModelCmHandleProperty])
@@ -142,4 +169,8 @@ class DmiDataOperationsSpec extends DmiOperationsBaseSpec {
CREATE || 'create'
UPDATE || 'update'
}
+
+ def extractDataValue(actualDataOperationCloudEvent) {
+ return CloudEventUtils.mapData(actualDataOperationCloudEvent, PojoCloudEventDataMapper.from(new ObjectMapper(), DataOperationEvent.class)).getValue().data.responses[0]
+ }
}
diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
index 603b8cdda6..0356c3fcdc 100644
--- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
+++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy
@@ -24,7 +24,6 @@ import io.cloudevents.CloudEvent
import io.cloudevents.kafka.CloudEventSerializer
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.serialization.StringSerializer
-import org.spockframework.spring.SpringBean
import org.springframework.kafka.core.DefaultKafkaProducerFactory
import org.springframework.kafka.core.KafkaTemplate
import org.springframework.kafka.support.serializer.JsonSerializer
@@ -46,11 +45,9 @@ class MessagingBaseSpec extends Specification {
static kafkaTestContainer = new KafkaContainer(DockerImageName.parse('registry.nordix.org/onaptest/confluentinc/cp-kafka:6.2.1').asCompatibleSubstituteFor('confluentinc/cp-kafka'))
- @SpringBean
- KafkaTemplate legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<Integer, String>(eventProducerConfigProperties(JsonSerializer)))
+ def legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, String>(eventProducerConfigProperties(JsonSerializer)))
- @SpringBean
- KafkaTemplate cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, CloudEvent>(eventProducerConfigProperties(CloudEventSerializer)))
+ def cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<String, CloudEvent>(eventProducerConfigProperties(CloudEventSerializer)))
@DynamicPropertySource
static void registerKafkaProperties(DynamicPropertyRegistry dynamicPropertyRegistry) {
diff --git a/cps-ncmp-service/src/test/resources/application.yml b/cps-ncmp-service/src/test/resources/application.yml
index df34f844d2..edbd7022f2 100644
--- a/cps-ncmp-service/src/test/resources/application.yml
+++ b/cps-ncmp-service/src/test/resources/application.yml
@@ -21,6 +21,7 @@ spring:
producer:
value-serializer: io.cloudevents.kafka.CloudEventSerializer
consumer:
+ value-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer
properties:
spring.deserializer.value.delegate.class: io.cloudevents.kafka.CloudEventDeserializer
@@ -54,4 +55,4 @@ hazelcast:
mode:
kubernetes:
enabled: false
- service-name: "cps-and-ncmp-service" \ No newline at end of file
+ service-name: "cps-and-ncmp-service"
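The consumer now wraps the cloud-event deserializer in Spring Kafka's ErrorHandlingDeserializer, so a record that cannot be deserialized is surfaced as a deserialization error instead of breaking the listener container. A sketch of the equivalent programmatic consumer properties (assumed to match the yml; deserializer settings only, no broker settings):

import io.cloudevents.kafka.CloudEventDeserializer;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;

public class ErrorHandlingDeserializerConfigSketch {

    static Map<String, Object> consumerProperties() {
        final Map<String, Object> properties = new HashMap<>();
        // the outer deserializer catches failures; the delegate does the actual cloud-event decoding
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class);
        properties.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, CloudEventDeserializer.class);
        return properties;
    }
}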