From ffe0e1ee121e2ca89f82824f193ed0a0e599a2c0 Mon Sep 17 00:00:00 2001
From: "raviteja.karumuri"
Date: Thu, 29 Jun 2023 13:57:40 +0100
Subject: Updating the Kafka listener compliance to cloud events and legacy events

# Added filter property to the legacy (DMIAsync) consumer
# Added containerFactory property to the CloudEvent (DataOperation) consumer to use 'cloudEventDeserialization'

Issue-ID: CPS-1746
Signed-off-by: raviteja.karumuri
Change-Id: I73315b816eb6446b7e163fd8779bc35fd6cbef63
---
 .../AsyncRestRequestResponseEventConsumer.java     |  62 ++++++++++
 .../api/impl/async/DataOperationEventConsumer.java |  63 ++++++++++
 .../async/DataOperationRecordFilterStrategy.java   |  55 ---------
 .../async/NcmpAsyncDataOperationEventConsumer.java |  63 ----------
 .../NcmpAsyncRequestResponseEventConsumer.java     |  60 ---------
 .../api/impl/async/RecordFilterStrategies.java     |  77 ++++++++++++
 ...AsyncRequestResponseEventIntegrationSpec.groovy |   6 +-
 .../async/DataOperationEventConsumerSpec.groovy    | 136 +++++++++++++++++++++
 .../async/FilterStrategiesIntegrationSpec.groovy   | 121 ++++++++++++++++++
 ...ataOperationEventConsumerIntegrationSpec.groovy |  99 ---------------
 .../NcmpAsyncDataOperationEventConsumerSpec.groovy | 136 ---------------------
 .../impl/events/lcm/LcmEventsPublisherSpec.groovy  |   2 +-
 .../cps/ncmp/api/kafka/MessagingBaseSpec.groovy    |   7 +-
 .../src/test/resources/application.yml             |   3 +-
 14 files changed, 467 insertions(+), 423 deletions(-)
 create mode 100644 cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java
 create mode 100644 cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java
 delete mode 100644 cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java
 delete mode 100644 cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java
 delete mode 100644 cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java
 create mode 100644 cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java
 create mode 100644 cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy
 create mode 100644 cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy
 delete mode 100644 cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy
 delete mode 100644 cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy

diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java
new file mode 100644
index 000000000..0044182dd
--- /dev/null
+++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/AsyncRestRequestResponseEventConsumer.java
@@ -0,0 +1,62 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (c) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.async; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.ncmp.api.impl.events.EventsPublisher; +import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent; +import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.stereotype.Component; + +/** + * Listener for cps-ncmp async request response events. + */ +@Component +@Slf4j +@RequiredArgsConstructor +@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) +public class AsyncRestRequestResponseEventConsumer { + + private final EventsPublisher eventsPublisher; + private final NcmpAsyncRequestResponseEventMapper ncmpAsyncRequestResponseEventMapper; + + /** + * Consume the specified event. + * + * @param dmiAsyncRequestResponseEvent the event to be consumed and produced. + */ + @KafkaListener( + topics = "${app.ncmp.async-m2m.topic}", + filter = "includeNonCloudEventsOnly", + groupId = "ncmp-async-rest-request-event-group", + properties = {"spring.json.value.default.type=org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent"}) + public void consumeAndForward(final DmiAsyncRequestResponseEvent dmiAsyncRequestResponseEvent) { + log.debug("Consuming event {} ...", dmiAsyncRequestResponseEvent); + final NcmpAsyncRequestResponseEvent ncmpAsyncRequestResponseEvent = + ncmpAsyncRequestResponseEventMapper.toNcmpAsyncEvent(dmiAsyncRequestResponseEvent); + eventsPublisher.publishEvent(ncmpAsyncRequestResponseEvent.getEventTarget(), + ncmpAsyncRequestResponseEvent.getEventId(), + ncmpAsyncRequestResponseEvent); + } +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java new file mode 100644 index 000000000..9649b0109 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumer.java @@ -0,0 +1,63 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.async; + +import io.cloudevents.CloudEvent; +import io.cloudevents.kafka.impl.KafkaHeaders; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.onap.cps.ncmp.api.impl.events.EventsPublisher; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.stereotype.Component; + +/** + * Listener for cps-ncmp async data operation events. + */ +@Component +@Slf4j +@RequiredArgsConstructor +@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) +public class DataOperationEventConsumer { + + private final EventsPublisher eventsPublisher; + + /** + * Consume the DataOperation cloud event published by producer to topic 'async-m2m.topic' + * and publish the same to client specified topic. + * + * @param dataOperationEventConsumerRecord consuming event as a ConsumerRecord. + */ + @KafkaListener( + topics = "${app.ncmp.async-m2m.topic}", + filter = "includeDataOperationEventsOnly", + groupId = "ncmp-data-operation-event-group", + containerFactory = "cloudEventConcurrentKafkaListenerContainerFactory") + public void consumeAndPublish(final ConsumerRecord dataOperationEventConsumerRecord) { + log.info("Consuming event payload {} ...", dataOperationEventConsumerRecord.value()); + final String eventTarget = KafkaHeaders.getParsedKafkaHeader( + dataOperationEventConsumerRecord.headers(), "ce_destination"); + final String eventId = KafkaHeaders.getParsedKafkaHeader( + dataOperationEventConsumerRecord.headers(), "ce_id"); + eventsPublisher.publishCloudEvent(eventTarget, eventId, dataOperationEventConsumerRecord.value()); + } +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java deleted file mode 100644 index 76cc0c4b7..000000000 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/DataOperationRecordFilterStrategy.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ncmp.api.impl.async; - -import io.cloudevents.CloudEvent; -import io.cloudevents.kafka.impl.KafkaHeaders; -import lombok.extern.slf4j.Slf4j; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.kafka.listener.adapter.RecordFilterStrategy; - -/** - * Data operation record filter strategy, which helps to filter the consumer records. - * - */ -@Configuration -@Slf4j -public class DataOperationRecordFilterStrategy { - - /** - * Filtering the consumer records based on the eventType header, It - * returns boolean, true means filter the consumer record and false - * means not filter the consumer record. - * @return boolean value. - */ - @Bean - public RecordFilterStrategy includeDataOperationEventsOnly() { - return consumedRecord -> { - final String eventTypeHeaderValue = KafkaHeaders.getParsedKafkaHeader(consumedRecord.headers(), "ce_type"); - if (eventTypeHeaderValue == null) { - log.trace("No ce_type header found, possibly a legacy event (ignored)"); - return true; - } - return !(eventTypeHeaderValue.contains("DataOperationEvent")); - }; - } -} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java deleted file mode 100644 index 4a0ec5c49..000000000 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumer.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ncmp.api.impl.async; - -import io.cloudevents.CloudEvent; -import io.cloudevents.kafka.impl.KafkaHeaders; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.onap.cps.ncmp.api.impl.events.EventsPublisher; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.kafka.annotation.KafkaListener; -import org.springframework.stereotype.Component; - -/** - * Listener for cps-ncmp async data operation events. 
- */ -@Component -@Slf4j -@RequiredArgsConstructor -@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) -public class NcmpAsyncDataOperationEventConsumer { - - private final EventsPublisher eventsPublisher; - - /** - * Consume the DataOperationResponseEvent published by producer to topic 'async-m2m.topic' - * and publish the same to the client specified topic. - * - * @param dataOperationEventConsumerRecord consuming event as a ConsumerRecord. - */ - @KafkaListener( - topics = "${app.ncmp.async-m2m.topic}", - filter = "includeDataOperationEventsOnly", - groupId = "ncmp-data-operation-event-group", - properties = {"spring.json.value.default.type=org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent"}) - public void consumeAndPublish(final ConsumerRecord dataOperationEventConsumerRecord) { - log.info("Consuming event payload {} ...", dataOperationEventConsumerRecord.value()); - final String eventTarget = KafkaHeaders.getParsedKafkaHeader( - dataOperationEventConsumerRecord.headers(), "ce_destination"); - final String eventId = KafkaHeaders.getParsedKafkaHeader( - dataOperationEventConsumerRecord.headers(), "ce_id"); - eventsPublisher.publishCloudEvent(eventTarget, eventId, dataOperationEventConsumerRecord.value()); - } -} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java deleted file mode 100644 index 0ac0fb92d..000000000 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (c) 2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ncmp.api.impl.async; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.onap.cps.ncmp.api.impl.events.EventsPublisher; -import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent; -import org.onap.cps.ncmp.event.model.NcmpAsyncRequestResponseEvent; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.kafka.annotation.KafkaListener; -import org.springframework.stereotype.Component; - -/** - * Listener for cps-ncmp async request response events. 
- */ -@Component -@Slf4j -@RequiredArgsConstructor -@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) -public class NcmpAsyncRequestResponseEventConsumer { - - private final EventsPublisher eventsPublisher; - private final NcmpAsyncRequestResponseEventMapper ncmpAsyncRequestResponseEventMapper; - - /** - * Consume the specified event. - * - * @param dmiAsyncRequestResponseEvent the event to be consumed and produced. - */ - @KafkaListener( - topics = "${app.ncmp.async-m2m.topic}", - properties = {"spring.json.value.default.type=org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent"}) - public void consumeAndForward(final DmiAsyncRequestResponseEvent dmiAsyncRequestResponseEvent) { - log.debug("Consuming event {} ...", dmiAsyncRequestResponseEvent); - - final NcmpAsyncRequestResponseEvent ncmpAsyncRequestResponseEvent = - ncmpAsyncRequestResponseEventMapper.toNcmpAsyncEvent(dmiAsyncRequestResponseEvent); - eventsPublisher.publishEvent(ncmpAsyncRequestResponseEvent.getEventTarget(), - ncmpAsyncRequestResponseEvent.getEventId(), ncmpAsyncRequestResponseEvent); - } -} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java new file mode 100644 index 000000000..040479040 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/RecordFilterStrategies.java @@ -0,0 +1,77 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.async; + +import io.cloudevents.CloudEvent; +import io.cloudevents.kafka.impl.KafkaHeaders; +import java.io.Serializable; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.common.header.Headers; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.kafka.listener.adapter.RecordFilterStrategy; + +/** + * Record filter strategies, which helps to filter the consumer records based on some conditions. + * + */ +@Configuration +@Slf4j +public class RecordFilterStrategies { + + private static final boolean EXCLUDE_EVENT = true; + + /** + * Include only DataOperation events based on the cloud event type header, It + * returns boolean, true means exclude the consumer record and false + * means include the consumer record. + * @return boolean value. 
+ */ + @Bean + public RecordFilterStrategy includeDataOperationEventsOnly() { + return consumerRecord -> + isNotCloudEventOfType(consumerRecord.headers(), "DataOperationEvent"); + } + + /** + * Includes the consumer records based on the cloud event type header, It returns boolean, + * true means exclude the consumer record and false means include the consumer record. + * It includes only the legacy events i.e. non-cloud events + * @return boolean value. + */ + @Bean + public RecordFilterStrategy includeNonCloudEventsOnly() { + return consumerRecord -> isCloudEvent(consumerRecord.headers()); + } + + private boolean isCloudEvent(final Headers headers) { + return headers.lastHeader("ce_type") != null; + } + + private boolean isNotCloudEventOfType(final Headers headers, final String requiredEventType) { + final String eventTypeHeaderValue = KafkaHeaders.getParsedKafkaHeader(headers, "ce_type"); + if (eventTypeHeaderValue == null) { + log.trace("No ce_type header found, possibly a legacy event (ignored)"); + return EXCLUDE_EVENT; + } + return !(eventTypeHeaderValue.contains(requiredEventType)); + } +} diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy index fe7b3f11c..17df7b0ba 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/CpsAsyncRequestResponseEventIntegrationSpec.groovy @@ -37,7 +37,7 @@ import org.springframework.test.annotation.DirtiesContext import org.testcontainers.spock.Testcontainers import java.time.Duration -@SpringBootTest(classes = [EventsPublisher, NcmpAsyncRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper]) +@SpringBootTest(classes = [EventsPublisher, AsyncRestRequestResponseEventConsumer, ObjectMapper, JsonObjectMapper]) @Testcontainers @DirtiesContext class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBaseSpec { @@ -52,8 +52,8 @@ class NcmpAsyncRequestResponseEventProducerIntegrationSpec extends MessagingBase Mappers.getMapper(NcmpAsyncRequestResponseEventMapper.class) @SpringBean - NcmpAsyncRequestResponseEventConsumer ncmpAsyncRequestResponseEventConsumer = - new NcmpAsyncRequestResponseEventConsumer(cpsAsyncRequestResponseEventPublisher, + AsyncRestRequestResponseEventConsumer ncmpAsyncRequestResponseEventConsumer = + new AsyncRestRequestResponseEventConsumer(cpsAsyncRequestResponseEventPublisher, ncmpAsyncRequestResponseEventMapper) @Autowired diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy new file mode 100644 index 000000000..7b54f591a --- /dev/null +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/DataOperationEventConsumerSpec.groovy @@ -0,0 +1,136 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.async + +import com.fasterxml.jackson.databind.ObjectMapper +import io.cloudevents.CloudEvent +import io.cloudevents.kafka.CloudEventDeserializer +import io.cloudevents.kafka.CloudEventSerializer +import io.cloudevents.kafka.impl.KafkaHeaders +import io.cloudevents.core.CloudEventUtils +import io.cloudevents.core.builder.CloudEventBuilder +import io.cloudevents.jackson.PojoCloudEventDataMapper +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.apache.kafka.common.header.internals.RecordHeaders +import org.onap.cps.ncmp.api.impl.events.EventsPublisher +import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec +import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent +import org.onap.cps.ncmp.utils.TestUtils +import org.onap.cps.utils.JsonObjectMapper +import org.spockframework.spring.SpringBean +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.kafka.listener.adapter.RecordFilterStrategy +import org.springframework.test.annotation.DirtiesContext +import org.testcontainers.spock.Testcontainers +import java.time.Duration + +@SpringBootTest(classes = [EventsPublisher, DataOperationEventConsumer, RecordFilterStrategies,JsonObjectMapper, ObjectMapper]) +@Testcontainers +@DirtiesContext +class DataOperationEventConsumerSpec extends MessagingBaseSpec { + + @SpringBean + EventsPublisher asyncDataOperationEventPublisher = new EventsPublisher(legacyEventKafkaTemplate, cloudEventKafkaTemplate) + + @SpringBean + DataOperationEventConsumer objectUnderTest = new DataOperationEventConsumer(asyncDataOperationEventPublisher) + + @Autowired + JsonObjectMapper jsonObjectMapper + + @Autowired + RecordFilterStrategy dataOperationRecordFilterStrategy + + @Autowired + ObjectMapper objectMapper + + def cloudEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('test', CloudEventDeserializer)) + def static clientTopic = 'client-topic' + def static dataOperationType = 'org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent' + + def 'Consume and publish event to client specified topic'() { + given: 'consumer subscribing to client topic' + cloudEventKafkaConsumer.subscribe([clientTopic]) + and: 'consumer record for data operation event' + def consumerRecordIn = createConsumerRecord(dataOperationType) + when: 'the data operation event is consumed and published to client specified topic' + objectUnderTest.consumeAndPublish(consumerRecordIn) + and: 'the client specified topic is polled' + def consumerRecordOut = cloudEventKafkaConsumer.poll(Duration.ofMillis(1500))[0] + then: 'verify cloud compliant headers' + def consumerRecordOutHeaders = consumerRecordOut.headers() + assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_correlationid') == 'request-id' + assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_id') 
== 'some-uuid' + assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_type') == dataOperationType + and: 'verify that extension is included into header' + assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_destination') == clientTopic + and: 'map consumer record to expected event type' + def dataOperationResponseEvent = CloudEventUtils.mapData(consumerRecordOut.value(), + PojoCloudEventDataMapper.from(objectMapper, DataOperationEvent.class)).getValue() + and: 'verify published response data properties' + def response = dataOperationResponseEvent.data.responses[0] + response.operationId == 'some-operation-id' + response.statusCode == 'any-success-status-code' + response.statusMessage == 'Successfully applied changes' + response.result as String == '[some-key:some-value]' + } + + def 'Filter an event with type #eventType'() { + given: 'consumer record for event with type #eventType' + def consumerRecord = createConsumerRecord(eventType) + when: 'while consuming the topic ncmp-async-m2m it executes the filter strategy' + def result = dataOperationRecordFilterStrategy.filter(consumerRecord) + then: 'the event is #description' + assert result == expectedResult + where: 'filter the event based on the eventType #eventType' + description | eventType || expectedResult + 'not filtered(the consumer will see the event)' | dataOperationType || false + 'filtered(the consumer will not see the event)' | 'wrongType' || true + } + + def createConsumerRecord(eventTypeAsString) { + def jsonData = TestUtils.getResourceFileContent('dataOperationEvent.json') + def testEventSentAsBytes = jsonObjectMapper.asJsonBytes(jsonObjectMapper.convertJsonString(jsonData, DataOperationEvent.class)) + + CloudEvent cloudEvent = getCloudEvent(eventTypeAsString, testEventSentAsBytes) + + def headers = new RecordHeaders() + def cloudEventSerializer = new CloudEventSerializer() + cloudEventSerializer.serialize(clientTopic, headers, cloudEvent) + + def consumerRecord = new ConsumerRecord(clientTopic, 0, 0L, 'sample-message-key', cloudEvent) + headers.forEach(header -> consumerRecord.headers().add(header)) + return consumerRecord + } + + def getCloudEvent(eventTypeAsString, byte[] testEventSentAsBytes) { + return CloudEventBuilder.v1() + .withId("some-uuid") + .withType(eventTypeAsString) + .withSource(URI.create("sample-test-source")) + .withData(testEventSentAsBytes) + .withExtension("correlationid", "request-id") + .withExtension("destination", clientTopic) + .build(); + } +} diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy new file mode 100644 index 000000000..43d06483a --- /dev/null +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/FilterStrategiesIntegrationSpec.groovy @@ -0,0 +1,121 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (c) 2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.async + +import io.cloudevents.core.builder.CloudEventBuilder +import org.onap.cps.ncmp.api.impl.config.kafka.KafkaConfig +import org.onap.cps.ncmp.api.impl.events.EventsPublisher +import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec +import org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent +import org.spockframework.spring.SpringBean +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.beans.factory.annotation.Value +import org.springframework.boot.autoconfigure.EnableAutoConfiguration +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.kafka.config.KafkaListenerEndpointRegistry +import org.springframework.kafka.test.utils.ContainerTestUtils +import org.springframework.test.annotation.DirtiesContext +import org.testcontainers.spock.Testcontainers +import java.util.concurrent.TimeUnit + +@SpringBootTest(classes =[DataOperationEventConsumer, AsyncRestRequestResponseEventConsumer, RecordFilterStrategies, KafkaConfig]) +@DirtiesContext +@Testcontainers +@EnableAutoConfiguration +class FilterStrategiesIntegrationSpec extends MessagingBaseSpec { + + @SpringBean + EventsPublisher mockEventsPublisher = Mock() + + @SpringBean + NcmpAsyncRequestResponseEventMapper mapper = Stub() + + @Autowired + private KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry + + @Value('${app.ncmp.async-m2m.topic}') + def topic + + def setup() { + activateListeners() + } + + def 'Legacy event consumer with cloud event.'() { + given: 'a cloud event of type: #eventType' + def cloudEvent = CloudEventBuilder.v1().withId('some id') + .withType('DataOperationEvent') + .withSource(URI.create('some-source')) + .build() + when: 'send the cloud event' + cloudEventKafkaTemplate.send(topic, cloudEvent) + and: 'wait a little for async processing of message' + TimeUnit.MILLISECONDS.sleep(300) + then: 'event is not consumed' + 0 * mockEventsPublisher.publishEvent(*_) + } + + def 'Legacy event consumer with valid legacy event.'() { + given: 'a cloud event of type: #eventType' + DmiAsyncRequestResponseEvent legacyEvent = new DmiAsyncRequestResponseEvent(eventId:'legacyEventId', eventTarget:'legacyEventTarget') + when: 'send the cloud event' + legacyEventKafkaTemplate.send(topic, legacyEvent) + and: 'wait a little for async processing of message' + TimeUnit.MILLISECONDS.sleep(300) + then: 'the event is consumed by the (legacy) AsynRestRequest consumer' + 1 * mockEventsPublisher.publishEvent(*_) + } + + def 'Filtering Cloud Events on Type.'() { + given: 'a cloud event of type: #eventType' + def cloudEvent = CloudEventBuilder.v1().withId('some id') + .withType(eventType) + .withSource(URI.create('some-source')) + .build() + when: 'send the cloud event' + cloudEventKafkaTemplate.send(topic, cloudEvent) + and: 'wait a little for async processing of message' + TimeUnit.MILLISECONDS.sleep(300) + then: 'the event has only been forwarded for the correct type' + 
expectedNUmberOfCallsToPublishForwardedEvent * mockEventsPublisher.publishCloudEvent(*_) + where: 'the following event types are used' + eventType || expectedNUmberOfCallsToPublishForwardedEvent + 'DataOperationEvent' || 1 + 'other type' || 0 + 'any type contain the word "DataOperationEvent"' || 1 + } + + //TODO Toine, add positive test with data to prove event is converted correctly (using correct factory) + + def 'Non cloud events on same Topic.'() { + when: 'sending a non-cloud event on the same topic' + legacyEventKafkaTemplate.send(topic, 'simple string event') + and: 'wait a little for async processing of message' + TimeUnit.MILLISECONDS.sleep(300) + then: 'the event is not processed by this consumer' + 0 * mockEventsPublisher.publishCloudEvent(*_) + } + + def activateListeners() { + kafkaListenerEndpointRegistry.getListenerContainers().forEach( + messageListenerContainer -> { ContainerTestUtils.waitForAssignment(messageListenerContainer, 1) } + ) + } +} diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy deleted file mode 100644 index f577f55ba..000000000 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerIntegrationSpec.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (c) 2023 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ncmp.api.impl.async - -import io.cloudevents.CloudEvent -import io.cloudevents.core.builder.CloudEventBuilder -import io.cloudevents.kafka.CloudEventSerializer -import org.apache.kafka.clients.producer.KafkaProducer -import org.apache.kafka.clients.producer.ProducerRecord -import org.apache.kafka.common.serialization.StringSerializer -import org.onap.cps.ncmp.api.impl.events.EventsPublisher -import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec -import org.spockframework.spring.SpringBean -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.beans.factory.annotation.Value -import org.springframework.boot.autoconfigure.EnableAutoConfiguration -import org.springframework.boot.test.context.SpringBootTest -import org.springframework.kafka.config.KafkaListenerEndpointRegistry -import org.springframework.kafka.test.utils.ContainerTestUtils -import org.springframework.test.annotation.DirtiesContext -import org.testcontainers.spock.Testcontainers -import java.util.concurrent.TimeUnit - -@SpringBootTest(classes =[NcmpAsyncDataOperationEventConsumer, DataOperationRecordFilterStrategy]) -@DirtiesContext -@Testcontainers -@EnableAutoConfiguration -class NcmpAsyncDataOperationEventConsumerIntegrationSpec extends MessagingBaseSpec { - - @SpringBean - EventsPublisher mockEventsPublisher = Mock() - - @Autowired - private KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry - - @Value('${app.ncmp.async-m2m.topic}') - def topic - - def setup() { - activateListeners() - } - - def 'Filtering Cloud Events on Type.'() { - given: 'a cloud event of type: #eventType' - def cloudEvent = CloudEventBuilder.v1().withId('some id') - .withType(eventType) - .withSource(URI.create('some-source')) - .build() - when: 'send the cloud event' - ProducerRecord record = new ProducerRecord<>(topic, cloudEvent) - KafkaProducer producer = new KafkaProducer<>(eventProducerConfigProperties(CloudEventSerializer)) - producer.send(record) - and: 'wait a little for async processing of message' - TimeUnit.MILLISECONDS.sleep(300) - then: 'the event has only been forwarded for the correct type' - expectedNUmberOfCallsToPublishForwardedEvent * mockEventsPublisher.publishCloudEvent(*_) - where: 'the following event types are used' - eventType || expectedNUmberOfCallsToPublishForwardedEvent - 'DataOperationEvent' || 1 - 'other type' || 0 - 'any type contain the word "DataOperationEvent"' || 1 - } - - def 'Non cloud events on same Topic.'() { - when: 'sending a non-cloud event on the same topic' - ProducerRecord record = new ProducerRecord<>(topic, 'simple string event') - KafkaProducer producer = new KafkaProducer<>(eventProducerConfigProperties(StringSerializer)) - producer.send(record) - and: 'wait a little for async processing of message' - TimeUnit.MILLISECONDS.sleep(300) - then: 'the event is not processed by this consumer' - 0 * mockEventsPublisher.publishCloudEvent(*_) - } - - def activateListeners() { - kafkaListenerEndpointRegistry.getListenerContainers().forEach( - messageListenerContainer -> { ContainerTestUtils.waitForAssignment(messageListenerContainer, 1) } - ) - } - -} diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy deleted file mode 100644 
index 635328871..000000000 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/async/NcmpAsyncDataOperationEventConsumerSpec.groovy +++ /dev/null @@ -1,136 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2023 Nordix Foundation - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.cps.ncmp.api.impl.async - -import com.fasterxml.jackson.databind.ObjectMapper -import io.cloudevents.CloudEvent -import io.cloudevents.kafka.CloudEventDeserializer -import io.cloudevents.kafka.CloudEventSerializer -import io.cloudevents.kafka.impl.KafkaHeaders -import io.cloudevents.core.CloudEventUtils -import io.cloudevents.core.builder.CloudEventBuilder -import io.cloudevents.jackson.PojoCloudEventDataMapper -import org.apache.kafka.clients.consumer.ConsumerRecord -import org.apache.kafka.clients.consumer.KafkaConsumer -import org.apache.kafka.common.header.internals.RecordHeaders -import org.onap.cps.ncmp.api.impl.events.EventsPublisher -import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec -import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent -import org.onap.cps.ncmp.utils.TestUtils -import org.onap.cps.utils.JsonObjectMapper -import org.spockframework.spring.SpringBean -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.boot.test.context.SpringBootTest -import org.springframework.kafka.listener.adapter.RecordFilterStrategy -import org.springframework.test.annotation.DirtiesContext -import org.testcontainers.spock.Testcontainers -import java.time.Duration - -@SpringBootTest(classes = [EventsPublisher, NcmpAsyncDataOperationEventConsumer, DataOperationRecordFilterStrategy,JsonObjectMapper, ObjectMapper]) -@Testcontainers -@DirtiesContext -class NcmpAsyncDataOperationEventConsumerSpec extends MessagingBaseSpec { - - @SpringBean - EventsPublisher asyncDataOperationEventPublisher = new EventsPublisher(legacyEventKafkaTemplate, cloudEventKafkaTemplate) - - @SpringBean - NcmpAsyncDataOperationEventConsumer objectUnderTest = new NcmpAsyncDataOperationEventConsumer(asyncDataOperationEventPublisher) - - @Autowired - JsonObjectMapper jsonObjectMapper - - @Autowired - RecordFilterStrategy dataOperationRecordFilterStrategy - - @Autowired - ObjectMapper objectMapper - - def cloudEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('test', CloudEventDeserializer)) - def static clientTopic = 'client-topic' - def static dataOperationType = 'org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent' - - def 'Consume and publish event to client specified topic'() { - given: 'consumer subscribing to client topic' - cloudEventKafkaConsumer.subscribe([clientTopic]) - and: 'consumer record for data operation event' - def consumerRecordIn = 
createConsumerRecord(dataOperationType) - when: 'the data operation event is consumed and published to client specified topic' - objectUnderTest.consumeAndPublish(consumerRecordIn) - and: 'the client specified topic is polled' - def consumerRecordOut = cloudEventKafkaConsumer.poll(Duration.ofMillis(1500))[0] - then: 'verify cloud compliant headers' - def consumerRecordOutHeaders = consumerRecordOut.headers() - assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_correlationid') == 'request-id' - assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_id') == 'some-uuid' - assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_type') == dataOperationType - and: 'verify that extension is included into header' - assert KafkaHeaders.getParsedKafkaHeader(consumerRecordOutHeaders, 'ce_destination') == clientTopic - and: 'map consumer record to expected event type' - def dataOperationResponseEvent = CloudEventUtils.mapData(consumerRecordOut.value(), - PojoCloudEventDataMapper.from(objectMapper, DataOperationEvent.class)).getValue() - and: 'verify published response data properties' - def response = dataOperationResponseEvent.data.responses[0] - response.operationId == 'some-operation-id' - response.statusCode == 'any-success-status-code' - response.statusMessage == 'Successfully applied changes' - response.result as String == '[some-key:some-value]' - } - - def 'Filter an event with type #eventType'() { - given: 'consumer record for event with type #eventType' - def consumerRecord = createConsumerRecord(eventType) - when: 'while consuming the topic ncmp-async-m2m it executes the filter strategy' - def result = dataOperationRecordFilterStrategy.filter(consumerRecord) - then: 'the event is #description' - assert result == expectedResult - where: 'filter the event based on the eventType #eventType' - description | eventType || expectedResult - 'not filtered(the consumer will see the event)' | dataOperationType || false - 'filtered(the consumer will not see the event)' | 'wrongType' || true - } - - def createConsumerRecord(eventTypeAsString) { - def jsonData = TestUtils.getResourceFileContent('dataOperationEvent.json') - def testEventSentAsBytes = jsonObjectMapper.asJsonBytes(jsonObjectMapper.convertJsonString(jsonData, DataOperationEvent.class)) - - CloudEvent cloudEvent = getCloudEvent(eventTypeAsString, testEventSentAsBytes) - - def headers = new RecordHeaders() - def cloudEventSerializer = new CloudEventSerializer() - cloudEventSerializer.serialize(clientTopic, headers, cloudEvent) - - def consumerRecord = new ConsumerRecord(clientTopic, 0, 0L, 'sample-message-key', cloudEvent) - headers.forEach(header -> consumerRecord.headers().add(header)) - return consumerRecord - } - - def getCloudEvent(eventTypeAsString, byte[] testEventSentAsBytes) { - return CloudEventBuilder.v1() - .withId("some-uuid") - .withType(eventTypeAsString) - .withSource(URI.create("sample-test-source")) - .withData(testEventSentAsBytes) - .withExtension("correlationid", "request-id") - .withExtension("destination", clientTopic) - .build(); - } -} diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy index 4c6880421..223c92f37 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy +++ 
b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/events/lcm/LcmEventsPublisherSpec.groovy @@ -38,7 +38,7 @@ import org.testcontainers.spock.Testcontainers import java.time.Duration -@SpringBootTest(classes = [EventsPublisher, ObjectMapper, JsonObjectMapper]) +@SpringBootTest(classes = [ObjectMapper, JsonObjectMapper]) @Testcontainers @DirtiesContext class LcmEventsPublisherSpec extends MessagingBaseSpec { diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy index 603b8cdda..0356c3fcd 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy @@ -24,7 +24,6 @@ import io.cloudevents.CloudEvent import io.cloudevents.kafka.CloudEventSerializer import org.apache.kafka.common.serialization.StringDeserializer import org.apache.kafka.common.serialization.StringSerializer -import org.spockframework.spring.SpringBean import org.springframework.kafka.core.DefaultKafkaProducerFactory import org.springframework.kafka.core.KafkaTemplate import org.springframework.kafka.support.serializer.JsonSerializer @@ -46,11 +45,9 @@ class MessagingBaseSpec extends Specification { static kafkaTestContainer = new KafkaContainer(DockerImageName.parse('registry.nordix.org/onaptest/confluentinc/cp-kafka:6.2.1').asCompatibleSubstituteFor('confluentinc/cp-kafka')) - @SpringBean - KafkaTemplate legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory(eventProducerConfigProperties(JsonSerializer))) + def legacyEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory(eventProducerConfigProperties(JsonSerializer))) - @SpringBean - KafkaTemplate cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory(eventProducerConfigProperties(CloudEventSerializer))) + def cloudEventKafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory(eventProducerConfigProperties(CloudEventSerializer))) @DynamicPropertySource static void registerKafkaProperties(DynamicPropertyRegistry dynamicPropertyRegistry) { diff --git a/cps-ncmp-service/src/test/resources/application.yml b/cps-ncmp-service/src/test/resources/application.yml index df34f844d..edbd7022f 100644 --- a/cps-ncmp-service/src/test/resources/application.yml +++ b/cps-ncmp-service/src/test/resources/application.yml @@ -21,6 +21,7 @@ spring: producer: value-serializer: io.cloudevents.kafka.CloudEventSerializer consumer: + value-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer properties: spring.deserializer.value.delegate.class: io.cloudevents.kafka.CloudEventDeserializer @@ -54,4 +55,4 @@ hazelcast: mode: kubernetes: enabled: false - service-name: "cps-and-ncmp-service" \ No newline at end of file + service-name: "cps-and-ncmp-service" -- cgit 1.2.3-korg