Diffstat (limited to 'cps-service')
5 files changed, 359 insertions, 18 deletions
diff --git a/cps-service/pom.xml b/cps-service/pom.xml
index 9c2ee86912..e661f3421a 100644
--- a/cps-service/pom.xml
+++ b/cps-service/pom.xml
@@ -128,6 +128,18 @@
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
         </dependency>
+        <dependency>
+            <groupId>io.cloudevents</groupId>
+            <artifactId>cloudevents-json-jackson</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.cloudevents</groupId>
+            <artifactId>cloudevents-kafka</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.cloudevents</groupId>
+            <artifactId>cloudevents-spring</artifactId>
+        </dependency>
         <!-- T E S T   D E P E N D E N C I E S -->
         <dependency>
             <groupId>org.springframework</groupId>
diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDeltaServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDeltaServiceImpl.java
index 1e1fe819ac..2f99dbf7bb 100644
--- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDeltaServiceImpl.java
+++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDeltaServiceImpl.java
@@ -165,10 +165,11 @@ public class CpsDeltaServiceImpl implements CpsDeltaService {
                 sourceDataInDeltaReport.put(key, sourceLeaf);
                 targetDataInDeltaReport.put(key, targetLeaf);
             }
-        } else if (sourceLeaf != null) {
-            sourceDataInDeltaReport.put(key, sourceLeaf);
-        } else if (targetLeaf != null) {
+        } else if (sourceLeaf == null) {
             targetDataInDeltaReport.put(key, targetLeaf);
+
+        } else {
+            sourceDataInDeltaReport.put(key, sourceLeaf);
         }
     }
 
diff --git a/cps-service/src/main/java/org/onap/cps/events/EventsPublisher.java b/cps-service/src/main/java/org/onap/cps/events/EventsPublisher.java
new file mode 100644
index 0000000000..8023fbfb25
--- /dev/null
+++ b/cps-service/src/main/java/org/onap/cps/events/EventsPublisher.java
@@ -0,0 +1,136 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2022-2024 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.events;
+
+import io.cloudevents.CloudEvent;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.header.Headers;
+import org.apache.kafka.common.header.internals.RecordHeaders;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.SendResult;
+import org.springframework.stereotype.Service;
+import org.springframework.util.SerializationUtils;
+
+/**
+ * EventsPublisher to publish events.
+ */
+
+@Slf4j
+@Service
+@RequiredArgsConstructor
+public class EventsPublisher<T> {
+
+    /**
+     * KafkaTemplate for legacy (non-cloud) events.
+     * Note: Cloud events should be used. This will be addressed as part of https://jira.onap.org/browse/CPS-1717.
+     */
+    private final KafkaTemplate<String, T> legacyKafkaEventTemplate;
+
+    private final KafkaTemplate<String, CloudEvent> cloudEventKafkaTemplate;
+
+    /**
+     * Generic CloudEvent publisher.
+     *
+     * @param topicName valid topic name
+     * @param eventKey  message key
+     * @param event     message payload
+     */
+    public void publishCloudEvent(final String topicName, final String eventKey, final CloudEvent event) {
+        final CompletableFuture<SendResult<String, CloudEvent>> eventFuture =
+            cloudEventKafkaTemplate.send(topicName, eventKey, event);
+        eventFuture.whenComplete((result, e) -> {
+            if (e == null) {
+                log.debug("Successfully published event to topic : {} , Event : {}",
+                    result.getRecordMetadata().topic(), result.getProducerRecord().value());
+
+            } else {
+                log.error("Unable to publish event to topic : {} due to {}", topicName, e.getMessage());
+            }
+        });
+    }
+
+    /**
+     * Generic Event publisher.
+     * Note: Cloud events should be used. This will be addressed as part of https://jira.onap.org/browse/CPS-1717.
+     *
+     * @param topicName valid topic name
+     * @param eventKey  message key
+     * @param event     message payload
+     */
+    public void publishEvent(final String topicName, final String eventKey, final T event) {
+        final CompletableFuture<SendResult<String, T>> eventFuture =
+            legacyKafkaEventTemplate.send(topicName, eventKey, event);
+        handleLegacyEventCallback(topicName, eventFuture);
+    }
+
+    /**
+     * Generic Event Publisher with headers.
+     *
+     * @param topicName    valid topic name
+     * @param eventKey     message key
+     * @param eventHeaders event headers
+     * @param event        message payload
+     */
+    public void publishEvent(final String topicName, final String eventKey, final Headers eventHeaders,
+                             final T event) {
+
+        final ProducerRecord<String, T> producerRecord =
+            new ProducerRecord<>(topicName, null, eventKey, event, eventHeaders);
+        final CompletableFuture<SendResult<String, T>> eventFuture = legacyKafkaEventTemplate.send(producerRecord);
+        handleLegacyEventCallback(topicName, eventFuture);
+    }
+
+    /**
+     * Generic Event Publisher with headers.
+     *
+     * @param topicName    valid topic name
+     * @param eventKey     message key
+     * @param eventHeaders map of event headers
+     * @param event        message payload
+     */
+    public void publishEvent(final String topicName, final String eventKey, final Map<String, Object> eventHeaders,
+                             final T event) {
+
+        publishEvent(topicName, eventKey, convertToKafkaHeaders(eventHeaders), event);
+    }
+
+    private void handleLegacyEventCallback(final String topicName,
+                                           final CompletableFuture<SendResult<String, T>> eventFuture) {
+        eventFuture.whenComplete((result, e) -> {
+            if (e == null) {
+                log.debug("Successfully published event to topic : {} , Event : {}",
+                    result.getRecordMetadata().topic(), result.getProducerRecord().value());
+            } else {
+                log.error("Unable to publish event to topic : {} due to {}", topicName, e.getMessage());
+            }
+        });
+    }
+
+    private Headers convertToKafkaHeaders(final Map<String, Object> eventMessageHeaders) {
+        final Headers eventHeaders = new RecordHeaders();
+        eventMessageHeaders.forEach((key, value) -> eventHeaders.add(key, SerializationUtils.serialize(value)));
+        return eventHeaders;
+    }
+
+}
diff --git a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDeltaServiceImplSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDeltaServiceImplSpec.groovy
index e21c6f0e2f..42d75f3eab 100644
--- a/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDeltaServiceImplSpec.groovy
+++ b/cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDeltaServiceImplSpec.groovy
@@ -21,7 +21,6 @@
 package org.onap.cps.api.impl
 
 import org.onap.cps.spi.model.DataNode
-import spock.lang.Shared
 import spock.lang.Specification
 
 class CpsDeltaServiceImplSpec extends Specification{
@@ -36,7 +35,7 @@ class CpsDeltaServiceImplSpec extends Specification{
     static def sourceDataNodeWithMultipleLeaves = [new DataNode(xpath: '/parent', leaves: ['leaf-1': 'leaf-1-in-source', 'leaf-2': 'leaf-2-in-source'])]
     static def targetDataNodeWithMultipleLeaves = [new DataNode(xpath: '/parent', leaves: ['leaf-1': 'leaf-1-in-target', 'leaf-2': 'leaf-2-in-target'])]
 
-    def 'Get delta between data nodes for REMOVED data where source data node has #scenario'() {
+    def 'Get delta between data nodes for REMOVED data'() {
         when: 'attempt to get delta between 2 data nodes'
             def result = objectUnderTest.getDeltaReports(sourceDataNodeWithLeafData, [])
         then: 'the delta report contains expected "remove" action'
@@ -49,7 +48,7 @@ class CpsDeltaServiceImplSpec extends Specification{
             assert result[0].targetData == null
     }
 
-    def 'Get delta between data nodes with ADDED data where target data node has #scenario'() {
+    def 'Get delta between data nodes for ADDED data'() {
         when: 'attempt to get delta between 2 data nodes'
             def result = objectUnderTest.getDeltaReports([], targetDataNodeWithLeafData)
         then: 'the delta report contains expected "add" action'
@@ -62,23 +61,22 @@ class CpsDeltaServiceImplSpec extends Specification{
             assert result[0].targetData == ['parent-leaf': 'parent-payload-in-target']
     }
 
-    def 'Delta Report between leaves for parent and child nodes, #scenario'() {
+    def 'Delta Report between leaves for parent and child nodes'() {
         given: 'Two data nodes'
             def sourceDataNode = [new DataNode(xpath: '/parent', leaves: ['parent-leaf': 'parent-payload'], childDataNodes: [new DataNode(xpath: '/parent/child', leaves: ['child-leaf': 'child-payload'])])]
             def targetDataNode = [new DataNode(xpath: '/parent', leaves: ['parent-leaf': 'parent-payload-updated'], childDataNodes: [new DataNode(xpath: '/parent/child', leaves: ['child-leaf': 'child-payload-updated'])])]
         when: 'attempt to get delta between 2 data nodes'
             def result = objectUnderTest.getDeltaReports(sourceDataNode, targetDataNode)
-        then: 'the delta report contains expected "update" action'
-            assert result[index].action.equals('update')
-        and: 'the delta report contains expected xpath'
-            assert result[index].xpath == expectedXpath
-        and: 'the delta report contains expected source and target data'
-            assert result[index].sourceData == expectedSourceData
-            assert result[index].targetData == expectedTargetData
-        where: 'the following data was used'
-            scenario           | index || expectedXpath   | expectedSourceData                | expectedTargetData
-            'parent data node' | 0     || '/parent'       | ['parent-leaf': 'parent-payload'] | ['parent-leaf': 'parent-payload-updated']
-            'child data node'  | 1     || '/parent/child' | ['child-leaf': 'child-payload']   | ['child-leaf': 'child-payload-updated']
+        then: 'the delta report contains expected details for parent node'
+            assert result[0].action.equals('update')
+            assert result[0].xpath == '/parent'
+            assert result[0].sourceData == ['parent-leaf': 'parent-payload']
+            assert result[0].targetData == ['parent-leaf': 'parent-payload-updated']
+        and: 'the delta report contains expected details for child node'
+            assert result[1].action.equals('update')
+            assert result[1].xpath == '/parent/child'
+            assert result[1].sourceData == ['child-leaf': 'child-payload']
+            assert result[1].targetData == ['child-leaf': 'child-payload-updated']
     }
 
     def 'Delta report between leaves, #scenario'() {
diff --git a/cps-service/src/test/groovy/org/onap/cps/events/EventsPublisherSpec.groovy b/cps-service/src/test/groovy/org/onap/cps/events/EventsPublisherSpec.groovy
new file mode 100644
index 0000000000..fe67287dec
--- /dev/null
+++ b/cps-service/src/test/groovy/org/onap/cps/events/EventsPublisherSpec.groovy
@@ -0,0 +1,194 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2024 Nordix Foundation
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.cps.events
+
+import ch.qos.logback.classic.Level
+import ch.qos.logback.classic.Logger
+import ch.qos.logback.classic.spi.ILoggingEvent
+import ch.qos.logback.core.read.ListAppender
+import io.cloudevents.CloudEvent
+import org.apache.kafka.clients.producer.ProducerRecord
+import org.apache.kafka.clients.producer.RecordMetadata
+import org.apache.kafka.common.TopicPartition
+import org.apache.kafka.common.header.Headers
+import org.apache.kafka.common.header.internals.RecordHeader
+import org.apache.kafka.common.header.internals.RecordHeaders
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.BeforeEach
+import org.slf4j.LoggerFactory
+import org.springframework.kafka.core.KafkaTemplate
+import org.springframework.kafka.support.SendResult
+import org.springframework.util.SerializationUtils
+import spock.lang.Specification
+
+import java.util.concurrent.CompletableFuture
+
+class EventsPublisherSpec extends Specification {
+
+    def legacyKafkaTemplateMock = Mock(KafkaTemplate)
+    def mockCloudEventKafkaTemplate = Mock(KafkaTemplate)
+    def logger = Spy(ListAppender<ILoggingEvent>)
+
+    void setup() {
+        def setupLogger = ((Logger) LoggerFactory.getLogger(EventsPublisher.class))
+        setupLogger.setLevel(Level.DEBUG)
+        setupLogger.addAppender(logger)
+        logger.start()
+    }
+
+    void cleanup() {
+        ((Logger) LoggerFactory.getLogger(EventsPublisher.class)).detachAndStopAllAppenders()
+    }
+
+    def objectUnderTest = new EventsPublisher(legacyKafkaTemplateMock, mockCloudEventKafkaTemplate)
+
+    def 'Publish Cloud Event'() {
+        given: 'a successfully published event'
+            def eventFuture = CompletableFuture.completedFuture(
+                new SendResult(
+                    new ProducerRecord('some-topic', 'some-value'),
+                    new RecordMetadata(new TopicPartition('some-topic', 0), 0, 0, 0, 0, 0)
+                )
+            )
+            def someCloudEvent = Mock(CloudEvent)
+            1 * mockCloudEventKafkaTemplate.send('some-topic', 'some-event-key', someCloudEvent) >> eventFuture
+        when: 'publishing the cloud event'
+            objectUnderTest.publishCloudEvent('some-topic', 'some-event-key', someCloudEvent)
+        then: 'the correct debug message is logged'
+            def lastLoggingEvent = logger.list[0]
+            assert lastLoggingEvent.level == Level.DEBUG
+            assert lastLoggingEvent.formattedMessage.contains('Successfully published event')
+    }
+
+    def 'Publish Cloud Event with Exception'() {
+        given: 'a failed event'
+            def eventFutureWithFailure = new CompletableFuture<SendResult<String, String>>()
+            eventFutureWithFailure.completeExceptionally(new RuntimeException('some exception'))
+            def someCloudEvent = Mock(CloudEvent)
+            1 * mockCloudEventKafkaTemplate.send('some-topic', 'some-event-key', someCloudEvent) >> eventFutureWithFailure
+        when: 'publishing the cloud event'
+            objectUnderTest.publishCloudEvent('some-topic', 'some-event-key', someCloudEvent)
+        then: 'the correct error message is logged'
+            def lastLoggingEvent = logger.list[0]
+            assert lastLoggingEvent.level == Level.ERROR
+            assert lastLoggingEvent.formattedMessage.contains('Unable to publish event')
+    }
+
+    def 'Publish Legacy Event'() {
+        given: 'a successfully published event'
+            def eventFuture = CompletableFuture.completedFuture(
+                new SendResult(
+                    new ProducerRecord('some-topic', 'some-value'),
+                    new RecordMetadata(new TopicPartition('some-topic', 0), 0, 0, 0, 0, 0)
+                )
+            )
+            def someEvent = Mock(Object)
+            1 * legacyKafkaTemplateMock.send('some-topic', 'some-event-key', someEvent) >> eventFuture
+        when: 'publishing the cloud event'
+            objectUnderTest.publishEvent('some-topic', 'some-event-key', someEvent)
+        then: 'the correct debug message is logged'
+            def lastLoggingEvent = logger.list[0]
+            assert lastLoggingEvent.level == Level.DEBUG
+            assert lastLoggingEvent.formattedMessage.contains('Successfully published event')
+    }
+
+    def 'Publish Legacy Event with Headers as Map'() {
+        given: 'a successfully published event'
+            def sampleEventHeaders = ['k1': SerializationUtils.serialize('v1')]
+            def eventFuture = CompletableFuture.completedFuture(
+                new SendResult(
+                    new ProducerRecord('some-topic', 'some-value'),
+                    new RecordMetadata(new TopicPartition('some-topic', 0), 0, 0, 0, 0, 0)
+                )
+            )
+            def someEvent = Mock(Object.class)
+        when: 'publishing the legacy event'
+            objectUnderTest.publishEvent('some-topic', 'some-event-key', sampleEventHeaders, someEvent)
+        then: 'event is published'
+            1 * legacyKafkaTemplateMock.send(_) >> eventFuture
+        and: 'the correct debug message is logged'
+            def lastLoggingEvent = logger.list[0]
+            assert lastLoggingEvent.level == Level.DEBUG
+            assert lastLoggingEvent.formattedMessage.contains('Successfully published event')
+    }
+
+    def 'Publish Legacy Event with Record Headers'() {
+        given: 'a successfully published event'
+            def sampleEventHeaders = new RecordHeaders([new RecordHeader('k1', SerializationUtils.serialize('v1'))])
+            def sampleProducerRecord = new ProducerRecord('some-topic', null, 'some-key', 'some-value', sampleEventHeaders)
+            def eventFuture = CompletableFuture.completedFuture(
+                new SendResult(
+                    sampleProducerRecord,
+                    new RecordMetadata(new TopicPartition('some-topic', 0), 0, 0, 0, 0, 0)
+                )
+            )
+            def someEvent = Mock(Object.class)
+        when: 'publishing the legacy event'
+            objectUnderTest.publishEvent('some-topic', 'some-event-key', sampleEventHeaders, someEvent)
+        then: 'event is published'
+            1 * legacyKafkaTemplateMock.send(_) >> eventFuture
+        and: 'the correct debug message is logged'
+            def lastLoggingEvent = logger.list[0]
+            assert lastLoggingEvent.level == Level.DEBUG
+            assert lastLoggingEvent.formattedMessage.contains('Successfully published event')
+    }
+
+    def 'Handle Legacy Event Callback'() {
+        given: 'an event is successfully published'
+            def eventFuture = CompletableFuture.completedFuture(
+                new SendResult(
+                    new ProducerRecord('some-topic', 'some-value'),
+                    new RecordMetadata(new TopicPartition('some-topic', 0), 0, 0, 0, 0, 0)
+                )
+            )
+        when: 'handling legacy event callback'
+            objectUnderTest.handleLegacyEventCallback('some-topic', eventFuture)
+        then: 'the correct debug message is logged'
+            def lastLoggingEvent = logger.list[0]
+            assert lastLoggingEvent.level == Level.DEBUG
+            assert lastLoggingEvent.formattedMessage.contains('Successfully published event')
+    }
+
+    def 'Handle Legacy Event Callback with Exception'() {
+        given: 'a failure to publish an event'
+            def eventFutureWithFailure = new CompletableFuture<SendResult<String, String>>()
+            eventFutureWithFailure.completeExceptionally(new RuntimeException('some exception'))
+        when: 'handling legacy event callback'
+            objectUnderTest.handleLegacyEventCallback('some-topic', eventFutureWithFailure)
+        then: 'the correct error message is logged'
+            def lastLoggingEvent = logger.list[0]
+            assert lastLoggingEvent.level == Level.ERROR
+            assert lastLoggingEvent.formattedMessage.contains('Unable to publish event')
+    }
+
+    def 'Convert to kafka headers'() {
+        given: 'Few key value pairs'
+            def someKeyValue = ['key1': 'value1', 'key2': 'value2']
+        when: 'we convert to headers'
+            def headers = objectUnderTest.convertToKafkaHeaders(someKeyValue)
+        then: 'it is correctly converted'
+            assert headers instanceof Headers
+        and: 'also has correct values'
+            assert headers[0].key() == 'key1'
+            assert headers[1].key() == 'key2'
+    }
+
+}
\ No newline at end of file
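
For reference, a minimal usage sketch of the new publisher. This caller is hypothetical (the component name, topic, event type, source URI, header key and payloads are invented for illustration); it assumes a Spring context that supplies the two KafkaTemplate beans EventsPublisher requires, and it builds the CloudEvent with the standard io.cloudevents CloudEventBuilder, which arrives transitively through the cloudevents dependencies added to pom.xml above.

import io.cloudevents.CloudEvent;
import io.cloudevents.core.builder.CloudEventBuilder;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.UUID;
import lombok.RequiredArgsConstructor;
import org.onap.cps.events.EventsPublisher;
import org.springframework.stereotype.Component;

// Hypothetical caller, for illustration only -- not part of this change.
@Component
@RequiredArgsConstructor
public class SampleEventProducer {

    // Generic type T = String here; any serializable payload type works for the legacy path.
    private final EventsPublisher<String> eventsPublisher;

    public void sendSampleEvents() {
        // Preferred path: wrap the payload in a CloudEvent (see the CPS-1717 note above).
        final CloudEvent cloudEvent = CloudEventBuilder.v1()
            .withId(UUID.randomUUID().toString())
            .withSource(URI.create("urn:cps:sample-producer"))  // invented source URI
            .withType("org.onap.cps.sample.DataUpdatedEvent")   // invented event type
            .withDataContentType("application/json")
            .withData("{\"sample\":\"payload\"}".getBytes(StandardCharsets.UTF_8))
            .build();
        eventsPublisher.publishCloudEvent("sample-topic", "sample-key", cloudEvent);

        // Legacy path: headers passed as a map are serialized into Kafka record headers
        // by convertToKafkaHeaders(), so each header value must be Serializable.
        eventsPublisher.publishEvent("sample-topic", "sample-key",
            Map.of("correlationId", UUID.randomUUID().toString()), "legacy-payload");
    }
}

Note that both publish methods are fire-and-forget: the CompletableFuture returned by KafkaTemplate is consumed inside EventsPublisher, which only logs the outcome, so callers receive no delivery result.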