author    | Ram Krishna Verma <ram_krishna.verma@bell.ca> | 2022-09-28 14:43:47 -0400
committer | Ram Krishna Verma <ram_krishna.verma@bell.ca> | 2022-09-29 11:43:34 -0400
commit    | 9bf76281dae15941f3d53c1f1a419a8faa5f20f4 (patch)
tree      | 978fc1e68bab74f39e4afbc9f5037c42cf8a8b1b
parent    | addf55c8f8be996539deb4aac5a8a520d541bbc1 (diff)
Add support for KafkaAvroSerializer in apex-pdp
Add support for the Confluent kafka-avro-serializer library so that
messages published to a Kafka topic with the KafkaAvroSerializer can
be deserialized by apex-pdp. The default StringDeserializer used by
KafkaConsumer cannot handle Avro-encoded messages.
Issue-ID: POLICY-4369
Change-Id: Ic12dc156b88e1ef323f8b600e464beef4a02d72e
Signed-off-by: Ram Krishna Verma <ram_krishna.verma@bell.ca>
2 files changed, 19 insertions, 6 deletions
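For context, a minimal sketch of how a consumer could be configured to use the Avro deserializer this change enables. The broker address, group id, topic name and schema registry URL below are placeholder values; in apex-pdp the properties actually come from the carrier technology configuration (kafkaConsumerProperties). Only the Confluent KafkaAvroDeserializer class name and the standard Kafka property names are taken as given.

import java.util.List;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class AvroConsumerConfigSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Placeholder connection settings; real values come from the apex-pdp
        // consumer parameters, not from hard-coded constants like these.
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "apex-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
            "org.apache.kafka.common.serialization.StringDeserializer");
        // The Confluent Avro deserializer shipped in the kafka-avro-serializer
        // artifact added to the pom; it returns Avro objects rather than Strings,
        // which is why the consumer below is typed <String, Object>.
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
            "io.confluent.kafka.serializers.KafkaAvroDeserializer");
        // The Avro deserializer resolves schemas through a schema registry;
        // placeholder URL.
        props.put("schema.registry.url", "http://localhost:8081");

        try (KafkaConsumer<String, Object> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(List.of("my-avro-topic"));
        }
    }
}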
diff --git a/plugins/plugins-event/plugins-event-carrier/plugins-event-carrier-kafka/pom.xml b/plugins/plugins-event/plugins-event-carrier/plugins-event-carrier-kafka/pom.xml
index 92cec0696..88df7afab 100644
--- a/plugins/plugins-event/plugins-event-carrier/plugins-event-carrier-kafka/pom.xml
+++ b/plugins/plugins-event/plugins-event-carrier/plugins-event-carrier-kafka/pom.xml
@@ -1,6 +1,7 @@
 <!--
   ============LICENSE_START=======================================================
    Copyright (C) 2018 Ericsson. All rights reserved.
+   Modifications Copyright (C) 2022 Bell Canada. All rights reserved.
   ================================================================================
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
@@ -42,5 +43,17 @@
                 </exclusion>
             </exclusions>
         </dependency>
+        <dependency>
+            <groupId>io.confluent</groupId>
+            <artifactId>kafka-avro-serializer</artifactId>
+            <version>7.2.1</version>
+        </dependency>
     </dependencies>
+
+    <repositories>
+        <repository>
+            <id>confluent</id>
+            <url>https://packages.confluent.io/maven/</url>
+        </repository>
+    </repositories>
 </project>
diff --git a/plugins/plugins-event/plugins-event-carrier/plugins-event-carrier-kafka/src/main/java/org/onap/policy/apex/plugins/event/carrier/kafka/ApexKafkaConsumer.java b/plugins/plugins-event/plugins-event-carrier/plugins-event-carrier-kafka/src/main/java/org/onap/policy/apex/plugins/event/carrier/kafka/ApexKafkaConsumer.java
index 2957a1a11..21ffd63a3 100644
--- a/plugins/plugins-event/plugins-event-carrier/plugins-event-carrier-kafka/src/main/java/org/onap/policy/apex/plugins/event/carrier/kafka/ApexKafkaConsumer.java
+++ b/plugins/plugins-event/plugins-event-carrier/plugins-event-carrier-kafka/src/main/java/org/onap/policy/apex/plugins/event/carrier/kafka/ApexKafkaConsumer.java
@@ -2,7 +2,7 @@
  * ============LICENSE_START=======================================================
  * Copyright (C) 2016-2018 Ericsson. All rights reserved.
  * Modifications Copyright (C) 2019-2020 Nordix Foundation.
- * Modifications Copyright (C) 2021 Bell Canada. All rights reserved.
+ * Modifications Copyright (C) 2021-2022 Bell Canada. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -74,7 +74,7 @@ public class ApexKafkaConsumer extends ApexPluginsEventConsumer {
     @Override
     public void run() {
         // Kick off the Kafka consumer
-        try (KafkaConsumer<String, String> kafkaConsumer =
+        try (KafkaConsumer<String, Object> kafkaConsumer =
             new KafkaConsumer<>(kafkaConsumerProperties.getKafkaConsumerProperties())) {
             kafkaConsumer.subscribe(kafkaConsumerProperties.getConsumerTopicListAsCollection());
             if (LOGGER.isDebugEnabled()) {
@@ -85,11 +85,11 @@ public class ApexKafkaConsumer extends ApexPluginsEventConsumer {
             // The endless loop that receives events over Kafka
             while (consumerThread.isAlive() && !stopOrderedFlag) {
                 try {
-                    final ConsumerRecords<String, String> records =
+                    final ConsumerRecords<String, Object> records =
                         kafkaConsumer.poll(kafkaConsumerProperties.getConsumerPollDuration());
-                    for (final ConsumerRecord<String, String> record : records) {
+                    for (final ConsumerRecord<String, Object> record : records) {
                         traceIfTraceEnabled(record);
-                        eventReceiver.receiveEvent(new Properties(), record.value());
+                        eventReceiver.receiveEvent(new Properties(), record.value().toString());
                     }
                 } catch (final Exception e) {
                     LOGGER.debug("error receiving events on thread {}", consumerThread.getName(), e);
@@ -103,7 +103,7 @@ public class ApexKafkaConsumer extends ApexPluginsEventConsumer {
     *
     * @param record the record to trace
     */
-    private void traceIfTraceEnabled(final ConsumerRecord<String, String> record) {
+    private void traceIfTraceEnabled(final ConsumerRecord<String, Object> record) {
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("event received for {} for forwarding to Apex engine : {} {}",
                this.getClass().getName() + ":" + this.name, record.key(), record.value());
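With the Confluent value deserializer configured, record.value() is typically an Avro GenericRecord, and GenericRecord.toString() renders the record as JSON, which is the string the consumer now hands to eventReceiver.receiveEvent(). A minimal sketch of that behaviour, assuming a made-up PolicyEvent schema used purely for illustration:

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;

public class AvroValueToStringExample {
    public static void main(String[] args) {
        // Hypothetical schema for illustration only; real schemas come from
        // the schema registry used by the Avro deserializer.
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"PolicyEvent\",\"fields\":["
            + "{\"name\":\"name\",\"type\":\"string\"},"
            + "{\"name\":\"version\",\"type\":\"string\"}]}");

        GenericRecord value = new GenericData.Record(schema);
        value.put("name", "SamplePolicyEvent");
        value.put("version", "0.0.1");

        // GenericRecord.toString() renders the record as JSON, e.g.
        // {"name": "SamplePolicyEvent", "version": "0.0.1"}
        System.out.println(value.toString());
    }
}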