diff options
author | 2023-01-06 11:15:15 +0000 | |
---|---|---|
committer | 2023-03-09 10:00:42 +0000 | |
commit | 38f5b4b9dc667c52561867d4e36f940109f3e3a5 (patch) | |
tree | 7f3b4f9ec6a897a3e5b7cf11de72e5ad7da9adc2 /kafkaClient/src/main/java | |
parent | a6b96912d1fa3ee369577c50079b6f1d25907607 (diff) |
[KAFKA] Adding new client code
Signed-off-by: david.mcweeney <david.mcweeney@est.tech>
Change-Id: I38b930b1b5f4233f961d51bbab4b1828d034e67a
Issue-ID: DMAAP-1847
Diffstat (limited to 'kafkaClient/src/main/java')
4 files changed, 383 insertions, 0 deletions
/*-
 * ============LICENSE_START=======================================================
 * dmaap-kafka-client
 * ================================================================================
 * Copyright (C) 2023 Nordix Foundation. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */

package org.onap.dmaap.kafka;

import java.util.List;
import org.apache.kafka.common.KafkaException;

/**
 * Configuration contract used by both the consumer and the producer clients to
 * connect and authenticate towards a kafka cluster.
 */
public interface IKafkaConfig {

    /**
     * Returns the list of kafka bootstrap servers.
     *
     * @return List of kafka bootstrap servers.
     */
    List<String> getKafkaBootstrapServers();

    /**
     * Kafka security protocol to be used by the client to auth towards the kafka cluster.
     *
     * @return Kafka security.protocol. Default is SASL_PLAINTEXT in the current onap kafka config
     */
    default String getKafkaSecurityProtocolConfig() {
        return "SASL_PLAINTEXT";
    }

    /**
     * Kafka SASL mechanism to be used by the client to auth towards the kafka cluster.
     *
     * @return Kafka sasl.mechanism. Default is SCRAM-SHA-512 in the current onap kafka config
     */
    default String getKafkaSaslMechanism() {
        return "SCRAM-SHA-512";
    }

    /**
     * Kafka JAAS config to be used by the client to auth towards the kafka cluster.
     * If overridden, it must align with the sasl.jaas.config convention set out by the
     * sasl.mechanism being used; otherwise the environment variable SASL_JAAS_CONFIG
     * must be set to provide the default behaviour.
     *
     * @return Kafka sasl.jaas.config
     * @throws KafkaException if no override is provided and SASL_JAAS_CONFIG is not set
     */
    default String getKafkaSaslJaasConfig() {
        String saslJaasConfFromEnv = System.getenv("SASL_JAAS_CONFIG");
        if (saslJaasConfFromEnv != null) {
            return saslJaasConfFromEnv;
        }
        // This config is shared by both the consumer and the producer, so the error
        // must not claim it is consumer-specific.
        throw new KafkaException("sasl.jaas.config not set for Kafka client");
    }

    /**
     * The timeout in seconds to wait for a response from each poll.
     *
     * @return Client timeout in seconds. Default is 10 seconds
     */
    default int getPollingTimeout() {
        return 10;
    }

    /**
     * Returns the kafka consumer group defined for this component.
     *
     * @return KafkaConsumer group.
     */
    String getConsumerGroup();

    /**
     * Returns the kafka consumer id defined for this component.
     *
     * @return KafkaConsumer id or null.
     */
    String getConsumerID();

    /**
     * Returns a list of kafka topics to consume from.
     *
     * @return List of kafka topics or empty.
     */
    List<String> getConsumerTopics();

    /**
     * Returns a list of kafka topics to produce to.
     *
     * @return List of kafka topics or empty.
     */
    List<String> getProducerTopics();
}
/*-
 * ============LICENSE_START=======================================================
 * dmaap-kafka-client
 * ================================================================================
 * Copyright (C) 2023 Nordix Foundation. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */
package org.onap.dmaap.kafka;

import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.KafkaException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Utility class that provides a handler for Kafka interactions.
 *
 * <p>The consumer is only created (and subscribed) when the configuration lists at
 * least one consumer topic; the producer is always created.
 */
public class OnapKafkaClient {

    private final Logger log = LoggerFactory.getLogger(OnapKafkaClient.class.getName());

    // Remains null when the configuration declares no consumer topics.
    private OnapKafkaConsumer onapKafkaConsumer = null;

    private final OnapKafkaProducer onapKafkaProducer;

    /**
     * @param configuration The config provided to the client
     */
    public OnapKafkaClient(IKafkaConfig configuration) {
        if (!configuration.getConsumerTopics().isEmpty()) {
            onapKafkaConsumer = new OnapKafkaConsumer(configuration);
            onapKafkaConsumer.subscribeConsumerToTopics();
        }
        onapKafkaProducer = new OnapKafkaProducer(configuration);
    }

    /**
     * Fetch messages from a given topic.
     *
     * @param topicName The topic from which messages will be fetched
     * @return A list of messages from the specified topic; empty if the poll fails or
     *         no consumer topics were configured
     */
    public List<String> fetchFromTopic(String topicName) {
        List<String> messages = new ArrayList<>();
        if (onapKafkaConsumer != null) {
            try {
                log.debug("Polling for messages from topic: {}", topicName);
                messages = onapKafkaConsumer.poll(topicName);
                log.debug("Returning messages from topic {}", topicName);
                return messages;
            } catch (KafkaException e) {
                log.error("Failed to fetch from kafka for topic: {}", topicName, e);
            }
        } else {
            log.error("Consumer has not been initialised with the required topic list");
        }
        return messages;
    }

    /**
     * Publish data to a given topic.
     *
     * @param topicName The topic to which the message should be published
     * @param data The data to publish to the topic specified
     * @return The {@link RecordMetadata} of the send, or null if publishing failed
     */
    public RecordMetadata publishToTopic(String topicName, String data) {
        return onapKafkaProducer.sendDataSynch(topicName, data);
    }
}
/*-
 * ============LICENSE_START=======================================================
 * dmaap-kafka-client
 * ================================================================================
 * Copyright (C) 2023 Nordix Foundation. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */

package org.onap.dmaap.kafka;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.config.SaslConfigs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Utility class that provides a KafkaConsumer to communicate with a kafka cluster.
 */
public class OnapKafkaConsumer {

    private final Logger log = LoggerFactory.getLogger(OnapKafkaConsumer.class);
    private final KafkaConsumer<String, String> consumer;
    private final int pollTimeout;
    private final List<String> consumerTopics;

    /**
     * Builds the consumer from the supplied client configuration.
     *
     * @param configuration The config provided to the client
     */
    public OnapKafkaConsumer(IKafkaConfig configuration) {
        consumerTopics = configuration.getConsumerTopics();
        log.debug("Instantiating kafka consumer for topics {}", consumerTopics);

        Properties props = new Properties();
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        // NOTE(review): IKafkaConfig documents getConsumerID() as possibly null, which would
        // yield a client.id of "null-consumer-<uuid>" — confirm callers always set an id.
        props.put(ConsumerConfig.CLIENT_ID_CONFIG, configuration.getConsumerID() + "-consumer-" + UUID.randomUUID());
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, configuration.getKafkaSecurityProtocolConfig());
        props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, configuration.getKafkaBootstrapServers());
        props.put(SaslConfigs.SASL_JAAS_CONFIG, configuration.getKafkaSaslJaasConfig());
        props.put(SaslConfigs.SASL_MECHANISM, configuration.getKafkaSaslMechanism());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, configuration.getConsumerGroup());
        props.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, false);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);

        consumer = new KafkaConsumer<>(props);

        pollTimeout = configuration.getPollingTimeout();
    }

    /**
     * Poll specified topic for existing messages.
     *
     * <p>NOTE(review): records returned by the poll that belong to other subscribed
     * topics are silently discarded here, and with enable.auto.commit=true their
     * offsets may still be committed — potential message loss when consuming from
     * more than one topic; confirm this is acceptable for the intended usage.
     *
     * @param topicName only records whose topic equals this name are returned
     * @return List of messages from the specified topic
     * @throws KafkaException if the underlying poll fails
     */
    List<String> poll(String topicName) throws KafkaException {
        List<String> msgs = new ArrayList<>();
        log.debug("Polling records for topic {}", topicName);
        ConsumerRecords<String, String> consumerRecordsForSpecificTopic = consumer.poll(Duration.ofSeconds(pollTimeout));
        for (ConsumerRecord<String, String> rec : consumerRecordsForSpecificTopic) {
            if (rec.topic().equals(topicName)) {
                msgs.add(rec.value());
            }
        }
        return msgs;
    }

    /**
     * Poll topics for existing messages.
     *
     * @return List of messages from all subscribed topics
     * @throws KafkaException if the underlying poll fails
     */
    List<String> poll() throws KafkaException {
        List<String> msgs = new ArrayList<>();
        log.debug("Polling all records");
        ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(pollTimeout));
        for (ConsumerRecord<String, String> rec : consumerRecords) {
            msgs.add(rec.value());
        }
        return msgs;
    }

    /**
     * Subscribes the consumer to the configured topic list. Must be called before polling.
     *
     * @throws KafkaException if the subscription fails; rethrown after logging
     */
    public void subscribeConsumerToTopics() {
        try {
            consumer.subscribe(consumerTopics);
        } catch (KafkaException e) {
            // Pass the throwable as the last argument so SLF4J logs the full stack
            // trace instead of only the message text.
            log.error("Failed to subscribe to given topic(s) {}", consumerTopics, e);
            throw e;
        }
    }
}
/*-
 * ============LICENSE_START=======================================================
 * dmaap-kafka-client
 * ================================================================================
 * Copyright (C) 2023 Nordix Foundation. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */

package org.onap.dmaap.kafka;

import java.util.List;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.config.SaslConfigs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Utility class that provides a KafkaProducer to communicate with a kafka cluster.
 */
public class OnapKafkaProducer {

    private final Logger log = LoggerFactory.getLogger(OnapKafkaProducer.class);
    private final KafkaProducer<String, String> producer;
    private final List<String> producerTopics;

    /**
     * Builds the producer from the supplied client configuration.
     *
     * @param configuration The config provided to the client
     */
    public OnapKafkaProducer(IKafkaConfig configuration) {
        producerTopics = configuration.getProducerTopics();
        log.debug("Instantiating kafka producer for topics {}", producerTopics);
        Properties props = new Properties();

        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        // NOTE(review): the producer client.id is derived from getConsumerID() — looks
        // intentional (single component id) but confirm; a null id yields "null-producer-<uuid>".
        props.put(ProducerConfig.CLIENT_ID_CONFIG, configuration.getConsumerID() + "-producer-" + UUID.randomUUID());
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, configuration.getKafkaSecurityProtocolConfig());
        props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, configuration.getKafkaBootstrapServers());
        props.put(SaslConfigs.SASL_JAAS_CONFIG, configuration.getKafkaSaslJaasConfig());
        props.put(SaslConfigs.SASL_MECHANISM, configuration.getKafkaSaslMechanism());
        // Cap how long send() may block (e.g. fetching metadata) at 10 seconds.
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 10000);
        producer = new KafkaProducer<>(props);
    }

    /**
     * Sends one record synchronously, blocking until the broker acknowledges it.
     *
     * @param topicName The name of the topic to publish the data to
     * @param value The value of the data
     * @return The RecordMetadata of the request, or null if the send failed
     */
    public RecordMetadata sendDataSynch(String topicName, String value) {
        RecordMetadata data = null;
        try {
            data = producer.send(new ProducerRecord<>(topicName, value)).get();
            log.debug("Data sent to topic {} at partition no {} and offset {}", topicName, data.partition(), data.offset());
        } catch (KafkaException | ExecutionException e) {
            // Log the full throwable, not just getMessage(), so the cause is visible.
            log.error("Failed to send data to topic {}", topicName, e);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe the interruption;
            // swallowing it silently breaks cooperative cancellation.
            Thread.currentThread().interrupt();
            log.error("Interrupted while sending data to topic {}", topicName, e);
        } finally {
            producer.flush();
        }
        return data;
    }
}