summaryrefslogtreecommitdiffstats
path: root/kafkaClient/src
diff options
context:
space:
mode:
Diffstat (limited to 'kafkaClient/src')
-rw-r--r--kafkaClient/src/main/java/org/onap/dmaap/kafka/IKafkaConfig.java105
-rw-r--r--kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaClient.java79
-rw-r--r--kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaConsumer.java115
-rw-r--r--kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaProducer.java84
-rw-r--r--kafkaClient/src/main/resources/logback.xml11
-rw-r--r--kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java126
-rw-r--r--kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java110
-rw-r--r--kafkaClient/src/test/resources/application.properties6
-rw-r--r--kafkaClient/src/test/resources/invalid-application.properties6
-rw-r--r--kafkaClient/src/test/resources/jaas.conf20
-rw-r--r--kafkaClient/src/test/resources/logback-test.xml20
11 files changed, 682 insertions, 0 deletions
diff --git a/kafkaClient/src/main/java/org/onap/dmaap/kafka/IKafkaConfig.java b/kafkaClient/src/main/java/org/onap/dmaap/kafka/IKafkaConfig.java
new file mode 100644
index 0000000..ebf8863
--- /dev/null
+++ b/kafkaClient/src/main/java/org/onap/dmaap/kafka/IKafkaConfig.java
@@ -0,0 +1,105 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import java.util.List;
+import org.apache.kafka.common.KafkaException;
+
+public interface IKafkaConfig {
+
+ /**
+ * Returns the list of kafka bootstrap servers.
+ *
+ * @return List of kafka bootstrap servers.
+ */
+ List<String> getKafkaBootstrapServers();
+
+ /**
+ * Kafka security protocol to be used by the client to Auth towards the kafka cluster
+ *
+ * @return Kafka security.protocol. Default is SASL_PLAINTEXT in the current onap kafka config
+ */
+ default String getKafkaSecurityProtocolConfig() {
+ return "SASL_PLAINTEXT";
+ }
+
+ /**
+ * Kafka SASL mechanism to be used by the client to Auth towards the kafka cluster
+ *
+ * @return Kafka sasl.mechanism. Default is SCRAM-SHA-512 in the current onap kafka config
+ */
+ default String getKafkaSaslMechanism() {
+ return "SCRAM-SHA-512";
+ }
+
+ /**
+ * Kafka JAAS config to be used by the client to Auth towards the kafka cluster.
+ * If overridden, must align with sasl.jaas.config convention set out by the sasl.mechanism being used
+ * otherwise, mandatory setting of the environment variable SASL_JAAS_CONFIG is required to provide default behaviour
+ * @return Kafka sasl.jaas.config
+ */
+ default String getKafkaSaslJaasConfig() {
+ String saslJaasConfFromEnv = System.getenv("SASL_JAAS_CONFIG");
+ if(saslJaasConfFromEnv != null) {
+ return saslJaasConfFromEnv;
+ } else {
+ throw new KafkaException("sasl.jaas.config not set for Kafka Consumer");
+ }
+ }
+
+ /**
+ * The timeout in seconds to wait for a response from each poll.
+ *
+ * @return Client Timeout in seconds. Default is 10 seconds
+ */
+ default int getPollingTimeout() {
+ return 10;
+ }
+
+ /**
+ * Returns the kafka consumer group defined for this component.
+ *
+ * @return KafkaConsumer group.
+ */
+ String getConsumerGroup();
+
+ /**
+ * Returns the kafka consumer id defined for this component.
+ *
+ * @return KafkaConsumer id or null.
+ */
+ String getConsumerID();
+
+ /**
+ * Returns a list of kafka topics to consume from.
+ *
+ * @return List of kafka topics or empty.
+ */
+ List<String> getConsumerTopics();
+
+ /**
+ * Returns a list of kafka topics to produce to.
+ *
+ * @return List of kafka topics or empty.
+ */
+ List<String> getProducerTopics();
+
+}
diff --git a/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaClient.java b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaClient.java
new file mode 100644
index 0000000..7986869
--- /dev/null
+++ b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaClient.java
@@ -0,0 +1,79 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2022 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.dmaap.kafka;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.common.KafkaException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility class that provides a handler for Kafka interactions.
+ */
+public class OnapKafkaClient {
+
+    private final Logger log = LoggerFactory.getLogger(OnapKafkaClient.class.getName());
+
+    private OnapKafkaConsumer onapKafkaConsumer = null;
+
+    private final OnapKafkaProducer onapKafkaProducer;
+
+    /**
+     * @param configuration The config provided to the client. A consumer is created and
+     *                      subscribed only when the config declares consumer topics.
+     */
+    public OnapKafkaClient(IKafkaConfig configuration) {
+        if (!configuration.getConsumerTopics().isEmpty()) {
+            onapKafkaConsumer = new OnapKafkaConsumer(configuration);
+            onapKafkaConsumer.subscribeConsumerToTopics();
+        }
+        onapKafkaProducer = new OnapKafkaProducer(configuration);
+    }
+
+    /**
+     * Fetch messages from a given topic.
+     *
+     * @param topicName The topic from which messages will be fetched
+     * @return A list of messages from a specific topic; empty when the consumer was
+     *         never initialised (no consumer topics configured) or the poll failed
+     */
+    public List<String> fetchFromTopic(String topicName) {
+        List<String> messages = new ArrayList<>();
+        if (onapKafkaConsumer == null) {
+            log.error("Consumer has not been initialised with the required topic list");
+            return messages;
+        }
+        try {
+            log.debug("Polling for messages from topic: {}", topicName);
+            messages = onapKafkaConsumer.poll(topicName);
+            log.debug("Returning messages from topic {}", topicName);
+        } catch (KafkaException e) {
+            log.error("Failed to fetch from kafka for topic: {}", topicName, e);
+        }
+        return messages;
+    }
+
+    /**
+     * Publish data to a given topic.
+     *
+     * @param topicName The topic to which the message should be published
+     * @param data      The data to publish to the topic specified
+     * @return The RecordMetadata of the published record, or null if publishing failed
+     */
+    public RecordMetadata publishToTopic(String topicName, String data) {
+        // TODO(review): consider checking the data size and chunking if necessary.
+        return onapKafkaProducer.sendDataSynch(topicName, data);
+    }
+}
diff --git a/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaConsumer.java b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaConsumer.java
new file mode 100644
index 0000000..e08e229
--- /dev/null
+++ b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaConsumer.java
@@ -0,0 +1,115 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+import java.util.UUID;
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.common.config.SaslConfigs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility class that provides a KafkaConsumer to communicate with a kafka cluster.
+ * NOTE(review): KafkaConsumer instances are not thread-safe — confirm all polling
+ * happens on a single thread.
+ */
+public class OnapKafkaConsumer {
+
+ private final Logger log = LoggerFactory.getLogger(OnapKafkaConsumer.class);
+ private final KafkaConsumer<String, String> consumer;
+ // Poll timeout in seconds; wrapped in Duration.ofSeconds() on each poll call.
+ private final int pollTimeout;
+ // Topics handed to subscribeConsumerToTopics(); captured from config at construction.
+ private final List<String> consumerTopics;
+
+ /**
+ * Builds a consumer from the supplied configuration. The consumer is NOT yet
+ * subscribed: callers must invoke subscribeConsumerToTopics() before polling.
+ *
+ * @param configuration The config provided to the client
+ */
+ public OnapKafkaConsumer(IKafkaConfig configuration) {
+ consumerTopics = configuration.getConsumerTopics();
+ log.debug("Instantiating kafka consumer for topics {}", consumerTopics);
+
+ Properties props = new Properties();
+ props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
+ props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
+ // Random suffix keeps client ids unique across instances sharing the same consumer id.
+ props.put(ConsumerConfig.CLIENT_ID_CONFIG, configuration.getConsumerID() + "-consumer-" + UUID.randomUUID());
+ props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, configuration.getKafkaSecurityProtocolConfig());
+ props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, configuration.getKafkaBootstrapServers());
+ props.put(SaslConfigs.SASL_JAAS_CONFIG, configuration.getKafkaSaslJaasConfig());
+ props.put(SaslConfigs.SASL_MECHANISM, configuration.getKafkaSaslMechanism());
+ props.put(ConsumerConfig.GROUP_ID_CONFIG, configuration.getConsumerGroup());
+ props.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, false);
+ // Start from the newest offsets when the group has no committed offset.
+ props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
+ // NOTE(review): auto-commit combined with the topic filter in poll(String) means
+ // records fetched from OTHER subscribed topics may be committed and silently
+ // dropped (see poll(String) below) — confirm this loss is acceptable.
+ props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
+
+ consumer = new KafkaConsumer<>(props);
+
+ pollTimeout = configuration.getPollingTimeout();
+ }
+
+ /**
+ * Poll specified topic for existing messages.
+ *
+ * @param topicName topic whose record values are returned; records from other
+ * subscribed topics received in the same poll are discarded, not re-queued
+ * @return List of messages from a specific topic
+ */
+ List<String> poll(String topicName) throws KafkaException {
+ List<String> msgs = new ArrayList<>();
+ log.debug("Polling records for topic {}", topicName);
+ ConsumerRecords<String, String> consumerRecordsForSpecificTopic = consumer.poll(Duration.ofSeconds(pollTimeout));
+ for(ConsumerRecord<String, String> rec : consumerRecordsForSpecificTopic){
+ // Keep only records from the requested topic; others are silently dropped.
+ if (rec.topic().equals(topicName)) {
+ msgs.add(rec.value());
+ }
+ }
+ return msgs;
+ }
+
+ /**
+ * Poll topics for existing messages.
+ *
+ * @return List of messages from all subscribed topics
+ */
+ List<String> poll() throws KafkaException {
+ List<String> msgs = new ArrayList<>();
+ log.debug("Polling all records");
+ ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(pollTimeout));
+ for(ConsumerRecord<String, String> rec : consumerRecords){
+ msgs.add(rec.value());
+ }
+ return msgs;
+ }
+
+ /**
+ * Subscribe this consumer to the topics supplied in the configuration.
+ * Any KafkaException from subscribe (e.g. an invalid topic name) is logged
+ * and rethrown to the caller.
+ */
+ public void subscribeConsumerToTopics() {
+ try {
+ consumer.subscribe(consumerTopics);
+ }
+ catch (KafkaException e) {
+ log.error("Failed to subscribe to given topic(s) {} : {}", consumerTopics, e.getMessage());
+ throw e;
+ }
+ }
+} \ No newline at end of file
diff --git a/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaProducer.java b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaProducer.java
new file mode 100644
index 0000000..1129e14
--- /dev/null
+++ b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaProducer.java
@@ -0,0 +1,84 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import java.util.List;
+import java.util.Properties;
+import java.util.UUID;
+import java.util.concurrent.ExecutionException;
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.common.config.SaslConfigs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility class that provides a KafkaProducer to communicate with a kafka cluster
+ */
+public class OnapKafkaProducer {
+
+ private final Logger log = LoggerFactory.getLogger(OnapKafkaProducer.class);
+ private final KafkaProducer<String, String> producer;
+ private final List<String> producerTopics;
+
+ /**
+ *
+ * @param configuration The config provided to the client
+ */
+ public OnapKafkaProducer(IKafkaConfig configuration) {
+ producerTopics = configuration.getProducerTopics();
+ log.debug("Instantiating kafka producer for topics {}", producerTopics);
+ Properties props = new Properties();
+
+ props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
+ props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
+ props.put(ProducerConfig.CLIENT_ID_CONFIG, configuration.getConsumerID() + "-producer-" + UUID.randomUUID());
+ props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, configuration.getKafkaSecurityProtocolConfig());
+ props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, configuration.getKafkaBootstrapServers());
+ props.put(SaslConfigs.SASL_JAAS_CONFIG, configuration.getKafkaSaslJaasConfig());
+ props.put(SaslConfigs.SASL_MECHANISM, configuration.getKafkaSaslMechanism());
+ props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 10000);
+ producer = new KafkaProducer<>(props);
+ }
+
+ /**
+ *
+ * @param topicName The name of the topic to publish the data to
+ * @param value The value of the data
+ * @return The RecordMetedata of the request
+ */
+ public RecordMetadata sendDataSynch(String topicName, String value) {
+ RecordMetadata data = null;
+ try {
+ data = producer.send(new ProducerRecord<>(topicName, value)).get();
+ log.debug("Data sent to topic {} at partition no {} and offset {}", topicName, data.partition(), data.offset());
+ } catch (KafkaException | ExecutionException | InterruptedException e) {
+ log.error("Failed the send data: exc {}", e.getMessage());
+ } finally {
+ producer.flush();
+ }
+ return data;
+ }
+} \ No newline at end of file
diff --git a/kafkaClient/src/main/resources/logback.xml b/kafkaClient/src/main/resources/logback.xml
new file mode 100644
index 0000000..8798706
--- /dev/null
+++ b/kafkaClient/src/main/resources/logback.xml
@@ -0,0 +1,11 @@
+<configuration>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="INFO">
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration> \ No newline at end of file
diff --git a/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java b/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java
new file mode 100644
index 0000000..9708f3b
--- /dev/null
+++ b/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java
@@ -0,0 +1,126 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import com.salesforce.kafka.test.KafkaTestCluster;
+import com.salesforce.kafka.test.KafkaTestUtils;
+import com.salesforce.kafka.test.listeners.BrokerListener;
+import com.salesforce.kafka.test.listeners.SaslPlainListener;
+import io.github.netmikey.logunit.api.LogCapturer;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.RegisterExtension;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+class OnapKafkaClientTest {
+
+ @RegisterExtension
+ LogCapturer producerLogs = LogCapturer.create().captureForType(OnapKafkaProducer.class);
+
+ @RegisterExtension
+ LogCapturer clientLogs = LogCapturer.create().captureForType(OnapKafkaClient.class);
+
+ private static final Logger logger = LoggerFactory.getLogger(OnapKafkaClientTest.class);
+
+ private static TestConfiguration configuration = new TestConfiguration("application.properties");
+ private static final List<String> consumerTopics = configuration.getConsumerTopics();
+ private static KafkaTestCluster kafkaTestCluster = null;
+
+ @BeforeAll
+ static void before() throws Exception {
+ startKafkaService();
+ KafkaTestUtils utils = new KafkaTestUtils(kafkaTestCluster);
+ for (String topic: consumerTopics) {
+ utils.createTopic(topic, 1, (short) 1);
+ }
+ configuration.setBootstrapServers(Collections.singletonList(kafkaTestCluster.getKafkaConnectString()));
+ }
+
+ @AfterAll
+ static void after() throws Exception {
+ kafkaTestCluster.close();
+ kafkaTestCluster.stop();
+ }
+
+ @Test
+ void whenProducingCorrectRecordsArePresent() {
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ Assertions.assertEquals(handler.fetchFromTopic(consumerTopics.get(0)).size(), 0);
+ handler.publishToTopic(consumerTopics.get(0), "blahblahblahblah");
+ handler.publishToTopic(consumerTopics.get(1), "iaerugfoiaeurgfoaiuerf");
+ List<String> eventsFrom1 = handler.fetchFromTopic(consumerTopics.get(0));
+ Assertions.assertEquals(1, eventsFrom1.size());
+ handler.fetchFromTopic(consumerTopics.get(0));
+ List<String> events2 = handler.fetchFromTopic(consumerTopics.get(1));
+ Assertions.assertEquals( 0, events2.size());
+ }
+
+ @Test
+ void whenConsumingFromInvalidTopicEmptyListIsReturned() {
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ List<String> events = handler.fetchFromTopic("invalidTopic");
+ Assertions.assertEquals(0, events.size());
+ }
+
+ @Test
+ void whenPublishingToInvalidTopicExceptionIsLogged() {
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ RecordMetadata metadata = handler.publishToTopic("invalid.topic", "blahblahblahblah");
+ producerLogs.assertContains("Failed the send data");
+ Assertions.assertNull(metadata);
+ }
+
+ @Test
+ void whenSubscribingToInvalidTopicExceptionIsLogged() {
+ configuration = new TestConfiguration("invalid-application.properties");
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ handler.fetchFromTopic("bvserbatb");
+ clientLogs.assertContains("Consumer has not been initialised");
+ configuration.setConsumerTopics(consumerTopics);
+ }
+
+
+ private static void startKafkaService() throws Exception {
+ final BrokerListener listener = new SaslPlainListener()
+ .withUsername("kafkaclient")
+ .withPassword("client-secret");
+ final Properties brokerProperties = new Properties();
+ brokerProperties.setProperty("auto.create.topics.enable", "false");
+ kafkaTestCluster = new KafkaTestCluster(
+ 1,
+ brokerProperties,
+ Collections.singletonList(listener)
+ );
+ kafkaTestCluster.start();
+ logger.debug("Cluster started at: {}", kafkaTestCluster.getKafkaConnectString());
+ }
+
+ static {
+ System.setProperty("java.security.auth.login.config", "src/test/resources/jaas.conf");
+ }
+} \ No newline at end of file
diff --git a/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java b/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java
new file mode 100644
index 0000000..b5fa9d1
--- /dev/null
+++ b/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java
@@ -0,0 +1,110 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+import lombok.SneakyThrows;
+
+public class TestConfiguration implements org.onap.dmaap.kafka.IKafkaConfig {
+
+    private final Properties testConfig;
+    private List<String> bootstrapServers;
+    // Lazily populated from the properties file; may be overridden via setConsumerTopics().
+    private List<String> consumerTopics;
+
+    @SneakyThrows
+    public TestConfiguration(String configFilename) {
+        testConfig = loadProperties(configFilename);
+        bootstrapServers = new ArrayList<>(Arrays.asList(((String) testConfig.get("kafka.kafkaBootstrapServers")).split(",")));
+    }
+
+    /**
+     * Load the given properties file from the test classpath.
+     *
+     * @throws IOException if the resource is missing or unreadable
+     */
+    private Properties loadProperties(String configFileName) throws IOException {
+        Properties configuration = new Properties();
+        try (InputStream inputStream = TestConfiguration.class
+            .getClassLoader()
+            .getResourceAsStream(configFileName)) {
+            if (inputStream == null) {
+                // Fail fast with a clear message instead of an NPE from Properties.load(null).
+                throw new IOException("Config file not found on classpath: " + configFileName);
+            }
+            configuration.load(inputStream);
+        }
+        return configuration;
+    }
+
+    @Override
+    public List<String> getKafkaBootstrapServers() {
+        return bootstrapServers;
+    }
+
+    public void setBootstrapServers(List<String> newBootstrapList) {
+        bootstrapServers = newBootstrapList;
+    }
+
+    @Override
+    public String getKafkaSaslMechanism() {
+        return "PLAIN";
+    }
+
+    @Override
+    public String getKafkaSaslJaasConfig() {
+        return "org.apache.kafka.common.security.plain.PlainLoginModule required username=admin password=admin-secret;";
+    }
+
+    @Override
+    public int getPollingTimeout() {
+        return Integer.parseInt((String) testConfig.get("kafka.pollingTimeout"));
+    }
+
+    @Override
+    public String getConsumerGroup() {
+        return (String) testConfig.get("kafka.consumerGroup");
+    }
+
+    @Override
+    public String getConsumerID() {
+        return (String) testConfig.get("kafka.consumerID");
+    }
+
+    @Override
+    public List<String> getConsumerTopics() {
+        // Honour a list installed via setConsumerTopics(): the previous implementation
+        // rebuilt the list from the properties file on every call, which silently
+        // discarded anything set through the setter.
+        if (consumerTopics == null) {
+            consumerTopics = new ArrayList<>();
+            String topicString = (String) testConfig.get("kafka.consumerTopics");
+            if (topicString != null) {
+                consumerTopics.addAll(Arrays.asList(topicString.split(",")));
+            }
+        }
+        return consumerTopics;
+    }
+
+    public void setConsumerTopics(List<String> newTopics) {
+        this.consumerTopics = newTopics;
+    }
+
+    @Override
+    public List<String> getProducerTopics() {
+        List<String> producerTopics = new ArrayList<>();
+        String topicString = (String) testConfig.get("kafka.producerTopics");
+        if (topicString != null) {
+            producerTopics.addAll(Arrays.asList(topicString.split(",")));
+        }
+        return producerTopics;
+    }
+}
diff --git a/kafkaClient/src/test/resources/application.properties b/kafkaClient/src/test/resources/application.properties
new file mode 100644
index 0000000..d1a7853
--- /dev/null
+++ b/kafkaClient/src/test/resources/application.properties
@@ -0,0 +1,6 @@
+kafka.kafkaBootstrapServers=localhost:9092
+kafka.pollingTimeout=10
+kafka.consumerGroup=mygroup
+kafka.consumerID=myid
+kafka.consumerTopics=mytopicA,mytopicB
+kafka.producerTopics=mytopicA \ No newline at end of file
diff --git a/kafkaClient/src/test/resources/invalid-application.properties b/kafkaClient/src/test/resources/invalid-application.properties
new file mode 100644
index 0000000..04b159a
--- /dev/null
+++ b/kafkaClient/src/test/resources/invalid-application.properties
@@ -0,0 +1,6 @@
+kafka.kafkaBootstrapServers=localhost:9092
+kafka.pollingTimeout=10
+kafka.consumerGroup=mygroup
+kafka.consumerID=myid
+#kafka.consumerTopics=mytopicA,mytopicB
+kafka.producerTopics=mytopicA \ No newline at end of file
diff --git a/kafkaClient/src/test/resources/jaas.conf b/kafkaClient/src/test/resources/jaas.conf
new file mode 100644
index 0000000..6f7fb5a
--- /dev/null
+++ b/kafkaClient/src/test/resources/jaas.conf
@@ -0,0 +1,20 @@
+KafkaServer {
+ org.apache.kafka.common.security.plain.PlainLoginModule required
+ username="admin"
+ password="admin-secret"
+ user_admin="admin-secret"
+ user_kafkaclient="client-secret";
+};
+
+Server {
+ org.apache.zookeeper.server.auth.DigestLoginModule required
+ username="admin"
+ password="admin-secret"
+ user_zooclient="client-secret";
+};
+
+Client {
+ org.apache.zookeeper.server.auth.DigestLoginModule required
+ username="zooclient"
+ password="client-secret";
+}; \ No newline at end of file
diff --git a/kafkaClient/src/test/resources/logback-test.xml b/kafkaClient/src/test/resources/logback-test.xml
new file mode 100644
index 0000000..c4bfa96
--- /dev/null
+++ b/kafkaClient/src/test/resources/logback-test.xml
@@ -0,0 +1,20 @@
+<configuration>
+ <timestamp key="byDay" datePattern="yyyyMMdd'T'HHmmss"/>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+ </encoder>
+ </appender>
+ <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+ <file> log-${byDay}.txt </file>
+ <append>true</append>
+ <encoder>
+ <pattern>%-4relative [%thread] %-5level %logger{35} - %msg%n</pattern>
+ </encoder>
+ </appender>
+ <root level="DEBUG">
+ <appender-ref ref="FILE" />
+ <appender-ref ref="STDOUT" />
+ </root>
+ <Logger name="org.apache.kafka" level="WARN"/>
+</configuration> \ No newline at end of file