diff options
author | 2023-01-06 11:15:15 +0000 | |
---|---|---|
committer | 2023-03-09 10:00:42 +0000 | |
commit | 38f5b4b9dc667c52561867d4e36f940109f3e3a5 (patch) | |
tree | 7f3b4f9ec6a897a3e5b7cf11de72e5ad7da9adc2 /kafkaClient/src/test | |
parent | a6b96912d1fa3ee369577c50079b6f1d25907607 (diff) |
[KAFKA] Adding new client code
Signed-off-by: david.mcweeney <david.mcweeney@est.tech>
Change-Id: I38b930b1b5f4233f961d51bbab4b1828d034e67a
Issue-ID: DMAAP-1847
Diffstat (limited to 'kafkaClient/src/test')
6 files changed, 288 insertions, 0 deletions
diff --git a/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java b/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java new file mode 100644 index 0000000..9708f3b --- /dev/null +++ b/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java @@ -0,0 +1,126 @@
/*-
 * ============LICENSE_START=======================================================
 * dmaap-kafka-client
 * ================================================================================
 * Copyright (C) 2023 Nordix Foundation. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */

package org.onap.dmaap.kafka;

import com.salesforce.kafka.test.KafkaTestCluster;
import com.salesforce.kafka.test.KafkaTestUtils;
import com.salesforce.kafka.test.listeners.BrokerListener;
import com.salesforce.kafka.test.listeners.SaslPlainListener;
import io.github.netmikey.logunit.api.LogCapturer;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Integration tests for {@link OnapKafkaClient} against an embedded, SASL/PLAIN-secured
 * single-broker Kafka test cluster (salesforce kafka-junit).
 */
class OnapKafkaClientTest {

    static {
        // Must be set before the embedded broker or any Kafka client is created so the
        // SASL/PLAIN credentials in jaas.conf are picked up by broker and clients alike.
        // (Static initializers run at class load, i.e. before @BeforeAll; placed at the
        // top of the class to make that ordering obvious.)
        System.setProperty("java.security.auth.login.config", "src/test/resources/jaas.conf");
    }

    /** Captures OnapKafkaProducer log output so producer-side failures can be asserted. */
    @RegisterExtension
    LogCapturer producerLogs = LogCapturer.create().captureForType(OnapKafkaProducer.class);

    /** Captures OnapKafkaClient log output so client-side failures can be asserted. */
    @RegisterExtension
    LogCapturer clientLogs = LogCapturer.create().captureForType(OnapKafkaClient.class);

    private static final Logger logger = LoggerFactory.getLogger(OnapKafkaClientTest.class);

    private static TestConfiguration configuration = new TestConfiguration("application.properties");
    private static final List<String> consumerTopics = configuration.getConsumerTopics();
    private static KafkaTestCluster kafkaTestCluster = null;

    /** Starts the embedded cluster, pre-creates the consumer topics and points the config at it. */
    @BeforeAll
    static void before() throws Exception {
        startKafkaService();
        KafkaTestUtils utils = new KafkaTestUtils(kafkaTestCluster);
        for (String topic : consumerTopics) {
            utils.createTopic(topic, 1, (short) 1);
        }
        configuration.setBootstrapServers(Collections.singletonList(kafkaTestCluster.getKafkaConnectString()));
    }

    @AfterAll
    static void after() throws Exception {
        kafkaTestCluster.close();
        kafkaTestCluster.stop();
    }

    @Test
    void whenProducingCorrectRecordsArePresent() {
        OnapKafkaClient handler = new OnapKafkaClient(configuration);
        // Nothing has been published yet, so the first poll must come back empty.
        // FIX: JUnit's assertEquals takes (expected, actual); the original had the
        // arguments reversed here, producing a misleading message on failure.
        Assertions.assertEquals(0, handler.fetchFromTopic(consumerTopics.get(0)).size());
        handler.publishToTopic(consumerTopics.get(0), "blahblahblahblah");
        handler.publishToTopic(consumerTopics.get(1), "iaerugfoiaeurgfoaiuerf");
        List<String> eventsFrom1 = handler.fetchFromTopic(consumerTopics.get(0));
        Assertions.assertEquals(1, eventsFrom1.size());
        handler.fetchFromTopic(consumerTopics.get(0));
        // NOTE(review): the publish to topic 1 is expected to yield no events here —
        // presumably because it is not in the configured producer topics; confirm intent.
        List<String> events2 = handler.fetchFromTopic(consumerTopics.get(1));
        Assertions.assertEquals(0, events2.size());
    }

    @Test
    void whenConsumingFromInvalidTopicEmptyListIsReturned() {
        OnapKafkaClient handler = new OnapKafkaClient(configuration);
        List<String> events = handler.fetchFromTopic("invalidTopic");
        Assertions.assertEquals(0, events.size());
    }

    @Test
    void whenPublishingToInvalidTopicExceptionIsLogged() {
        OnapKafkaClient handler = new OnapKafkaClient(configuration);
        RecordMetadata metadata = handler.publishToTopic("invalid.topic", "blahblahblahblah");
        producerLogs.assertContains("Failed the send data");
        Assertions.assertNull(metadata);
    }

    @Test
    void whenSubscribingToInvalidTopicExceptionIsLogged() {
        // invalid-application.properties has no consumer topics configured.
        configuration = new TestConfiguration("invalid-application.properties");
        OnapKafkaClient handler = new OnapKafkaClient(configuration);
        handler.fetchFromTopic("bvserbatb");
        clientLogs.assertContains("Consumer has not been initialised");
        // Restore the topic list so later-running tests see a valid configuration.
        configuration.setConsumerTopics(consumerTopics);
    }

    /** Boots a one-broker cluster with a SASL/PLAIN listener and topic auto-creation disabled. */
    private static void startKafkaService() throws Exception {
        final BrokerListener listener = new SaslPlainListener()
            .withUsername("kafkaclient")
            .withPassword("client-secret");
        final Properties brokerProperties = new Properties();
        // Topics are created explicitly in before(); auto-creation would mask typos.
        brokerProperties.setProperty("auto.create.topics.enable", "false");
        kafkaTestCluster = new KafkaTestCluster(
            1,
            brokerProperties,
            Collections.singletonList(listener)
        );
        kafkaTestCluster.start();
        logger.debug("Cluster started at: {}", kafkaTestCluster.getKafkaConnectString());
    }
}
\ No newline at end of file diff --git a/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java b/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java new file mode 100644 index 0000000..b5fa9d1 --- /dev/null +++ b/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java @@ -0,0 +1,110 @@
/*-
 * ============LICENSE_START=======================================================
 * dmaap-kafka-client
 * ================================================================================
 * Copyright (C) 2023 Nordix Foundation. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */

package org.onap.dmaap.kafka;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import lombok.SneakyThrows;

/**
 * Test implementation of {@link org.onap.dmaap.kafka.IKafkaConfig} backed by a
 * properties file on the test classpath. Bootstrap servers and consumer topics can
 * be overridden at runtime via the corresponding setters.
 */
public class TestConfiguration implements org.onap.dmaap.kafka.IKafkaConfig {

    private final Properties testConfig;
    private List<String> bootstrapServers;
    // Set only via setConsumerTopics(); null means "read from the properties file".
    private List<String> consumerTopics;

    /**
     * Loads the named properties file from the test classpath.
     *
     * @throws FileNotFoundException if the resource is not on the classpath
     *         (FIX: previously Properties.load(null) raised an opaque NPE)
     * @throws IOException if the resource cannot be read
     */
    private Properties loadProperties(String configFileName) throws IOException {
        Properties configuration = new Properties();
        try (InputStream inputStream = TestConfiguration.class
            .getClassLoader()
            .getResourceAsStream(configFileName)) {
            if (inputStream == null) {
                throw new FileNotFoundException("Config file not found on classpath: " + configFileName);
            }
            configuration.load(inputStream);
        }
        return configuration;
    }

    @SneakyThrows
    public TestConfiguration(String configFilename) {
        testConfig = loadProperties(configFilename);
        // NOTE(review): assumes kafka.kafkaBootstrapServers is always present; a missing
        // key would NPE here — acceptable for test fixtures, but confirm.
        bootstrapServers =
            new ArrayList<>(Arrays.asList(testConfig.getProperty("kafka.kafkaBootstrapServers").split(",")));
    }

    @Override
    public List<String> getKafkaBootstrapServers() {
        return bootstrapServers;
    }

    public void setBootstrapServers(List<String> newBootstrapList) {
        bootstrapServers = newBootstrapList;
    }

    @Override
    public String getKafkaSaslMechanism() {
        return "PLAIN";
    }

    @Override
    public String getKafkaSaslJaasConfig() {
        return "org.apache.kafka.common.security.plain.PlainLoginModule required username=admin password=admin-secret;";
    }

    @Override
    public int getPollingTimeout() {
        return Integer.parseInt(testConfig.getProperty("kafka.pollingTimeout"));
    }

    @Override
    public String getConsumerGroup() {
        return testConfig.getProperty("kafka.consumerGroup");
    }

    @Override
    public String getConsumerID() {
        return testConfig.getProperty("kafka.consumerID");
    }

    /**
     * Returns the consumer topics. An explicitly injected list (see
     * {@link #setConsumerTopics(List)}) takes precedence; otherwise the list is read
     * from the properties file. FIX: the original unconditionally re-read the file and
     * overwrote the field, so setConsumerTopics() had no observable effect.
     */
    @Override
    public List<String> getConsumerTopics() {
        if (consumerTopics != null) {
            return consumerTopics;
        }
        List<String> topics = new ArrayList<>();
        String topicString = testConfig.getProperty("kafka.consumerTopics");
        if (topicString != null) {
            topics.addAll(Arrays.asList(topicString.split(",")));
        }
        return topics;
    }

    public void setConsumerTopics(List<String> newTopics) {
        this.consumerTopics = newTopics;
    }

    @Override
    public List<String> getProducerTopics() {
        List<String> producerTopics = new ArrayList<>();
        String topicString = testConfig.getProperty("kafka.producerTopics");
        if (topicString != null) {
            producerTopics.addAll(Arrays.asList(topicString.split(",")));
        }
        return producerTopics;
    }
}
diff --git a/kafkaClient/src/test/resources/application.properties b/kafkaClient/src/test/resources/application.properties new file mode 100644 index 0000000..d1a7853 --- /dev/null +++ b/kafkaClient/src/test/resources/application.properties @@ -0,0 +1,6 @@
kafka.kafkaBootstrapServers=localhost:9092
kafka.pollingTimeout=10
kafka.consumerGroup=mygroup
kafka.consumerID=myid
kafka.consumerTopics=mytopicA,mytopicB
kafka.producerTopics=mytopicA
\ No newline at end of file diff --git a/kafkaClient/src/test/resources/invalid-application.properties b/kafkaClient/src/test/resources/invalid-application.properties new file mode 100644 index 0000000..04b159a --- /dev/null +++ b/kafkaClient/src/test/resources/invalid-application.properties @@ -0,0 +1,6 @@ +kafka.kafkaBootstrapServers=localhost:9092 +kafka.pollingTimeout=10 +kafka.consumerGroup=mygroup +kafka.consumerID=myid +#kafka.consumerTopics=mytopicA,mytopicB +kafka.producerTopics=mytopicA
\ No newline at end of file diff --git a/kafkaClient/src/test/resources/jaas.conf b/kafkaClient/src/test/resources/jaas.conf new file mode 100644 index 0000000..6f7fb5a --- /dev/null +++ b/kafkaClient/src/test/resources/jaas.conf @@ -0,0 +1,20 @@ +KafkaServer { + org.apache.kafka.common.security.plain.PlainLoginModule required + username="admin" + password="admin-secret" + user_admin="admin-secret" + user_kafkaclient="client-secret"; +}; + +Server { + org.apache.zookeeper.server.auth.DigestLoginModule required + username="admin" + password="admin-secret" + user_zooclient="client-secret"; +}; + +Client { + org.apache.zookeeper.server.auth.DigestLoginModule required + username="zooclient" + password="client-secret"; +};
\ No newline at end of file diff --git a/kafkaClient/src/test/resources/logback-test.xml b/kafkaClient/src/test/resources/logback-test.xml new file mode 100644 index 0000000..c4bfa96 --- /dev/null +++ b/kafkaClient/src/test/resources/logback-test.xml @@ -0,0 +1,20 @@ +<configuration> + <timestamp key="byDay" datePattern="yyyyMMdd'T'HHmmss"/> + <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> + <encoder> + <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern> + </encoder> + </appender> + <appender name="FILE" class="ch.qos.logback.core.FileAppender"> + <file> log-${byDay}.txt </file> + <append>true</append> + <encoder> + <pattern>%-4relative [%thread] %-5level %logger{35} - %msg%n</pattern> + </encoder> + </appender> + <root level="DEBUG"> + <appender-ref ref="FILE" /> + <appender-ref ref="STDOUT" /> + </root> + <Logger name="org.apache.kafka" level="WARN"/> +</configuration>
\ No newline at end of file |