author david.mcweeney <david.mcweeney@est.tech> 2023-01-06 11:15:15 +0000
committer david.mcweeney <david.mcweeney@est.tech> 2023-03-09 10:00:42 +0000
commit 38f5b4b9dc667c52561867d4e36f940109f3e3a5 (patch)
tree 7f3b4f9ec6a897a3e5b7cf11de72e5ad7da9adc2
parent a6b96912d1fa3ee369577c50079b6f1d25907607 (diff)
[KAFKA] Adding new client code
Signed-off-by: david.mcweeney <david.mcweeney@est.tech>
Change-Id: I38b930b1b5f4233f961d51bbab4b1828d034e67a
Issue-ID: DMAAP-1847
-rw-r--r-- .gitignore 7
-rw-r--r-- LICENSE.txt 39
-rw-r--r-- kafkaClient/pom.xml 90
-rw-r--r-- kafkaClient/src/main/java/org/onap/dmaap/kafka/IKafkaConfig.java 105
-rw-r--r-- kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaClient.java 79
-rw-r--r-- kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaConsumer.java 115
-rw-r--r-- kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaProducer.java 84
-rw-r--r-- kafkaClient/src/main/resources/logback.xml 11
-rw-r--r-- kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java 126
-rw-r--r-- kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java 110
-rw-r--r-- kafkaClient/src/test/resources/application.properties 6
-rw-r--r-- kafkaClient/src/test/resources/invalid-application.properties 6
-rw-r--r-- kafkaClient/src/test/resources/jaas.conf 20
-rw-r--r-- kafkaClient/src/test/resources/logback-test.xml 20
-rw-r--r-- pom.xml 320
-rw-r--r-- sampleClient/pom.xml 70
-rw-r--r-- sampleClient/src/main/java/org/onap/dmaap/kafka/sample/Main.java 44
-rw-r--r-- sampleClient/src/main/java/org/onap/dmaap/kafka/sample/SampleConfiguration.java 48
-rw-r--r-- sampleClient/src/main/resources/application.yaml 11
-rw-r--r-- sampleClient/src/main/resources/logback.xml 11
-rw-r--r-- src/main/docker/Dockerfile 29
-rw-r--r-- src/main/docker/include/etc/confluent/docker/configure 123
-rw-r--r-- src/main/docker/include/etc/confluent/docker/ensure 29
-rw-r--r-- src/main/docker/include/etc/confluent/docker/kafka.properties.template 33
-rw-r--r-- src/main/docker/include/etc/confluent/docker/launch 37
-rw-r--r-- src/main/docker/include/etc/confluent/docker/log4j.properties.template 26
-rw-r--r-- src/main/docker/include/etc/confluent/docker/run 41
-rw-r--r-- src/main/docker/include/etc/confluent/docker/tools-log4j.properties.template 7
-rw-r--r-- src/main/docker/org.onap.dmaap.mr.keyfile 27
-rw-r--r-- src/main/docker/org.onap.dmaap.mr.p12 bin 4149 -> 0 bytes
-rw-r--r-- src/main/docker/org.onap.dmaap.mr.trust.jks bin 1413 -> 0 bytes
-rw-r--r-- src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProvider.java 33
-rw-r--r-- src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactory.java 55
-rw-r--r-- src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProvider.java 205
-rw-r--r-- src/main/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizer.java 233
-rw-r--r-- src/main/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1.java 68
-rw-r--r-- src/main/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1.java 203
-rw-r--r-- src/main/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServerProvider1.java 42
-rw-r--r-- src/main/resources/META-INF/services/org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProvider 1
-rw-r--r-- src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactoryTest.java 39
-rw-r--r-- src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProviderTest.java 85
-rw-r--r-- src/test/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizerTest.java 216
-rw-r--r-- src/test/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1Test.java 80
-rw-r--r-- src/test/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1Test.java 184
-rw-r--r-- src/test/resources/cadi.properties 19
-rw-r--r-- src/test/resources/org.onap.dmaap.mr.keyfile 27
-rw-r--r-- src/test/resources/org.onap.dmaap.mr.p12 bin 4637 -> 0 bytes
-rw-r--r-- src/test/resources/org.onap.dmaap.mr.trust.jks bin 1413 -> 0 bytes
-rw-r--r-- version.properties 6
49 files changed, 1009 insertions, 2161 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c0b3fff
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,7 @@
+.idea/*
+/target/*
+/kafkaClient/target/*
+/sampleClient/target/*
+/src/.idea/*
+*/archives/
+*log*
\ No newline at end of file
diff --git a/LICENSE.txt b/LICENSE.txt
deleted file mode 100644
index bb235ff..0000000
--- a/LICENSE.txt
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-* ============LICENSE_START==========================================
-* ===================================================================
-* Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-* ===================================================================
-*
-* Unless otherwise specified, all software contained herein is licensed
-* under the Apache License, Version 2.0 (the “License”);
-* you may not use this software except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*
-*
-*
-* Unless otherwise specified, all documentation contained herein is licensed
-* under the Creative Commons License, Attribution 4.0 Intl. (the “License”);
-* you may not use this documentation except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* https://creativecommons.org/licenses/by/4.0/
-*
-* Unless required by applicable law or agreed to in writing, documentation
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*
-* ============LICENSE_END============================================
-*
-* ECOMP is a trademark and service mark of AT&T Intellectual Property.
-*
-*/
diff --git a/kafkaClient/pom.xml b/kafkaClient/pom.xml
new file mode 100644
index 0000000..9fa48b3
--- /dev/null
+++ b/kafkaClient/pom.xml
@@ -0,0 +1,90 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.onap.dmaap.kafka</groupId>
+ <artifactId>parent</artifactId>
+ <version>${revision}</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+ <artifactId>onap-kafka-client</artifactId>
+ <description>Kafka client JAR for use by Kafka clients</description>
+ <packaging>jar</packaging>
+
+ <properties>
+ <maven.compiler.source>11</maven.compiler.source>
+ <maven.compiler.target>11</maven.compiler.target>
+ <kafka.version>3.3.1</kafka.version>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.kafka</groupId>
+ <artifactId>kafka_2.13</artifactId>
+ <version>${kafka.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.kafka</groupId>
+ <artifactId>kafka-clients</artifactId>
+ <version>${kafka.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.projectlombok</groupId>
+ <artifactId>lombok</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-classic</artifactId>
+ <version>1.4.5</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-core</artifactId>
+ <version>1.4.5</version>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>2.0.4</version>
+ </dependency>
+ <dependency>
+ <groupId>com.salesforce.kafka.test</groupId>
+ <artifactId>kafka-junit5</artifactId>
+ <version>3.2.4</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.kafka</groupId>
+ <artifactId>kafka-streams</artifactId>
+ </exclusion>
+ </exclusions>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.junit-pioneer</groupId>
+ <artifactId>junit-pioneer</artifactId>
+ <version>1.9.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>io.github.netmikey.logunit</groupId>
+ <artifactId>logunit-core</artifactId>
+ <version>1.1.3</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>io.github.netmikey.logunit</groupId>
+ <artifactId>logunit-logback</artifactId>
+ <version>1.1.3</version>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+</project>
\ No newline at end of file
diff --git a/kafkaClient/src/main/java/org/onap/dmaap/kafka/IKafkaConfig.java b/kafkaClient/src/main/java/org/onap/dmaap/kafka/IKafkaConfig.java
new file mode 100644
index 0000000..ebf8863
--- /dev/null
+++ b/kafkaClient/src/main/java/org/onap/dmaap/kafka/IKafkaConfig.java
@@ -0,0 +1,105 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import java.util.List;
+import org.apache.kafka.common.KafkaException;
+
+public interface IKafkaConfig {
+
+ /**
+ * Returns the list of kafka bootstrap servers.
+ *
+ * @return List of kafka bootstrap servers.
+ */
+ List<String> getKafkaBootstrapServers();
+
+ /**
+ * Kafka security protocol to be used by the client to authenticate with the Kafka cluster.
+ *
+ * @return Kafka security.protocol. Default is SASL_PLAINTEXT in the current ONAP Kafka config
+ */
+ default String getKafkaSecurityProtocolConfig() {
+ return "SASL_PLAINTEXT";
+ }
+
+ /**
+ * Kafka SASL mechanism to be used by the client to authenticate with the Kafka cluster.
+ *
+ * @return Kafka sasl.mechanism. Default is SCRAM-SHA-512 in the current ONAP Kafka config
+ */
+ default String getKafkaSaslMechanism() {
+ return "SCRAM-SHA-512";
+ }
+
+ /**
+ * Kafka JAAS config to be used by the client to authenticate with the Kafka cluster.
+ * If overridden, the value must follow the sasl.jaas.config convention for the sasl.mechanism in use;
+ * otherwise, the environment variable SASL_JAAS_CONFIG must be set to provide the default behaviour.
+ * @return Kafka sasl.jaas.config
+ */
+ default String getKafkaSaslJaasConfig() {
+ String saslJaasConfFromEnv = System.getenv("SASL_JAAS_CONFIG");
+ if (saslJaasConfFromEnv != null) {
+ return saslJaasConfFromEnv;
+ } else {
+ throw new KafkaException("sasl.jaas.config not set for Kafka client");
+ }
+ }
+
+ /**
+ * The timeout in seconds to wait for a response from each poll.
+ *
+ * @return Client Timeout in seconds. Default is 10 seconds
+ */
+ default int getPollingTimeout() {
+ return 10;
+ }
+
+ /**
+ * Returns the kafka consumer group defined for this component.
+ *
+ * @return KafkaConsumer group.
+ */
+ String getConsumerGroup();
+
+ /**
+ * Returns the kafka consumer id defined for this component.
+ *
+ * @return KafkaConsumer id or null.
+ */
+ String getConsumerID();
+
+ /**
+ * Returns a list of kafka topics to consume from.
+ *
+ * @return List of kafka topics or empty.
+ */
+ List<String> getConsumerTopics();
+
+ /**
+ * Returns a list of kafka topics to produce to.
+ *
+ * @return List of kafka topics or empty.
+ */
+ List<String> getProducerTopics();
+
+}
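For reference, a minimal sketch of an IKafkaConfig implementation; the class name and all values below are illustrative assumptions, not project defaults. The security protocol, SASL settings and polling timeout fall back to the interface defaults shown above.

import java.util.List;
import org.onap.dmaap.kafka.IKafkaConfig;

public class MyKafkaConfig implements IKafkaConfig {

    @Override
    public List<String> getKafkaBootstrapServers() {
        return List.of("localhost:9092"); // assumed broker address
    }

    @Override
    public String getConsumerGroup() {
        return "my-group";
    }

    @Override
    public String getConsumerID() {
        return "my-id";
    }

    @Override
    public List<String> getConsumerTopics() {
        return List.of("my.topic.in");
    }

    @Override
    public List<String> getProducerTopics() {
        return List.of("my.topic.out");
    }

    // getKafkaSecurityProtocolConfig, getKafkaSaslMechanism, getKafkaSaslJaasConfig
    // and getPollingTimeout keep the interface defaults shown above.
}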
diff --git a/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaClient.java b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaClient.java
new file mode 100644
index 0000000..7986869
--- /dev/null
+++ b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaClient.java
@@ -0,0 +1,79 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2022 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.dmaap.kafka;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.common.KafkaException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility class that provides a handler for Kafka interactions
+ */
+public class OnapKafkaClient {
+
+ private final Logger log = LoggerFactory.getLogger(OnapKafkaClient.class.getName());
+
+ private OnapKafkaConsumer onapKafkaConsumer = null;
+
+ private final OnapKafkaProducer onapKafkaProducer;
+
+ public OnapKafkaClient(IKafkaConfig configuration) {
+ if (!configuration.getConsumerTopics().isEmpty()) {
+ onapKafkaConsumer = new OnapKafkaConsumer(configuration);
+ onapKafkaConsumer.subscribeConsumerToTopics();
+ }
+ onapKafkaProducer = new OnapKafkaProducer(configuration);
+ }
+
+ /**
+ * @param topicName The topic from which messages will be fetched
+ * @return A list of messages from a specific topic
+ */
+ public List<String> fetchFromTopic(String topicName) {
+ List<String> messages = new ArrayList<>();
+ if (onapKafkaConsumer != null) {
+ try {
+ log.debug("Polling for messages from topic: {}", topicName);
+ messages = onapKafkaConsumer.poll(topicName);
+ log.debug("Returning messages from topic {}", topicName);
+ return messages;
+ } catch (KafkaException e) {
+ log.error("Failed to fetch from kafka for topic: {}", topicName, e);
+ }
+ } else {
+ log.error("Consumer has not been initialised with the required topic list");
+ }
+ return messages;
+ }
+
+ /**
+ * Publish data to a given topic
+ * @param topicName The topic to which the message should be published
+ * @param data The data to publish to the topic specified
+ * @return The RecordMetadata of the published record, or null if publishing failed
+ */
+ public RecordMetadata publishToTopic(String topicName, String data) {
+ // Should we check the data size and chunk it if necessary? Do we need to?
+ return onapKafkaProducer.sendDataSynch(topicName, data);
+ }
+}
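A hypothetical end-to-end sketch of the handler API, assuming the MyKafkaConfig sketch above, a reachable cluster, and pre-existing topics:

import java.util.List;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.onap.dmaap.kafka.OnapKafkaClient;

public class ClientDemo {
    public static void main(String[] args) {
        OnapKafkaClient client = new OnapKafkaClient(new MyKafkaConfig());
        // publishToTopic returns the record metadata, or null on failure
        RecordMetadata meta = client.publishToTopic("my.topic.out", "hello");
        if (meta != null) {
            System.out.printf("published to %s-%d@%d%n", meta.topic(), meta.partition(), meta.offset());
        }
        // fetchFromTopic polls the subscribed topics and filters by topic name
        List<String> messages = client.fetchFromTopic("my.topic.in");
        messages.forEach(System.out::println);
    }
}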
diff --git a/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaConsumer.java b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaConsumer.java
new file mode 100644
index 0000000..e08e229
--- /dev/null
+++ b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaConsumer.java
@@ -0,0 +1,115 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+import java.util.UUID;
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.common.config.SaslConfigs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility class that provides a KafkaConsumer to communicate with a kafka cluster
+ */
+public class OnapKafkaConsumer {
+
+ private final Logger log = LoggerFactory.getLogger(OnapKafkaConsumer.class);
+ private final KafkaConsumer<String, String> consumer;
+ private final int pollTimeout;
+ private final List<String> consumerTopics;
+
+ /**
+ *
+ * @param configuration The config provided to the client
+ */
+ public OnapKafkaConsumer(IKafkaConfig configuration) {
+ consumerTopics = configuration.getConsumerTopics();
+ log.debug("Instantiating kafka consumer for topics {}", consumerTopics);
+
+ Properties props = new Properties();
+ props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
+ props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
+ props.put(ConsumerConfig.CLIENT_ID_CONFIG, configuration.getConsumerID() + "-consumer-" + UUID.randomUUID());
+ props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, configuration.getKafkaSecurityProtocolConfig());
+ props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, configuration.getKafkaBootstrapServers());
+ props.put(SaslConfigs.SASL_JAAS_CONFIG, configuration.getKafkaSaslJaasConfig());
+ props.put(SaslConfigs.SASL_MECHANISM, configuration.getKafkaSaslMechanism());
+ props.put(ConsumerConfig.GROUP_ID_CONFIG, configuration.getConsumerGroup());
+ props.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, false);
+ props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
+ props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
+
+ consumer = new KafkaConsumer<>(props);
+
+ pollTimeout = configuration.getPollingTimeout();
+ }
+
+ /**
+ * Poll specified topic for existing messages
+ *
+ * @return List of messages from a specific topic
+ */
+ List<String> poll(String topicName) throws KafkaException {
+ List<String> msgs = new ArrayList<>();
+ log.debug("Polling records for topic {}", topicName);
+ ConsumerRecords<String, String> consumerRecordsForSpecificTopic = consumer.poll(Duration.ofSeconds(pollTimeout));
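+ // NB: records returned in this poll for other subscribed topics are discarded
+ // by the filter below; with enable.auto.commit=true their offsets may still be
+ // committed, so they will not be redelivered to this consumer group.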
+ for(ConsumerRecord<String, String> rec : consumerRecordsForSpecificTopic){
+ if (rec.topic().equals(topicName)) {
+ msgs.add(rec.value());
+ }
+ }
+ return msgs;
+ }
+
+ /**
+ * Poll topics for existing messages
+ *
+ * @return List of messages from all subscribed topics
+ */
+ List<String> poll() throws KafkaException {
+ List<String> msgs = new ArrayList<>();
+ log.debug("Polling all records");
+ ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(pollTimeout));
+ for(ConsumerRecord<String, String> rec : consumerRecords){
+ msgs.add(rec.value());
+ }
+ return msgs;
+ }
+
+ public void subscribeConsumerToTopics() {
+ try {
+ consumer.subscribe(consumerTopics);
+ }
+ catch (KafkaException e) {
+ log.error("Failed to subscribe to given topic(s) {} : {}", consumerTopics, e.getMessage());
+ throw e;
+ }
+ }
+}
\ No newline at end of file
diff --git a/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaProducer.java b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaProducer.java
new file mode 100644
index 0000000..1129e14
--- /dev/null
+++ b/kafkaClient/src/main/java/org/onap/dmaap/kafka/OnapKafkaProducer.java
@@ -0,0 +1,84 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import java.util.List;
+import java.util.Properties;
+import java.util.UUID;
+import java.util.concurrent.ExecutionException;
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.common.config.SaslConfigs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility class that provides a KafkaProducer to communicate with a kafka cluster
+ */
+public class OnapKafkaProducer {
+
+ private final Logger log = LoggerFactory.getLogger(OnapKafkaProducer.class);
+ private final KafkaProducer<String, String> producer;
+ private final List<String> producerTopics;
+
+ /**
+ *
+ * @param configuration The config provided to the client
+ */
+ public OnapKafkaProducer(IKafkaConfig configuration) {
+ producerTopics = configuration.getProducerTopics();
+ log.debug("Instantiating kafka producer for topics {}", producerTopics);
+ Properties props = new Properties();
+
+ props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
+ props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
+ props.put(ProducerConfig.CLIENT_ID_CONFIG, configuration.getConsumerID() + "-producer-" + UUID.randomUUID());
+ props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, configuration.getKafkaSecurityProtocolConfig());
+ props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, configuration.getKafkaBootstrapServers());
+ props.put(SaslConfigs.SASL_JAAS_CONFIG, configuration.getKafkaSaslJaasConfig());
+ props.put(SaslConfigs.SASL_MECHANISM, configuration.getKafkaSaslMechanism());
+ props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 10000);
+ producer = new KafkaProducer<>(props);
+ }
+
+ /**
+ *
+ * @param topicName The name of the topic to publish the data to
+ * @param value The value of the data
+ * @return The RecordMetadata of the published record, or null if the send failed
+ */
+ public RecordMetadata sendDataSynch(String topicName, String value) {
+ RecordMetadata data = null;
+ try {
+ data = producer.send(new ProducerRecord<>(topicName, value)).get();
+ log.debug("Data sent to topic {} at partition no {} and offset {}", topicName, data.partition(), data.offset());
+ } catch (KafkaException | ExecutionException | InterruptedException e) {
+ log.error("Failed the send data: exc {}", e.getMessage());
+ } finally {
+ producer.flush();
+ }
+ return data;
+ }
+}
\ No newline at end of file
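A hypothetical direct-usage sketch for the producer (names assumed, reusing the MyKafkaConfig sketch above); since sendDataSynch blocks on the returned future, the metadata can be inspected immediately:

import org.apache.kafka.clients.producer.RecordMetadata;
import org.onap.dmaap.kafka.OnapKafkaProducer;

public class ProducerDemo {
    public static void main(String[] args) {
        OnapKafkaProducer producer = new OnapKafkaProducer(new MyKafkaConfig());
        RecordMetadata meta = producer.sendDataSynch("my.topic.out", "payload");
        // sendDataSynch logs the error and returns null if the send failed
        System.out.println(meta != null ? "written at offset " + meta.offset() : "send failed");
    }
}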
diff --git a/kafkaClient/src/main/resources/logback.xml b/kafkaClient/src/main/resources/logback.xml
new file mode 100644
index 0000000..8798706
--- /dev/null
+++ b/kafkaClient/src/main/resources/logback.xml
@@ -0,0 +1,11 @@
+<configuration>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="INFO">
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
\ No newline at end of file
diff --git a/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java b/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java
new file mode 100644
index 0000000..9708f3b
--- /dev/null
+++ b/kafkaClient/src/test/java/org/onap/dmaap/kafka/OnapKafkaClientTest.java
@@ -0,0 +1,126 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import com.salesforce.kafka.test.KafkaTestCluster;
+import com.salesforce.kafka.test.KafkaTestUtils;
+import com.salesforce.kafka.test.listeners.BrokerListener;
+import com.salesforce.kafka.test.listeners.SaslPlainListener;
+import io.github.netmikey.logunit.api.LogCapturer;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+import org.apache.kafka.clients.producer.RecordMetadata;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.RegisterExtension;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+class OnapKafkaClientTest {
+
+ @RegisterExtension
+ LogCapturer producerLogs = LogCapturer.create().captureForType(OnapKafkaProducer.class);
+
+ @RegisterExtension
+ LogCapturer clientLogs = LogCapturer.create().captureForType(OnapKafkaClient.class);
+
+ private static final Logger logger = LoggerFactory.getLogger(OnapKafkaClientTest.class);
+
+ private static TestConfiguration configuration = new TestConfiguration("application.properties");
+ private static final List<String> consumerTopics = configuration.getConsumerTopics();
+ private static KafkaTestCluster kafkaTestCluster = null;
+
+ @BeforeAll
+ static void before() throws Exception {
+ startKafkaService();
+ KafkaTestUtils utils = new KafkaTestUtils(kafkaTestCluster);
+ for (String topic: consumerTopics) {
+ utils.createTopic(topic, 1, (short) 1);
+ }
+ configuration.setBootstrapServers(Collections.singletonList(kafkaTestCluster.getKafkaConnectString()));
+ }
+
+ @AfterAll
+ static void after() throws Exception {
+ kafkaTestCluster.close();
+ kafkaTestCluster.stop();
+ }
+
+ @Test
+ void whenProducingCorrectRecordsArePresent() {
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ Assertions.assertEquals(0, handler.fetchFromTopic(consumerTopics.get(0)).size());
+ handler.publishToTopic(consumerTopics.get(0), "blahblahblahblah");
+ handler.publishToTopic(consumerTopics.get(1), "iaerugfoiaeurgfoaiuerf");
+ List<String> eventsFrom1 = handler.fetchFromTopic(consumerTopics.get(0));
+ Assertions.assertEquals(1, eventsFrom1.size());
+ handler.fetchFromTopic(consumerTopics.get(0));
+ List<String> events2 = handler.fetchFromTopic(consumerTopics.get(1));
+ Assertions.assertEquals(0, events2.size());
+ }
+
+ @Test
+ void whenConsumingFromInvalidTopicEmptyListIsReturned() {
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ List<String> events = handler.fetchFromTopic("invalidTopic");
+ Assertions.assertEquals(0, events.size());
+ }
+
+ @Test
+ void whenPublishingToInvalidTopicExceptionIsLogged() {
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ RecordMetadata metadata = handler.publishToTopic("invalid.topic", "blahblahblahblah");
+ producerLogs.assertContains("Failed to send data");
+ Assertions.assertNull(metadata);
+ }
+
+ @Test
+ void whenSubscribingToInvalidTopicExceptionIsLogged() {
+ configuration = new TestConfiguration("invalid-application.properties");
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ handler.fetchFromTopic("bvserbatb");
+ clientLogs.assertContains("Consumer has not been initialised");
+ configuration.setConsumerTopics(consumerTopics);
+ }
+
+
+ private static void startKafkaService() throws Exception {
+ final BrokerListener listener = new SaslPlainListener()
+ .withUsername("kafkaclient")
+ .withPassword("client-secret");
+ final Properties brokerProperties = new Properties();
+ brokerProperties.setProperty("auto.create.topics.enable", "false");
+ kafkaTestCluster = new KafkaTestCluster(
+ 1,
+ brokerProperties,
+ Collections.singletonList(listener)
+ );
+ kafkaTestCluster.start();
+ logger.debug("Cluster started at: {}", kafkaTestCluster.getKafkaConnectString());
+ }
+
+ static {
+ System.setProperty("java.security.auth.login.config", "src/test/resources/jaas.conf");
+ }
+}
\ No newline at end of file
diff --git a/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java b/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java
new file mode 100644
index 0000000..b5fa9d1
--- /dev/null
+++ b/kafkaClient/src/test/java/org/onap/dmaap/kafka/TestConfiguration.java
@@ -0,0 +1,110 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+import lombok.SneakyThrows;
+
+public class TestConfiguration implements org.onap.dmaap.kafka.IKafkaConfig {
+
+ private Properties loadProperties(String configFileName) throws IOException {
+ Properties configuration = new Properties();
+ try (InputStream inputStream = TestConfiguration.class
+ .getClassLoader()
+ .getResourceAsStream(configFileName)) {
+ configuration.load(inputStream);
+ }
+ return configuration;
+ }
+
+ private final Properties testConfig;
+ private List<String> bootstrapServers;
+ private List<String> consumerTopics;
+
+ @SneakyThrows
+ public TestConfiguration(String configFilename) {
+ testConfig = loadProperties(configFilename);
+ bootstrapServers = new ArrayList<>(Arrays.asList(((String) testConfig.get("kafka.kafkaBootstrapServers")).split(",")));
+ }
+
+ @Override
+ public List<String> getKafkaBootstrapServers() {
+ return bootstrapServers;
+ }
+
+ public void setBootstrapServers(List<String> newBootstrapList) {
+ bootstrapServers = newBootstrapList;
+ }
+
+ @Override
+ public String getKafkaSaslMechanism() {
+ return "PLAIN";
+ }
+
+ @Override
+ public String getKafkaSaslJaasConfig() {
+ return "org.apache.kafka.common.security.plain.PlainLoginModule required username=admin password=admin-secret;";
+ }
+
+ @Override
+ public int getPollingTimeout() {
+ return Integer.parseInt((String) testConfig.get("kafka.pollingTimeout"));
+ }
+
+ @Override
+ public String getConsumerGroup() {
+ return (String) testConfig.get("kafka.consumerGroup");
+ }
+
+ @Override
+ public String getConsumerID() {
+ return (String) testConfig.get("kafka.consumerID");
+ }
+
+ @Override
+ public List<String> getConsumerTopics() {
+ consumerTopics = new ArrayList<>();
+ String topicString = (String) testConfig.get("kafka.consumerTopics");
+ if (topicString != null) {
+ consumerTopics.addAll(Arrays.asList((topicString).split(",")));
+ }
+ return consumerTopics;
+ }
+
+ public void setConsumerTopics(List<String> newTopics) {
+ this.consumerTopics = newTopics;
+ }
+
+ @Override
+ public List<String> getProducerTopics() {
+ List<String> producerTopics = new ArrayList<>();
+ String topicString = (String) testConfig.get("kafka.producerTopics");
+ if (topicString != null) {
+ producerTopics.addAll(Arrays.asList((topicString).split(",")));
+ }
+ return producerTopics;
+ }
+}
diff --git a/kafkaClient/src/test/resources/application.properties b/kafkaClient/src/test/resources/application.properties
new file mode 100644
index 0000000..d1a7853
--- /dev/null
+++ b/kafkaClient/src/test/resources/application.properties
@@ -0,0 +1,6 @@
+kafka.kafkaBootstrapServers=localhost:9092
+kafka.pollingTimeout=10
+kafka.consumerGroup=mygroup
+kafka.consumerID=myid
+kafka.consumerTopics=mytopicA,mytopicB
+kafka.producerTopics=mytopicA
\ No newline at end of file
diff --git a/kafkaClient/src/test/resources/invalid-application.properties b/kafkaClient/src/test/resources/invalid-application.properties
new file mode 100644
index 0000000..04b159a
--- /dev/null
+++ b/kafkaClient/src/test/resources/invalid-application.properties
@@ -0,0 +1,6 @@
+kafka.kafkaBootstrapServers=localhost:9092
+kafka.pollingTimeout=10
+kafka.consumerGroup=mygroup
+kafka.consumerID=myid
+#kafka.consumerTopics=mytopicA,mytopicB
+kafka.producerTopics=mytopicA
\ No newline at end of file
diff --git a/kafkaClient/src/test/resources/jaas.conf b/kafkaClient/src/test/resources/jaas.conf
new file mode 100644
index 0000000..6f7fb5a
--- /dev/null
+++ b/kafkaClient/src/test/resources/jaas.conf
@@ -0,0 +1,20 @@
+KafkaServer {
+ org.apache.kafka.common.security.plain.PlainLoginModule required
+ username="admin"
+ password="admin-secret"
+ user_admin="admin-secret"
+ user_kafkaclient="client-secret";
+};
+
+Server {
+ org.apache.zookeeper.server.auth.DigestLoginModule required
+ username="admin"
+ password="admin-secret"
+ user_zooclient="client-secret";
+};
+
+Client {
+ org.apache.zookeeper.server.auth.DigestLoginModule required
+ username="zooclient"
+ password="client-secret";
+};
\ No newline at end of file
diff --git a/kafkaClient/src/test/resources/logback-test.xml b/kafkaClient/src/test/resources/logback-test.xml
new file mode 100644
index 0000000..c4bfa96
--- /dev/null
+++ b/kafkaClient/src/test/resources/logback-test.xml
@@ -0,0 +1,20 @@
+<configuration>
+ <timestamp key="byDay" datePattern="yyyyMMdd'T'HHmmss"/>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+ </encoder>
+ </appender>
+ <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+ <file> log-${byDay}.txt </file>
+ <append>true</append>
+ <encoder>
+ <pattern>%-4relative [%thread] %-5level %logger{35} - %msg%n</pattern>
+ </encoder>
+ </appender>
+ <root level="DEBUG">
+ <appender-ref ref="FILE" />
+ <appender-ref ref="STDOUT" />
+ </root>
+ <Logger name="org.apache.kafka" level="WARN"/>
+</configuration>
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 373a4d9..08236bd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,85 +1,69 @@
-<!-- ============LICENSE_START=======================================================
- org.onap.dmaap ================================================================================
- Copyright © 2017 AT&T Intellectual Property. All rights reserved. ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License"); you may not
- use this file except in compliance with the License. You may obtain a copy
- of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
- by applicable law or agreed to in writing, software distributed under the
- License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
- OF ANY KIND, either express or implied. See the License for the specific
- language governing permissions and limitations under the License. ============LICENSE_END=========================================================
- ECOMP is a trademark and service mark of AT&T Intellectual Property. -->
+<!--
+ ============LICENSE_START==================================================
+ * org.onap.dmaap.kafka
+ * ===========================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
+
+ <groupId>org.onap.dmaap.kafka</groupId>
+ <artifactId>parent</artifactId>
+ <name>dmaap-kafka</name>
+ <version>${revision}</version>
+ <packaging>pom</packaging>
+
<parent>
<groupId>org.onap.oparent</groupId>
<artifactId>oparent</artifactId>
- <version>3.2.0</version>
+ <version>3.3.2</version>
</parent>
- <groupId>org.onap.dmaap.kafka</groupId>
- <artifactId>kafka11aaf</artifactId>
- <version>1.1.1-SNAPSHOT</version>
- <name>dmaap-kafka</name>
+ <modules>
+ <module>kafkaClient</module>
+ <module>sampleClient</module>
+ </modules>
<properties>
+ <revision>0.0.1-SNAPSHOT</revision>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <timestamp>${maven.build.timestamp}</timestamp>
<maven.build.timestamp.format>yyyyMMdd'T'HHmmss'Z'</maven.build.timestamp.format>
- <sitePath>
- /content/sites/site/org/onap/dmaap/kafka0111/${project.artifactId}/${project.version}
- </sitePath>
- <skip.docker.build>true</skip.docker.build>
- <skip.docker.push>true</skip.docker.push>
- <docker.nocache>true</docker.nocache>
- <nexusproxy>https://nexus.onap.org</nexusproxy>
- <docker.push.registry>nexus3.onap.org:10003</docker.push.registry>
- <onap.nexus.url>https://nexus.onap.org</onap.nexus.url>
+ <sitePath>/content/sites/site/org/onap/dmaap/kafka/${project.artifactId}/${project.version}</sitePath>
<sonar.language>java</sonar.language>
<sonar.skip>false</sonar.skip>
- <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports
- </sonar.surefire.reportsPath>
- <sonar.coverage.jacoco.xmlReportPaths>
- ${project.reporting.outputDirectory}/jacoco-ut/jacoco.xml
- </sonar.coverage.jacoco.xmlReportPaths>
<sonar.projectVersion>${project.version}</sonar.projectVersion>
- <sonar.exclusions>**/gen/**,**/generated-sources/**,**/yang-gen**,**/pax/**
- </sonar.exclusions>
- <powermock.version>1.6.4</powermock.version>
- <maven.compiler.source>11</maven.compiler.source>
- <maven.compiler.target>11</maven.compiler.target>
+ <sonar.exclusions>**/gen/**,**/generated-sources/**,**/yang-gen**,**/pax/**</sonar.exclusions>
+ <kafka.version>3.3.1</kafka.version>
</properties>
-
- <distributionManagement>
- <site>
- <id>ecomp-site</id>
- <url>dav:${nexusproxy}${sitePath}</url>
- </site>
- </distributionManagement>
-
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.projectlombok</groupId>
+ <artifactId>lombok</artifactId>
+ <version>1.18.24</version>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
<build>
- <!-- Copy files to docker-stage to be included in image -->
<resources>
- <resource>
- <targetPath>${basedir}/target/docker-stage</targetPath>
- <directory>${basedir}/src/main/docker</directory>
- </resource>
- <resource>
- <directory>${basedir}/src/main/resources</directory>
- </resource>
</resources>
<plugins>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <source>${maven.compiler.source}</source>
- <target>${maven.compiler.target}</target>
- </configuration>
- </plugin>
- <plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
</plugin>
@@ -87,10 +71,6 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
- <excludes>
- </excludes>
- <argLine>
- </argLine>
<skipTests>false</skipTests>
</configuration>
</plugin>
@@ -104,84 +84,6 @@
</configuration>
</plugin>
<plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <executions>
- <execution>
- <id>copy</id>
- <phase>package</phase>
- <goals>
- <goal>copy</goal>
- </goals>
- <configuration>
- <outputDirectory>target</outputDirectory>
- <artifactItems>
- <artifactItem>
- <groupId>org.onap.dmaap.messagerouter.mirroragent</groupId>
- <artifactId>dmaapMMAgent</artifactId>
- <version>1.1.2</version>
- <destFileName>dmaapMMAgent.jar</destFileName>
- </artifactItem>
- </artifactItems>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-resources-plugin</artifactId>
- <version>2.7</version>
- <executions>
- <execution>
- <id>copy-jar</id>
- <phase>install</phase>
- <goals>
- <goal>copy-resources</goal>
- </goals>
- <configuration>
- <outputDirectory>${basedir}/target/docker-stage</outputDirectory>
- <resources>
- <resource>
- <directory>${basedir}/target</directory>
- <includes>
- <include>dmaapMMAgent.jar</include>
- <include>kafka11aaf.jar</include>
- </includes>
- </resource>
- </resources>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-shade-plugin</artifactId>
- <version>3.2.4</version>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>shade</goal>
- </goals>
- <configuration>
- <finalName>${project.artifactId}</finalName>
- <artifactSet>
- <excludes>
- </excludes>
- </artifactSet>
- <filters>
- <filter>
- <artifact>*:*</artifact>
- <excludes>
- <exclude>META-INF/*.SF</exclude>
- <exclude>META-INF/*.DSA</exclude>
- <exclude>META-INF/*.RSA</exclude>
- </excludes>
- </filter>
- </filters>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
<artifactId>maven-checkstyle-plugin</artifactId>
<executions>
<execution>
@@ -192,140 +94,6 @@
</execution>
</executions>
</plugin>
- <plugin>
- <artifactId>maven-deploy-plugin</artifactId>
- <version>3.0.0-M1</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
</plugins>
</build>
-
- <dependencies>
- <dependency>
- <groupId>org.onap.aaf.authz</groupId>
- <artifactId>aaf-cadi-aaf</artifactId>
- <version>2.7.4</version>
- </dependency>
- <dependency>
- <groupId>org.apache.kafka</groupId>
- <artifactId>kafka_2.13</artifactId>
- <version>2.8.0</version>
- </dependency>
- <dependency>
- <groupId>org.powermock</groupId>
- <artifactId>powermock-api-mockito</artifactId>
- <version>${powermock.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.powermock</groupId>
- <artifactId>powermock-module-junit4</artifactId>
- <version>${powermock.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.powermock</groupId>
- <artifactId>powermock-module-junit4-rule</artifactId>
- <version>${powermock.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-core</artifactId>
- <version>1.10.19</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <profiles>
- <profile>
- <id>docker</id>
- <properties>
- <skip.docker.build>false</skip.docker.build>
- <skip.docker.tag>false</skip.docker.tag>
- <skip.docker.push>false</skip.docker.push>
- </properties>
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.groovy.maven</groupId>
- <artifactId>gmaven-plugin</artifactId>
- <version>1.0</version>
- <executions>
- <execution>
- <phase>validate</phase>
- <goals>
- <goal>execute</goal>
- </goals>
- <configuration>
- <properties>
- <ver>${project.version}</ver>
- <timestamp>${maven.build.timestamp}</timestamp>
- </properties>
- <source>
- println project.properties['ver'];
- if (project.properties['ver'].endsWith("-SNAPSHOT")) {
- project.properties['dockertag1'] = project.properties['ver'] +
- "-latest";
- project.properties['dockertag2'] = project.properties['ver'] +
- "-" + project.properties['timestamp'];
- } else {
- project.properties['dockertag1'] = project.properties['ver'] +
- "-STAGING-latest";
- project.properties['dockertag2'] = project.properties['ver'] +
- "-STAGING-" + project.properties['timestamp'];
- }
- println 'docker tag 1: ' + project.properties['dockertag1'];
- println 'docker tag 2: ' + project.properties['dockertag2'];
- </source>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>io.fabric8</groupId>
- <artifactId>docker-maven-plugin</artifactId>
- <version>0.28.0</version>
- <configuration>
- <pullRegistry>${docker.pull.registry}</pullRegistry>
- <pushRegistry>${docker.push.registry}</pushRegistry>
- <images>
- <image>
- <name>onap/dmaap/kafka111</name>
- <build>
- <nocache>true</nocache>
- <cleanup>try</cleanup>
- <dockerFileDir>${basedir}/target/docker-stage</dockerFileDir>
- <dockerFile>Dockerfile</dockerFile>
- <tags>
- <tag>${dockertag1}</tag>
- <tag>${dockertag2}</tag>
- </tags>
- </build>
- </image>
- </images>
- </configuration>
- <executions>
- <execution>
- <id>generate-images</id>
- <phase>install</phase>
- <goals>
- <goal>build</goal>
- </goals>
- </execution>
- <execution>
- <id>push-images</id>
- <phase>deploy</phase>
- <goals>
- <goal>push</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- </profiles>
</project>
diff --git a/sampleClient/pom.xml b/sampleClient/pom.xml
new file mode 100644
index 0000000..d51df67
--- /dev/null
+++ b/sampleClient/pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.onap.dmaap.kafka</groupId>
+ <artifactId>parent</artifactId>
+ <version>${revision}</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+ <artifactId>onap-kafka-client-sample</artifactId>
+ <description>Sample project for client users</description>
+
+ <properties>
+ <maven.compiler.source>11</maven.compiler.source>
+ <maven.compiler.target>11</maven.compiler.target>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.onap.dmaap.kafka</groupId>
+ <artifactId>onap-kafka-client</artifactId>
+ <version>0.0.1-SNAPSHOT</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-core</artifactId>
+ </exclusion>
+ </exclusions>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-classic</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.projectlombok</groupId>
+ <artifactId>lombok</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot</artifactId>
+ <version>2.7.7</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter</artifactId>
+ <version>2.7.7</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-autoconfigure</artifactId>
+ <version>2.7.7</version>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ </plugins>
+ </build>
+</project>
\ No newline at end of file
diff --git a/sampleClient/src/main/java/org/onap/dmaap/kafka/sample/Main.java b/sampleClient/src/main/java/org/onap/dmaap/kafka/sample/Main.java
new file mode 100644
index 0000000..37a3097
--- /dev/null
+++ b/sampleClient/src/main/java/org/onap/dmaap/kafka/sample/Main.java
@@ -0,0 +1,44 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka.sample;
+
+import org.onap.dmaap.kafka.OnapKafkaClient;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.CommandLineRunner;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+@SpringBootApplication
+public class Main implements CommandLineRunner {
+
+ @Autowired
+ private SampleConfiguration configuration;
+
+ public static void main(String[] args) {
+ SpringApplication.run(Main.class, args);
+ }
+
+ @Override
+ public void run(String... args) {
+ OnapKafkaClient handler = new OnapKafkaClient(configuration);
+ handler.fetchFromTopic("dummy.topic.blah");
+ }
+}
\ No newline at end of file
diff --git a/sampleClient/src/main/java/org/onap/dmaap/kafka/sample/SampleConfiguration.java b/sampleClient/src/main/java/org/onap/dmaap/kafka/sample/SampleConfiguration.java
new file mode 100644
index 0000000..0cb5498
--- /dev/null
+++ b/sampleClient/src/main/java/org/onap/dmaap/kafka/sample/SampleConfiguration.java
@@ -0,0 +1,48 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * dmaap-kafka-client
+ * ================================================================================
+ * Copyright (C) 2023 Nordix Foundation. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.kafka.sample;
+
+import java.util.List;
+import lombok.Getter;
+import lombok.NonNull;
+import lombok.Setter;
+import org.onap.dmaap.kafka.IKafkaConfig;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+@ConfigurationProperties(prefix = "kafka")
+@Getter
+@Setter
+public class SampleConfiguration implements IKafkaConfig {
+
+ @NonNull
+ private List<String> kafkaBootstrapServers;
+
+ private List<String> consumerTopics;
+ private String consumerGroup;
+ private String consumerID;
+ private int pollingTimeout;
+
+ private List<String> producerTopics;
+ private String kafkaSaslJaasConfig;
+ // private String kafkaSaslMechanism;
+}
diff --git a/sampleClient/src/main/resources/application.yaml b/sampleClient/src/main/resources/application.yaml
new file mode 100644
index 0000000..b8a0f70
--- /dev/null
+++ b/sampleClient/src/main/resources/application.yaml
@@ -0,0 +1,11 @@
+kafka:
+ kafkaBootstrapServers: [localhost:9092]
+ pollingTimeout: 10
+ consumerGroup: my-consumer-group
+ consumerID: my-consumer-id
+ consumerTopics: [test.mytopic.1, test.mytopic.2]
+ producerTopics: [test.mytopic.3]
+ kafkaSaslJaasConfig: ${SASL_JAAS_CONFIG:org.apache.kafka.common.security.scram.ScramLoginModule required username=admin password=admin-secret;}
+
+ #kafkaSaslJaasConfig: ${SASL_JAAS_CONFIG:org.apache.kafka.common.security.plain.PlainLoginModule required username=admin password=admin-secret;}
+ #kafkaSaslMechanism: ${SASL_MECHANISM:PLAIN}
\ No newline at end of file
diff --git a/sampleClient/src/main/resources/logback.xml b/sampleClient/src/main/resources/logback.xml
new file mode 100644
index 0000000..8798706
--- /dev/null
+++ b/sampleClient/src/main/resources/logback.xml
@@ -0,0 +1,11 @@
+<configuration>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="INFO">
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
\ No newline at end of file
diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
deleted file mode 100644
index 930f5ba..0000000
--- a/src/main/docker/Dockerfile
+++ /dev/null
@@ -1,29 +0,0 @@
-FROM confluentinc/cp-kafka:6.2.0
-
-ENV COMPONENT=kafka \
- KAFKA_USER=mrkafka \
- KAFKA_GROUP=onap
-
-COPY org.onap.dmaap.mr.trust.jks \
- org.onap.dmaap.mr.p12 \
- org.onap.dmaap.mr.keyfile \
- /etc/${COMPONENT}/secrets/cert/
-
-USER root
-
-RUN userdel -r appuser && groupadd $KAFKA_GROUP && useradd $KAFKA_USER -u 1000 -G 1000,$KAFKA_GROUP
-
-WORKDIR /home/$KAFKA_USER
-
-COPY include/etc/confluent/docker/* /etc/confluent/docker/
-RUN chmod -R +x /etc/confluent/docker \
-&& mkdir -p /etc/${COMPONENT}/data /etc/${COMPONENT}/secrets \
-&& chown -R $KAFKA_USER:$KAFKA_GROUP /var/lib/${COMPONENT} /etc/${COMPONENT} /etc/confluent/docker /var/log/${COMPONENT} /var/lib/${COMPONENT} /var/log/confluent
-
-COPY kafka11aaf.jar /usr/share/java/${COMPONENT}/
-
-USER $KAFKA_USER
-
-EXPOSE 9092 9093
-
-CMD ["/etc/confluent/docker/run"]
\ No newline at end of file
diff --git a/src/main/docker/include/etc/confluent/docker/configure b/src/main/docker/include/etc/confluent/docker/configure
deleted file mode 100644
index f0d8701..0000000
--- a/src/main/docker/include/etc/confluent/docker/configure
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env bash
-#
-# Copyright 2016 Confluent Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-. /etc/confluent/docker/bash-config
-
-dub ensure KAFKA_ZOOKEEPER_CONNECT
-dub ensure KAFKA_ADVERTISED_LISTENERS
-
-# By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing
-# hosts with 0.0.0.0. This is a good default, as it ensures that the broker
-# process listens on all network interfaces.
-if [[ -z "${KAFKA_LISTENERS-}" ]]
-then
- export KAFKA_LISTENERS
- KAFKA_LISTENERS=$(cub listeners "$KAFKA_ADVERTISED_LISTENERS")
-fi
-
-dub path /etc/kafka/ writable
-
-if [[ -z "${KAFKA_LOG_DIRS-}" ]]
-then
- export KAFKA_LOG_DIRS
- KAFKA_LOG_DIRS="/var/lib/kafka/data"
-fi
-
-# advertised.host, advertised.port, host and port are deprecated. Exit if these properties are set.
-if [[ -n "${KAFKA_ADVERTISED_PORT-}" ]]
-then
- echo "advertised.port is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead."
- exit 1
-fi
-
-if [[ -n "${KAFKA_ADVERTISED_HOST-}" ]]
-then
- echo "advertised.host is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead."
- exit 1
-fi
-
-if [[ -n "${KAFKA_HOST-}" ]]
-then
- echo "host is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead."
- exit 1
-fi
-
-if [[ -n "${KAFKA_PORT-}" ]]
-then
- echo "port is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead."
- exit 1
-fi
-
-# Set if ADVERTISED_LISTENERS has SSL:// or SASL_SSL:// endpoints.
-if [[ $KAFKA_ADVERTISED_LISTENERS == *"SSL://"* ]]
-then
- echo "SSL is enabled."
-
- dub ensure KAFKA_SSL_KEYSTORE_FILENAME
- export KAFKA_SSL_KEYSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_FILENAME"
- dub path "$KAFKA_SSL_KEYSTORE_LOCATION" exists
-
- dub ensure KAFKA_SSL_KEY_CREDENTIALS
- KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS"
- dub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" exists
- export KAFKA_SSL_KEY_PASSWORD
- KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION")
-
- dub ensure KAFKA_SSL_KEYSTORE_CREDENTIALS
- KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_CREDENTIALS"
- dub path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" exists
- export KAFKA_SSL_KEYSTORE_PASSWORD
- KAFKA_SSL_KEYSTORE_PASSWORD=$(cat "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION")
-
- if [[ -n "${KAFKA_SSL_CLIENT_AUTH-}" ]] && ( [[ $KAFKA_SSL_CLIENT_AUTH == *"required"* ]] || [[ $KAFKA_SSL_CLIENT_AUTH == *"requested"* ]] )
- then
- dub ensure KAFKA_SSL_TRUSTSTORE_FILENAME
- export KAFKA_SSL_TRUSTSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_FILENAME"
- dub path "$KAFKA_SSL_TRUSTSTORE_LOCATION" exists
-
- dub ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS
- KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_CREDENTIALS"
- dub path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" exists
- export KAFKA_SSL_TRUSTSTORE_PASSWORD
- KAFKA_SSL_TRUSTSTORE_PASSWORD=$(cat "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION")
- fi
-
-fi
-
-# Set if KAFKA_ADVERTISED_LISTENERS has SASL_PLAINTEXT:// or SASL_SSL:// endpoints.
-if [[ $KAFKA_ADVERTISED_LISTENERS =~ .*SASL_.*://.* ]]
-then
- echo "SASL" is enabled.
-
- dub ensure KAFKA_OPTS
-
- if [[ ! $KAFKA_OPTS == *"java.security.auth.login.config"* ]]
- then
- echo "KAFKA_OPTS should contain 'java.security.auth.login.config' property."
- fi
-fi
-
-if [[ -n "${KAFKA_JMX_OPTS-}" ]]
-then
- if [[ ! $KAFKA_JMX_OPTS == *"com.sun.management.jmxremote.rmi.port"* ]]
- then
- echo "KAFKA_OPTS should contain 'com.sun.management.jmxremote.rmi.port' property. It is required for accessing the JMX metrics externally."
- fi
-fi
-
-dub template "/etc/confluent/docker/${COMPONENT}.properties.template" "/etc/${COMPONENT}/${COMPONENT}.properties"
-dub template "/etc/confluent/docker/log4j.properties.template" "/etc/${COMPONENT}/log4j.properties"
-dub template "/etc/confluent/docker/tools-log4j.properties.template" "/etc/${COMPONENT}/tools-log4j.properties"
diff --git a/src/main/docker/include/etc/confluent/docker/ensure b/src/main/docker/include/etc/confluent/docker/ensure
deleted file mode 100644
index 09160f0..0000000
--- a/src/main/docker/include/etc/confluent/docker/ensure
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-#
-# Copyright 2020 Confluent Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-. /etc/confluent/docker/bash-config
-
-export KAFKA_DATA_DIRS=${KAFKA_DATA_DIRS:-"/var/lib/kafka/data"}
-echo "===> Check if $KAFKA_DATA_DIRS is writable ..."
-dub path "$KAFKA_DATA_DIRS" writable
-
-if [[ -n "${KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE-}" ]] && [[ $KAFKA_ZOOKEEPER_SSL_CLIENT_ENABLE == "true" ]]
-then
- echo "===> Skipping Zookeeper health check for SSL connections..."
-else
- echo "===> Check if Zookeeper is healthy ..."
- cub zk-ready "$KAFKA_ZOOKEEPER_CONNECT" "${KAFKA_CUB_ZK_TIMEOUT:-40}"
-fi \ No newline at end of file
diff --git a/src/main/docker/include/etc/confluent/docker/kafka.properties.template b/src/main/docker/include/etc/confluent/docker/kafka.properties.template
deleted file mode 100644
index 5eeaea3..0000000
--- a/src/main/docker/include/etc/confluent/docker/kafka.properties.template
+++ /dev/null
@@ -1,33 +0,0 @@
-{% set excluded_props = ['KAFKA_VERSION',
- 'KAFKA_HEAP_OPTS',
- 'KAFKA_LOG4J_OPTS',
- 'KAFKA_OPTS',
- 'KAFKA_JMX_OPTS',
- 'KAFKA_JVM_PERFORMANCE_OPTS',
- 'KAFKA_GC_LOG_OPTS',
- 'KAFKA_LOG4J_ROOT_LOGLEVEL',
- 'KAFKA_LOG4J_LOGGERS',
- 'KAFKA_TOOLS_LOG4J_LOGLEVEL',
- 'KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET']
--%}
-
-{# properties that don't fit the standard format #}
-{% set other_props = {
- 'KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET' : 'zookeeper.clientCnxnSocket'
- } -%}
-
-{% set kafka_props = env_to_props('KAFKA_', '', exclude=excluded_props) -%}
-{% for name, value in kafka_props.items() -%}
-{{name}}={{value}}
-{% endfor -%}
-
-{% for k, property in other_props.items() -%}
-{% if env.get(k) != None -%}
-{{property}}={{env[k]}}
-{% endif -%}
-{% endfor -%}
-
-{% set confluent_support_props = env_to_props('CONFLUENT_SUPPORT_', 'confluent.support.') -%}
-{% for name, value in confluent_support_props.items() -%}
-{{name}}={{value}}
-{% endfor -%}
diff --git a/src/main/docker/include/etc/confluent/docker/launch b/src/main/docker/include/etc/confluent/docker/launch
deleted file mode 100644
index d1eaf56..0000000
--- a/src/main/docker/include/etc/confluent/docker/launch
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env bash
-#
-# Copyright 2016 Confluent Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Override this section of the upstream script to include the com.sun.management.jmxremote.rmi.port property.
-if [ -z "$KAFKA_JMX_OPTS" ]; then
- export KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote=true -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
-fi
-
-# The JMX client needs to be able to connect to java.rmi.server.hostname.
-# The default for bridged n/w is the bridged IP, so you will only be able to connect from another docker container.
-# For host n/w, this is the IP that the hostname on the host resolves to.
-
-# If you have more than one n/w configured, hostname -i gives you all the IPs;
-# the default is to pick the first IP (or network).
-export KAFKA_JMX_HOSTNAME=${KAFKA_JMX_HOSTNAME:-$(hostname -i | cut -d" " -f1)}
-
-if [ "$KAFKA_JMX_PORT" ]; then
- # This ensures that the "if" section for JMX_PORT in kafka launch script does not trigger.
- export JMX_PORT=$KAFKA_JMX_PORT
- export KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Djava.rmi.server.hostname=$KAFKA_JMX_HOSTNAME -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT -Dcom.sun.management.jmxremote.port=$JMX_PORT"
-fi
-
-echo "===> Launching ${COMPONENT} ... "
-exec "${COMPONENT}"-server-start /etc/"${COMPONENT}"/"${COMPONENT}".properties
diff --git a/src/main/docker/include/etc/confluent/docker/log4j.properties.template b/src/main/docker/include/etc/confluent/docker/log4j.properties.template
deleted file mode 100644
index 445a05c..0000000
--- a/src/main/docker/include/etc/confluent/docker/log4j.properties.template
+++ /dev/null
@@ -1,26 +0,0 @@
-
-log4j.rootLogger={{ env["KAFKA_LOG4J_ROOT_LOGLEVEL"] | default('INFO') }}, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-{% set loggers = {
- 'kafka': 'INFO',
- 'kafka.network.RequestChannel$': 'WARN',
- 'kafka.producer.async.DefaultEventHandler': 'DEBUG',
- 'kafka.request.logger': 'WARN',
- 'kafka.controller': 'TRACE',
- 'kafka.log.LogCleaner': 'INFO',
- 'state.change.logger': 'TRACE',
- 'kafka.authorizer.logger': 'WARN'
- } -%}
-
-
-{% if env['KAFKA_LOG4J_LOGGERS'] %}
-{% set loggers = parse_log4j_loggers(env['KAFKA_LOG4J_LOGGERS'], loggers) %}
-{% endif %}
-
-{% for logger,loglevel in loggers.items() %}
-log4j.logger.{{logger}}={{loglevel}}
-{% endfor %}
diff --git a/src/main/docker/include/etc/confluent/docker/run b/src/main/docker/include/etc/confluent/docker/run
deleted file mode 100644
index 91ac16b..0000000
--- a/src/main/docker/include/etc/confluent/docker/run
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env bash
-#
-# Copyright 2016 Confluent Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-. /etc/confluent/docker/bash-config
-
-# Set environment values if they exist as arguments
-if [ $# -ne 0 ]; then
- echo "===> Overriding env params with args ..."
- for var in "$@"
- do
- export "$var"
- done
-fi
-
-echo "===> ENV Variables ..."
-env
-
-echo "===> User"
-id
-
-echo "===> Configuring ..."
-/etc/confluent/docker/configure
-
-echo "===> Running preflight checks ... "
-/etc/confluent/docker/ensure
-
-echo "===> Launching ... "
-exec /etc/confluent/docker/launch
diff --git a/src/main/docker/include/etc/confluent/docker/tools-log4j.properties.template b/src/main/docker/include/etc/confluent/docker/tools-log4j.properties.template
deleted file mode 100644
index da06f13..0000000
--- a/src/main/docker/include/etc/confluent/docker/tools-log4j.properties.template
+++ /dev/null
@@ -1,7 +0,0 @@
-
-log4j.rootLogger={{ env["KAFKA_TOOLS_LOG4J_LOGLEVEL"] | default('WARN') }}, stderr
-
-log4j.appender.stderr=org.apache.log4j.ConsoleAppender
-log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
-log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.stderr.Target=System.err
diff --git a/src/main/docker/org.onap.dmaap.mr.keyfile b/src/main/docker/org.onap.dmaap.mr.keyfile
deleted file mode 100644
index 72d3b73..0000000
--- a/src/main/docker/org.onap.dmaap.mr.keyfile
+++ /dev/null
@@ -1,27 +0,0 @@
-yJhToh1HtF7641JOeljPtn4ECRn7dncPKtUh9XN4Hv1GX2q1MSVsDI2qQ7i2Q8hH1G3Ko_x0fl1p
-PLn2bOh9cOOGKxQrWxY0724Cme1MMc_drOl7TNk5DPmiw-teI6BdpS_wPtfDGLql9xuxIMWPdv_P
-Id9LSzdW_Fa4FepgcxAj6jOK7jQBmJIsedxIpAVFU0bjmMwybe_BRe1x8yEBrfQo8Si0cfjYdQYP
-XBTAnJ46zejAPJh2U4MyBhYzz2Zr1nMux2wjHc52z8J7_YpfveNSpr9UwOzSo0VqAEORISQDS7Cb
-Cc9jeYmxPkVCEraHWq5jtOpVdwxwTSh1PJ8_pgnhQ4AgQS-5JyRdHWvzwGa2RW8i3ZF1qfJBP4wb
-lyXiNYKSU5jDd-wNP0b9WDILGFRKuAHjz1wKB1IHyQTBX7dpjouuZysEkZS348NVLfAmb1klKp5Q
-1lq2H4TdQnPaG6tV_wyI0ZrZsf4TCeDxYRxEAZc3ILefM_72Zc-UWEHQ_Q4Qck30fJzoHFgEm5Rm
-8XofzAfHOcjoa7o8mEVi9veNpqTeNa8b2DLqYehcE_rMYU_y1AgWsnWgiIX1AEzLyUyfliS2PxQh
-ZI3HLMrzfV--pOuQp_CnrcHclvkX3u5ZJ01a6jq7ONpr712YNmUEoD6s1UR0wEEeO61Cun8zfty8
-m-qXD3k-re64WDizw-pHXHYmIS9a4jL7D0R4KysJRf6iZTAUy4ROy6aS-wMYGgy0r7sEOZY6zp4h
-MBczN5-3O7r-dKjOrr1RWXS3zt71oJNSGcnG41KKOnUeNpFBmIzVfoIp9KR5zdcg-gGrA8Xi_tBj
--rAqYfaNMBUDDtFCLhCHwuhKpR0Y0gG_-8J43yi3bfWOYMWkgPRHrJIiIO-SNUW_pobbRqQzIT3h
-V5IcEc5pa-lnJnBNS5b_-cHsA0x4g1FPmDmjsGBYZmiaPSvXPzhpZVsww1p9nVL63rYLMOTSps1V
-Lj9_UYWef7jD409vZRqeLQGi7lMNeC7oLv_hi_l05rZUkpegMLYmIWzjhzW8OuYdiHGuboV9TyUR
-QWn5e_bypQBAJhYqaYNADzV9duW0v3SW89zP9TlkJ6tfvhcVXRQn4MUzIC9h_0q3cf_9Wemx7UPW
-tGML52lYWcEbVbGF9yFtOm3qVPi4sVFO61vOtIoQhIIL3oa9gOWO9yPCjNm4lD-C4jAWHjtJdMr8
-cGtBplAHGly63VQs9RRUr42A95JMtsuJIPNDWP5-I-9R77NALdzjKEJE2FngGW8Ud4pJQ1sikPH3
-F4kVn1L2NpAilVrjlyb_y89mskrWaSdHCE2P1_gtkWHXfXIfKK0UFQt17s8hk0MfL6JSUDUE4IKN
-tK70iHwmqY9VbYKYLf-8Gl7CW_Q9MumHjGsktwVZinpH4kOUREr6tyve4rZv8wN6mbNPVJ5gw_PE
-I4bvSiEstMgelbkheMC4l-zc3q9C_fNZmLmdav8PLUrkS_NxnZ4hJQCDTjhbMqLIwknXU9SkDyPb
-Dgh049PyJrYzv2_TpYoS6M_o3HjApMkRKlV_TEcbGoX06gAUYTiEWAQU6wm0TdsIdxjEXAWeTiX7
-ddI_vEioFemoKjE5iRWNaKL85xsTsQj6bQi1eSj1F0lxqnSGRldiMAPMrfqKDJ7xFpXS7nyQfLjY
-m1H-Y3bk0iBBZbU0JKXerE_jlr3s7rcdarpwY1pdODoUJBk-EiKezm6zWuG9o3IisPNSqqOs4Cax
-QAE3dt-1TpCxkw7Rpgm8eTwPMPOD3gj7Szcs2sEh-0UIk8y7uZCSRz0ZCsQj-jJl97WQV1ky89xS
-c9ECqzDTgl2cVrih9aQu863_yHnjm9tNTxMH4DudB5JcmM96BX4CfS9qgVzAqCGvW9KS37wy0bK_
-iSCAhAWNT5L9E3fUyg--V_gmVjxGb8Y020cc4_pkqSbAAC8qjQhDWHLy_M2RzQrPmQMdP2PZ5-AU
-Pw6HdHmVTOLZeYuVS1rXx4AYWXkgKHiSRqO6bal1opzOnSpbw-Q1bQu0wZ1MarXodEtJFaOr \ No newline at end of file
diff --git a/src/main/docker/org.onap.dmaap.mr.p12 b/src/main/docker/org.onap.dmaap.mr.p12
deleted file mode 100644
index 1a0e8a4..0000000
--- a/src/main/docker/org.onap.dmaap.mr.p12
+++ /dev/null
Binary files differ
diff --git a/src/main/docker/org.onap.dmaap.mr.trust.jks b/src/main/docker/org.onap.dmaap.mr.trust.jks
deleted file mode 100644
index aae6d81..0000000
--- a/src/main/docker/org.onap.dmaap.mr.trust.jks
+++ /dev/null
Binary files differ
diff --git a/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProvider.java b/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProvider.java
deleted file mode 100644
index 551cf81..0000000
--- a/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProvider.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.commonauth.kafka.base.authorization;
-
-import java.io.IOException;
-
-public interface AuthorizationProvider {
-
- boolean hasPermission(String userId, String permission, String instance, String action);
-
- String getId();
-
- String authenticate(String userId, String password) throws IOException;
-}
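
The interface above was the plug-in point for the pluggable authorization being removed here. Below is a hedged sketch of a trivial implementation; the AllowAllProvider class is hypothetical, and discovery relies on the ServiceLoader registration used by the factory that follows:

    package org.onap.dmaap.commonauth.kafka.base.authorization;

    // Hypothetical no-op provider: grants every permission and authenticates everyone.
    // To be discoverable it would be listed in
    // META-INF/services/org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProvider.
    public class AllowAllProvider implements AuthorizationProvider {

        @Override
        public boolean hasPermission(String userId, String permission, String instance, String action) {
            return true; // no real checks
        }

        @Override
        public String getId() {
            return "ALLOW_ALL_PROVIDER";
        }

        @Override
        public String authenticate(String userId, String password) {
            return null; // null signals success, matching the Cadi3AAFProvider contract
        }
    }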
diff --git a/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactory.java b/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactory.java
deleted file mode 100644
index bdced2d..0000000
--- a/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactory.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.commonauth.kafka.base.authorization;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.ServiceLoader;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class AuthorizationProviderFactory {
- private static final Logger logger = LoggerFactory.getLogger(AuthorizationProviderFactory.class);
- private static final Map<String, AuthorizationProvider> AUTHORIZATION_PROVIDER_MAP = new HashMap<>();
- private static final AuthorizationProviderFactory AUTHORIZATION_PROVIDER_FACTORY = new AuthorizationProviderFactory();
-
- private AuthorizationProviderFactory() {
- try {
- ServiceLoader<AuthorizationProvider> serviceLoader = ServiceLoader.load(AuthorizationProvider.class);
- for (AuthorizationProvider authzProvider : serviceLoader) {
- AUTHORIZATION_PROVIDER_MAP.put(authzProvider.getId(), authzProvider);
-
- }
- } catch (Exception ee) {
- logger.error(ee.getMessage(), ee);
- System.exit(0);
- }
- }
-
- public static AuthorizationProviderFactory getProviderFactory() {
- return AUTHORIZATION_PROVIDER_FACTORY;
- }
-
- public AuthorizationProvider getProvider() {
- return AUTHORIZATION_PROVIDER_MAP.get(System.getProperty("kafka.authorization.provider", "CADI_AAF_PROVIDER"));
- }
-}
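
The factory reads the kafka.authorization.provider system property on every getProvider() call and falls back to CADI_AAF_PROVIDER. A minimal usage sketch, assuming the same package; ALLOW_ALL_PROVIDER is the hypothetical id from the previous example:

    public class ProviderSelectionDemo {
        public static void main(String[] args) {
            // The id must match getId() of a provider found during the ServiceLoader scan.
            System.setProperty("kafka.authorization.provider", "ALLOW_ALL_PROVIDER"); // hypothetical id
            AuthorizationProvider provider = AuthorizationProviderFactory.getProviderFactory().getProvider();
            System.out.println(provider.getId());
        }
    }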
diff --git a/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProvider.java b/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProvider.java
deleted file mode 100644
index 92e27b7..0000000
--- a/src/main/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProvider.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.commonauth.kafka.base.authorization;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.Map;
-import java.util.Properties;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import org.onap.aaf.cadi.PropAccess;
-import org.onap.aaf.cadi.aaf.AAFPermission;
-import org.onap.aaf.cadi.aaf.v2_0.AAFAuthn;
-import org.onap.aaf.cadi.aaf.v2_0.AAFCon;
-import org.onap.aaf.cadi.aaf.v2_0.AAFConHttp;
-import org.onap.aaf.cadi.aaf.v2_0.AbsAAFLur;
-import org.onap.aaf.cadi.principal.UnAuthPrincipal;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class Cadi3AAFProvider implements AuthorizationProvider {
-
- private static PropAccess access;
- private static AAFCon<?> aafcon;
- private static final String CADI_PROPERTIES = "/etc/kafka/data/cadi.properties";
- private static final String AAF_LOCATOR_ENV = "aaf_locate_url";
- private static String apiKey = null;
- private static String kafkaUsername = null;
- private static AAFAuthn<?> aafAuthn;
- private static AbsAAFLur<AAFPermission> aafLur;
- private static boolean enableCadi = false;
- private static final String ENABLE_CADI = "enableCadi";
- private static final Logger logger = LoggerFactory.getLogger(Cadi3AAFProvider.class);
-
- static {
- if (System.getProperty(ENABLE_CADI) != null) {
- if (System.getProperty(ENABLE_CADI).equals("true")) {
- enableCadi = true;
- }
- }
- else {
- if (System.getenv(ENABLE_CADI) != null && System.getenv(ENABLE_CADI).equals("true")) {
- enableCadi = true;
- }
- }
- Configuration config = Configuration.getConfiguration();
- try {
- if (config == null) {
- logger.error("CRITICAL ERROR|Check java.security.auth.login.config VM argument|");
- } else {
- // read the section for KafkaServer
- AppConfigurationEntry[] entries = config.getAppConfigurationEntry("KafkaServer");
- if (entries == null) {
- logger.error(
- "CRITICAL ERROR|Check config contents passed in java.security.auth.login.config VM argument|");
- kafkaUsername = "kafkaUsername";
- apiKey = "apiKey";
-
- } else {
- for (AppConfigurationEntry entry : entries) {
- Map<String, ?> optionsMap = entry.getOptions();
- kafkaUsername = (String) optionsMap.get("username");
- apiKey = (String) optionsMap.get("password");
- }
- }
- }
- } catch (Exception e) {
- logger.error("CRITICAL ERROR: JAAS configuration incorrectly set: {}", e.getMessage());
- }
- }
-
- public static String getKafkaUsername() {
- return kafkaUsername;
- }
-
- public static boolean isCadiEnabled() {
-
- return enableCadi;
- }
-
- public Cadi3AAFProvider() {
- setup();
- }
-
- private synchronized void setup() {
- if (access == null) {
-
- Properties props = new Properties();
- FileInputStream fis;
- try {
- if (System.getProperty("CADI_PROPERTIES") != null) {
- fis = new FileInputStream(System.getProperty("CADI_PROPERTIES"));
- } else {
- fis = new FileInputStream(CADI_PROPERTIES);
- }
- try {
- props.load(fis);
- if (System.getenv(AAF_LOCATOR_ENV) != null)
- props.setProperty(AAF_LOCATOR_ENV, System.getenv(AAF_LOCATOR_ENV));
- access = new PropAccess(props);
- } finally {
- fis.close();
- }
- } catch (IOException e) {
- logger.error("Unable to load " + CADI_PROPERTIES);
- logger.error("Error", e);
- }
- }
-
- if (aafAuthn == null) {
- try {
- aafcon = new AAFConHttp(access);
- aafAuthn = aafcon.newAuthn();
- aafLur = aafcon.newLur(aafAuthn);
- } catch (final Exception e) {
- aafAuthn = null;
- if (access != null)
- access.log(e, "Failed to initialize AAF");
- }
- }
-
- }
-
- /**
- * Checks if a user has a particular permission
- * <p/>
- * Returns true if the permission is found.
- */
- public boolean hasPermission(String userId, String permission, String instance, String action) {
- boolean hasPermission = false;
- try {
- logger.info("^ Event at hasPermission to validate userid {} with {} {} {}", userId, permission, instance, action);
- // AAF Style permissions are in the form
- // Resource Name, Resource Type, Action
- if (userId.equals("admin")) {
- hasPermission = true;
- return hasPermission;
- }
- AAFPermission perm = new AAFPermission(null, permission, instance, action);
- if (aafLur != null) {
- hasPermission = aafLur.fish(new UnAuthPrincipal(userId), perm);
- logger.trace("Permission: {} for user : {} found: {}" , perm.getKey(), userId, hasPermission);
- } else {
- logger.error("AAF client not initialized. Not able to find permissions.");
- }
- } catch (Exception e) {
- logger.error("AAF client not initialized", e);
- }
- return hasPermission;
- }
-
- public String getId() {
- return "CADI_AAF_PROVIDER";
- }
-
- public String authenticate(String userId, String password) throws IOException {
-
- logger.info("^Event received with username {}", userId);
-
- if (!enableCadi) {
- return null;
- } else {
- if (userId.equals(kafkaUsername)) {
- if (password.equals(apiKey)) {
- logger.info("by passes the authentication for the admin {}", kafkaUsername);
- return null;
- } else {
- String errorMessage = "Authentication failed for user " + kafkaUsername;
- logger.error(errorMessage);
- return errorMessage;
- }
-
- }
-
- String aafResponse = aafAuthn.validate(userId, password);
- logger.info("aafResponse = {} for {}", aafResponse, userId);
-
- if (aafResponse != null) {
- logger.error("Authentication failed for user {}", userId);
- }
- return aafResponse;
- }
-
- }
-
-}
diff --git a/src/main/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizer.java b/src/main/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizer.java
deleted file mode 100644
index 4ad10e8..0000000
--- a/src/main/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizer.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.kafkaAuthorize;
-
-import java.util.EnumSet;
-import java.util.Map;
-
-import org.apache.kafka.common.acl.AclOperation;
-import org.apache.kafka.common.security.auth.KafkaPrincipal;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProviderFactory;
-import org.onap.dmaap.commonauth.kafka.base.authorization.Cadi3AAFProvider;
-
-import kafka.network.RequestChannel.Session;
-import kafka.security.auth.Acl;
-import kafka.security.auth.Authorizer;
-import kafka.security.auth.Operation;
-import kafka.security.auth.Resource;
-import scala.collection.immutable.Set;
-
-/**
- * A trivial Kafka Authorizer for use with SSL and AAF
- * Authentication/Authorization.
- *
- */
-public class KafkaCustomAuthorizer implements Authorizer {
-
- private final String[] adminPermission = new String[3];
- protected static final EnumSet<AclOperation> TOPIC_DESCRIBE_OPERATIONS = EnumSet.of(AclOperation.DESCRIBE_CONFIGS);
- protected static final EnumSet<AclOperation> TOPIC_READ_WRITE_DESCRIBE_OPERATIONS = EnumSet.of(AclOperation.WRITE,
- AclOperation.READ, AclOperation.DESCRIBE_CONFIGS);
- protected static final EnumSet<AclOperation> TOPIC_ADMIN_OPERATIONS = EnumSet.of(AclOperation.ALTER,
- AclOperation.ALTER_CONFIGS, AclOperation.CREATE);
- static final String TOPIC = "Topic";
-
- private static final Logger logger = LoggerFactory.getLogger(KafkaCustomAuthorizer.class);
-
- @Override
- public void configure(final Map<String, ?> arg0) {
- // TODO Auto-generated method stub
- }
-
- @Override
- public void addAcls(final Set<Acl> arg0, final Resource arg1) {
- // TODO Auto-generated method stub
-
- }
-
- private String[] getTopicPermission(String topicName, AclOperation aclOperation) {
-
- String namespace = topicName.substring(0, topicName.lastIndexOf("."));
- String[] permission = new String[3];
- if (TOPIC_READ_WRITE_DESCRIBE_OPERATIONS.contains(aclOperation)) {
- permission[0] = namespace + ".topic";
- String instancePart = (System.getenv("pubSubInstPart") != null) ? System.getenv("pubSubInstPart")
- : ".topic";
- permission[1] = instancePart + topicName;
-
- if (aclOperation.equals(AclOperation.WRITE)) {
- permission[2] = "pub";
- } else if (aclOperation.equals(AclOperation.READ)) {
- permission[2] = "sub";
-
- } else if (TOPIC_DESCRIBE_OPERATIONS.contains(aclOperation)) {
- permission[2] = "view";
-
- }
- } else if (aclOperation.equals(AclOperation.DELETE)) {
- permission = (System.getProperty("msgRtr.topicfactory.aaf") + namespace + "|destroy").split("\\|");
-
- } else if (TOPIC_ADMIN_OPERATIONS.contains(aclOperation)) {
- permission = (System.getProperty("msgRtr.topicfactory.aaf") + namespace + "|create").split("\\|");
- }
-
- return permission;
- }
-
- private String[] getAdminPermission() {
-
- if (adminPermission[0] == null) {
- adminPermission[0] = System.getProperty("namespace") + ".kafka.access";
- adminPermission[1] = "*";
- adminPermission[2] = "*";
- }
-
- return adminPermission;
- }
-
- private String[] getPermission(AclOperation aclOperation, String resource, String topicName) {
- String[] permission = new String[3];
- switch (aclOperation) {
-
- case ALTER:
- case ALTER_CONFIGS:
- case CREATE:
- case DELETE:
- if (resource.equals(TOPIC)) {
- permission = getTopicPermission(topicName, aclOperation);
- } else if (resource.equals("Cluster")) {
- permission = getAdminPermission();
- }
- break;
- case DESCRIBE_CONFIGS:
- case READ:
- case WRITE:
- if (resource.equals(TOPIC)) {
- permission = getTopicPermission(topicName, aclOperation);
- }
- break;
- case IDEMPOTENT_WRITE:
- if (resource.equals("Cluster")) {
- permission = getAdminPermission();
- }
- break;
- default:
- break;
-
- }
- return permission;
-
- }
-
- @Override
- public boolean authorize(final Session arg0, final Operation arg1, final Resource arg2) {
- if (arg0.principal() == null) {
- return false;
- }
-
- String fullName = arg0.principal().getName();
- fullName = fullName != null ? fullName.trim() : fullName;
- String topicName = null;
- String[] permission;
-
- String resource = arg2.resourceType().name();
-
- if (resource.equals(TOPIC)) {
- topicName = arg2.name();
- }
-
- if (fullName != null && fullName.equals(Cadi3AAFProvider.getKafkaUsername())) {
- return true;
- }
-
- if ((!Cadi3AAFProvider.isCadiEnabled())||(null != topicName && !topicName.startsWith("org.onap"))) {
- return true;
- }
-
- permission = getPermission(arg1.toJava(), resource, topicName);
-
- if (permission[0] != null) {
- return !checkPermissions(fullName, topicName, permission);
- }
- return true;
- }
-
- private boolean checkPermissions(String fullName, String topicName, String[] permission) {
- try {
-
- if (null != topicName) {
- boolean hasResp = AuthorizationProviderFactory.getProviderFactory().getProvider()
- .hasPermission(fullName, permission[0], permission[1], permission[2]);
- if (hasResp) {
- logger.info("Successful Authorization for {} on {} for {} | {} | {}", fullName, topicName,
- permission[0], permission[1], permission[2]);
- }
- if (!hasResp) {
- logger.info("{} is not allowed in {} | {} | {}", fullName, permission[0], permission[1],
- permission[2]);
- return true;
- }
- }
- } catch (final Exception e) {
- return true;
- }
- return false;
- }
-
- @Override
- public void close() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public scala.collection.immutable.Map<Resource, Set<Acl>> getAcls() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public scala.collection.immutable.Map<Resource, Set<Acl>> getAcls(final KafkaPrincipal arg0) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public boolean removeAcls(final Resource arg0) {
- // TODO Auto-generated method stub
- return false;
- }
-
- @Override
- public boolean removeAcls(final Set<Acl> arg0, final Resource arg1) {
- // TODO Auto-generated method stub
- return false;
- }
-
- public Set<Acl> getAcls(Resource arg0) {
- // TODO Auto-generated method stub
- return null;
- }
-}
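
To make the mapping in getTopicPermission concrete, here is a standalone, hedged restatement of its read/write branch, showing the AAF permission triple checked for a WRITE on org.onap.foo.bar when the pubSubInstPart environment variable is unset:

    // Restates the read/write branch of getTopicPermission above; not part of this change.
    public class PermissionMappingDemo {

        static String[] pubPermission(String topicName) {
            String namespace = topicName.substring(0, topicName.lastIndexOf("."));
            return new String[] {
                namespace + ".topic", // permission, e.g. "org.onap.foo.topic"
                ".topic" + topicName, // instance; ".topic" is the default when pubSubInstPart is unset
                "pub"                 // action for AclOperation.WRITE ("sub" for READ)
            };
        }

        public static void main(String[] args) {
            System.out.println(String.join(" | ", pubPermission("org.onap.foo.bar")));
            // prints: org.onap.foo.topic | .topicorg.onap.foo.bar | pub
        }
    }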
diff --git a/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1.java b/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1.java
deleted file mode 100644
index af5aa8f..0000000
--- a/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.kafkaAuthorize;
-
-import java.util.Map;
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.spi.LoginModule;
-
-public class PlainLoginModule1 implements LoginModule {
-
- private static final String USERNAME_CONFIG = "username";
- private static final String PASSWORD_CONFIG = "password";
-
- static {
- PlainSaslServerProvider1.initialize();
- }
-
- @Override
- public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState, Map<String, ?> options) {
- String username = (String) options.get(USERNAME_CONFIG);
- if (username != null)
- subject.getPublicCredentials().add(username);
- String password = (String) options.get(PASSWORD_CONFIG);
- if (password != null)
- subject.getPrivateCredentials().add(password);
-
- }
-
- @Override
- public boolean login() {
- return true;
- }
-
- @Override
- public boolean logout() {
- return true;
- }
-
- @Override
- public boolean commit() {
- return true;
- }
-
- @Override
- public boolean abort() {
- return false;
- }
-}
diff --git a/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1.java b/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1.java
deleted file mode 100644
index 7a9bede..0000000
--- a/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.kafkaAuthorize;
-
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.sasl.Sasl;
-import javax.security.sasl.SaslException;
-import javax.security.sasl.SaslServer;
-import javax.security.sasl.SaslServerFactory;
-import org.apache.kafka.common.errors.SaslAuthenticationException;
-import org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProviderFactory;
-
-/**
- * Simple SaslServer implementation for SASL/PLAIN. In order to make this
- * implementation fully pluggable, authentication of username/password is fully
- * contained within the server implementation.
- * <p>
- * Valid users with passwords are specified in the Jaas configuration file. Each
- * user is specified with user_<username> as key and <password> as value. This
- * is consistent with the Zookeeper Digest-MD5 implementation.
- * <p>
- * To avoid storing clear passwords on disk or to integrate with external
- * authentication servers in production systems, this module can be replaced
- * with a different implementation.
- *
- */
-public class PlainSaslServer1 implements SaslServer {
-
- public static final String PLAIN_MECHANISM = "PLAIN";
-
- private boolean complete;
- private String authorizationId;
- private static final String AUTH_EXC_NOT_COMPLETE = "Authentication exchange has not completed";
-
-
- /**
- * @throws SaslAuthenticationException if username/password combination is invalid or if the requested
- * authorization id is not the same as username.
- * <p>
- * <b>Note:</b> This method may throw {@link SaslAuthenticationException} to provide custom error messages
- * to clients. But care should be taken to avoid including any information in the exception message that
- * should not be leaked to unauthenticated clients. It may be safer to throw {@link SaslException} in
- * some cases so that a standard error message is returned to clients.
- * </p>
- */
- @Override
- public byte[] evaluateResponse(byte[] responseBytes) throws SaslAuthenticationException {
- /*
- * Message format (from https://tools.ietf.org/html/rfc4616):
- *
- * message = [authzid] UTF8NUL authcid UTF8NUL passwd
- * authcid = 1*SAFE ; MUST accept up to 255 octets
- * authzid = 1*SAFE ; MUST accept up to 255 octets
- * passwd = 1*SAFE ; MUST accept up to 255 octets
- * UTF8NUL = %x00 ; UTF-8 encoded NUL character
- *
- * SAFE = UTF1 / UTF2 / UTF3 / UTF4
- * ;; any UTF-8 encoded Unicode character except NUL
- */
- String response = new String(responseBytes, StandardCharsets.UTF_8);
- List<String> tokens = extractTokens(response);
- String authorizationIdFromClient = tokens.get(0);
- String username = tokens.get(1);
- String password = tokens.get(2);
-
- if (username.isEmpty()) {
- throw new SaslAuthenticationException("Authentication failed: username not specified");
- }
- if (password.isEmpty()) {
- throw new SaslAuthenticationException("Authentication failed: password not specified");
- }
-
- String aafResponse = "Not Verified";
- try {
- aafResponse = AuthorizationProviderFactory.getProviderFactory().getProvider().authenticate(username,
- password);
- } catch (Exception ignored) {
- throw new SaslAuthenticationException("Authentication failed: " + aafResponse + " User " + username);
- }
- if (null != aafResponse) {
- throw new SaslAuthenticationException("Authentication failed: " + aafResponse + " User " + username);
- }
-
- if (!authorizationIdFromClient.isEmpty() && !authorizationIdFromClient.equals(username))
- throw new SaslAuthenticationException("Authentication failed: Client requested an authorization id that is different from username");
-
- this.authorizationId = username;
-
- complete = true;
- return new byte[0];
- }
-
- private List<String> extractTokens(String string) {
- List<String> tokens = new ArrayList<>();
- int startIndex = 0;
- for (int i = 0; i < 4; ++i) {
- int endIndex = string.indexOf("\u0000", startIndex);
- if (endIndex == -1) {
- tokens.add(string.substring(startIndex));
- break;
- }
- tokens.add(string.substring(startIndex, endIndex));
- startIndex = endIndex + 1;
- }
-
- if (tokens.size() != 3)
- throw new SaslAuthenticationException("Invalid SASL/PLAIN response: expected 3 tokens, got " +
- tokens.size());
-
- return tokens;
- }
-
- @Override
- public String getAuthorizationID() {
- if (!complete)
- throw new IllegalStateException(AUTH_EXC_NOT_COMPLETE);
- return authorizationId;
- }
-
- @Override
- public String getMechanismName() {
- return PLAIN_MECHANISM;
- }
-
- @Override
- public Object getNegotiatedProperty(String propName) {
- if (!complete)
- throw new IllegalStateException(AUTH_EXC_NOT_COMPLETE);
- return null;
- }
-
- @Override
- public boolean isComplete() {
- return complete;
- }
-
- @Override
- public byte[] unwrap(byte[] incoming, int offset, int len) {
- if (!complete)
- throw new IllegalStateException(AUTH_EXC_NOT_COMPLETE);
- return Arrays.copyOfRange(incoming, offset, offset + len);
- }
-
- @Override
- public byte[] wrap(byte[] outgoing, int offset, int len) {
- if (!complete)
- throw new IllegalStateException(AUTH_EXC_NOT_COMPLETE);
- return Arrays.copyOfRange(outgoing, offset, offset + len);
- }
-
- @Override
- public void dispose() {
- // TODO Auto-generated method stub
- }
-
- public static class PlainSaslServerFactory1 implements SaslServerFactory {
-
- @Override
- public SaslServer createSaslServer(String mechanism, String protocol, String serverName, Map<String, ?> props, CallbackHandler cbh)
- throws SaslException {
-
- if (!PLAIN_MECHANISM.equals(mechanism))
- throw new SaslException(String.format("Mechanism '%s' is not supported. Only PLAIN is supported.", mechanism));
-
- return new PlainSaslServer1();
- }
-
- @Override
- public String[] getMechanismNames(Map<String, ?> props) {
- if (props == null) return new String[]{PLAIN_MECHANISM};
- String noPlainText = (String) props.get(Sasl.POLICY_NOPLAINTEXT);
- if ("true".equals(noPlainText))
- return new String[]{};
- else
- return new String[]{PLAIN_MECHANISM};
- }
- }
-}
-
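
For reference, the exchange that evaluateResponse parses is plain NUL-separated UTF-8 as defined in RFC 4616. Here is a hedged sketch that builds a client response and splits it the same way extractTokens does; the credentials are placeholders:

    import java.nio.charset.StandardCharsets;

    public class PlainMessageDemo {
        public static void main(String[] args) {
            // message = [authzid] UTF8NUL authcid UTF8NUL passwd
            String authzid = ""; // empty: client requests no separate authorization id
            String username = "alice";
            String password = "secret";
            byte[] response = (authzid + "\u0000" + username + "\u0000" + password)
                    .getBytes(StandardCharsets.UTF_8);

            String decoded = new String(response, StandardCharsets.UTF_8);
            String[] tokens = decoded.split("\u0000", -1); // -1 keeps the leading empty authzid
            System.out.println("authzid='" + tokens[0] + "' user='" + tokens[1] + "'");
        }
    }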
diff --git a/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServerProvider1.java b/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServerProvider1.java
deleted file mode 100644
index 37b408e..0000000
--- a/src/main/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServerProvider1.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.kafkaAuthorize;
-
-import java.security.Provider;
-import java.security.Security;
-
-import org.onap.dmaap.kafkaAuthorize.PlainSaslServer1.PlainSaslServerFactory1;
-
-public class PlainSaslServerProvider1 extends Provider {
-
- private static final long serialVersionUID = 1L;
-
- protected PlainSaslServerProvider1() {
- super("Simple SASL/PLAIN Server Provider", 1.0, "Simple SASL/PLAIN Server Provider for Kafka");
- super.put("SaslServerFactory." + PlainSaslServer1.PLAIN_MECHANISM, PlainSaslServerFactory1.class.getName());
- }
-
- public static void initialize() {
- Security.insertProviderAt(new PlainSaslServerProvider1(),1);
- }
-}
-
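
Once initialize() inserts the provider at position 1, the standard JCA lookup resolves the PLAIN mechanism to this factory. A hedged sketch using the javax.security.sasl API; the protocol and server name are placeholders:

    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslServer;

    public class SaslLookupDemo {
        public static void main(String[] args) throws Exception {
            PlainSaslServerProvider1.initialize();
            // The registered SaslServerFactory now answers for mechanism "PLAIN".
            SaslServer server = Sasl.createSaslServer(
                    PlainSaslServer1.PLAIN_MECHANISM, "kafka", "localhost", null, null);
            System.out.println(server.getMechanismName()); // PLAIN
        }
    }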
diff --git a/src/main/resources/META-INF/services/org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProvider b/src/main/resources/META-INF/services/org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProvider
deleted file mode 100644
index 0388ce7..0000000
--- a/src/main/resources/META-INF/services/org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProvider
+++ /dev/null
@@ -1 +0,0 @@
-org.onap.dmaap.commonauth.kafka.base.authorization.Cadi3AAFProvider \ No newline at end of file
diff --git a/src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactoryTest.java b/src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactoryTest.java
deleted file mode 100644
index bf7890e..0000000
--- a/src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/AuthorizationProviderFactoryTest.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.commonauth.kafka.base.authorization;
-
-import static org.junit.Assert.assertNotNull;
-
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-@RunWith(PowerMockRunner.class)
-@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*", "jdk.internal.reflect.*", "javax.crypto.*"})
-public class AuthorizationProviderFactoryTest {
-
- @Test
- public void testFactory() {
- assertNotNull(AuthorizationProviderFactory.getProviderFactory().getProvider());
- }
-
-}
diff --git a/src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProviderTest.java b/src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProviderTest.java
deleted file mode 100644
index 4f9de3d..0000000
--- a/src/test/java/org/onap/dmaap/commonauth/kafka/base/authorization/Cadi3AAFProviderTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.commonauth.kafka.base.authorization;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.when;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.onap.aaf.cadi.aaf.v2_0.AAFAuthn;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-
-@RunWith(PowerMockRunner.class)
-@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*", "jdk.internal.reflect.*"})
-public class Cadi3AAFProviderTest {
-
- public Cadi3AAFProvider cadi3AAFProvider;
-
- @Mock
- private static AAFAuthn<?> aafAuthn;
-
- static {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- System.setProperty("enableCadi", "true");
- }
-
- @Before
- public void setUp() {
- MockitoAnnotations.initMocks(this);
- cadi3AAFProvider = new Cadi3AAFProvider();
- }
-
- @Test
- public void testHasPermission() {
- assertFalse(cadi3AAFProvider.hasPermission("userID", "permission", "instance", "action"));
- }
-
- @Test
- public void testHasAdminPermission() {
- assertTrue(cadi3AAFProvider.hasPermission("admin", "permission", "instance", "action"));
- }
-
- public void testAuthenticate() throws Exception {
- when(aafAuthn.validate("userId", "password")).thenReturn("valid");
- assertEquals("valid", cadi3AAFProvider.authenticate("userId", "password"));
- }
-
- @Test
- public void testAuthenticateAdmin() throws Exception {
- assertNull(cadi3AAFProvider.authenticate("kafkaUsername", "apiKey"));
- }
-
- @Test
- public void testAuthenticateAdminWithWrongCred() throws Exception {
- assertNotNull(cadi3AAFProvider.authenticate("kafkaUsername", "api"));
- }
-}
diff --git a/src/test/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizerTest.java b/src/test/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizerTest.java
deleted file mode 100644
index e2e85af..0000000
--- a/src/test/java/org/onap/dmaap/kafkaAuthorize/KafkaCustomAuthorizerTest.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
-*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.kafkaAuthorize;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.kafka.common.acl.AclOperation;
-import org.apache.kafka.common.security.auth.KafkaPrincipal;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProvider;
-import org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProviderFactory;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import kafka.network.RequestChannel.Session;
-import kafka.security.auth.Operation;
-import kafka.security.auth.Resource;
-import kafka.security.auth.ResourceType;
-
-@RunWith(PowerMockRunner.class)
-@PowerMockIgnore({"javax.net.ssl.*", "javax.security.auth.*", "jdk.internal.reflect.*", "javax.crypto.*"})
-@PrepareForTest({ AuthorizationProviderFactory.class })
-public class KafkaCustomAuthorizerTest {
- @Mock
- Session arg0;
- @Mock
- Operation arg1;
- @Mock
- Resource arg2;
- @Mock
- KafkaPrincipal principal;
- @Mock
- ResourceType resourceType;
- @Mock
- AuthorizationProviderFactory factory;
- @Mock
- AuthorizationProvider provider;
-
- KafkaCustomAuthorizer authorizer;
-
- static {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- System.setProperty("enableCadi", "true");
- }
-
- @Before
- public void setUp() {
- MockitoAnnotations.initMocks(this);
- PowerMockito.when(principal.getName()).thenReturn("fullName");
- PowerMockito.when(arg0.principal()).thenReturn(principal);
- PowerMockito.when(arg1.name()).thenReturn("Write");
- PowerMockito.when(resourceType.name()).thenReturn("Topic");
- PowerMockito.when(arg2.resourceType()).thenReturn(resourceType);
- PowerMockito.when(arg2.name()).thenReturn("namespace.Topic");
- PowerMockito.mockStatic(AuthorizationProviderFactory.class);
- PowerMockito.when(AuthorizationProviderFactory.getProviderFactory()).thenReturn(factory);
- PowerMockito.when(factory.getProvider()).thenReturn(provider);
-
- }
-
- @Test
- public void testAuthorizerSuccess() {
- PowerMockito.when(provider.hasPermission("fullName", "namespace.topic", ":topic.namespace.Topic", "pub"))
- .thenReturn(true);
- authorizer = new KafkaCustomAuthorizer();
- assertTrue(authorizer.authorize(arg0, arg1, arg2));
- }
-
- @Test
- public void testAuthorizerFailure() {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- PowerMockito.when(arg2.name()).thenReturn("org.onap.dmaap.mr.testtopic");
- PowerMockito.when(arg1.toJava()).thenReturn(AclOperation.CREATE);
- System.setProperty("msgRtr.topicfactory.aaf", "org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:");
- PowerMockito.when(provider.hasPermission("fullName", "namespace.topic", ":topic.namespace.Topic", "pub"))
- .thenReturn(false);
- authorizer = new KafkaCustomAuthorizer();
- try {
- authorizer.authorize(arg0, arg1, arg2);
- } catch (Exception e) {
- assertTrue(true);
- }
-
- }
-
- @Test
- public void testAuthorizerFailure1() {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- PowerMockito.when(arg2.name()).thenReturn("org.onap.dmaap.mr.testtopic");
- PowerMockito.when(resourceType.name()).thenReturn("Cluster");
- PowerMockito.when(arg1.toJava()).thenReturn(AclOperation.CREATE);
- System.setProperty("msgRtr.topicfactory.aaf", "org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:");
- PowerMockito.when(provider.hasPermission("fullName", "namespace.topic", ":topic.namespace.Topic", "pub"))
- .thenReturn(false);
- authorizer = new KafkaCustomAuthorizer();
- try {
- authorizer.authorize(arg0, arg1, arg2);
- } catch (Exception e) {
- assertTrue(true);
- }
-
- }
-
- @Test
- public void testAuthorizerFailure2() {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- PowerMockito.when(arg2.name()).thenReturn("org.onap.dmaap.mr.testtopic");
- PowerMockito.when(resourceType.name()).thenReturn("Topic");
- PowerMockito.when(arg1.toJava()).thenReturn(AclOperation.WRITE);
- PowerMockito.when(provider.hasPermission("fullName", "namespace.topic", ":topic.namespace.Topic", "pub"))
- .thenReturn(false);
- authorizer = new KafkaCustomAuthorizer();
- try {
- authorizer.authorize(arg0, arg1, arg2);
- } catch (Exception e) {
- assertTrue(true);
- }
-
- }
-
- @Test
- public void testAuthorizerFailure3() {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- PowerMockito.when(arg2.name()).thenReturn("org.onap.dmaap.mr.testtopic");
- PowerMockito.when(resourceType.name()).thenReturn("Topic");
- PowerMockito.when(arg1.toJava()).thenReturn(AclOperation.DESCRIBE);
- PowerMockito.when(provider.hasPermission("fullName", "namespace.topic", ":topic.namespace.Topic", "pub"))
- .thenReturn(false);
- authorizer = new KafkaCustomAuthorizer();
- try {
- authorizer.authorize(arg0, arg1, arg2);
- } catch (Exception e) {
- assertTrue(true);
- }
-
- }
-
- @Test
- public void testAuthorizerFailure4() {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- PowerMockito.when(arg2.name()).thenReturn("org.onap.dmaap.mr.testtopic");
- PowerMockito.when(resourceType.name()).thenReturn("Topic");
- PowerMockito.when(arg1.toJava()).thenReturn(AclOperation.READ);
- PowerMockito.when(provider.hasPermission("fullName", "namespace.topic", ":topic.namespace.Topic", "pub"))
- .thenReturn(false);
- authorizer = new KafkaCustomAuthorizer();
- try {
- authorizer.authorize(arg0, arg1, arg2);
- } catch (Exception e) {
- assertTrue(true);
- }
-
- }
-
- @Test
- public void testAuthorizerFailure5() {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- PowerMockito.when(arg2.name()).thenReturn("org.onap.dmaap.mr.testtopic");
- PowerMockito.when(resourceType.name()).thenReturn("Cluster");
- PowerMockito.when(arg1.toJava()).thenReturn(AclOperation.IDEMPOTENT_WRITE);
- System.setProperty("msgRtr.topicfactory.aaf", "org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:");
- PowerMockito.when(provider.hasPermission("fullName", "namespace.topic", ":topic.namespace.Topic", "pub"))
- .thenReturn(false);
- authorizer = new KafkaCustomAuthorizer();
- try {
- authorizer.authorize(arg0, arg1, arg2);
- } catch (Exception e) {
- assertTrue(true);
- }
-
- }
-
- @Test
- public void testAuthorizerFailure6() {
- System.setProperty("CADI_PROPERTIES", "src/test/resources/cadi.properties");
- PowerMockito.when(arg2.name()).thenReturn("org.onap.dmaap.mr.testtopic");
- PowerMockito.when(arg1.toJava()).thenReturn(AclOperation.DELETE);
- System.setProperty("msgRtr.topicfactory.aaf", "org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:");
- PowerMockito.when(provider.hasPermission("fullName", "namespace.topic", ":topic.namespace.Topic", "pub"))
- .thenReturn(false);
- authorizer = new KafkaCustomAuthorizer();
- try {
- authorizer.authorize(arg0, arg1, arg2);
- } catch (Exception e) {
- assertTrue(true);
- }
-
- }
-
-
-}
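
Note: the deleted test class above relied on PowerMock's @PrepareForTest/PowerMockito to stub the static AuthorizationProviderFactory.getProviderFactory() call. A minimal PowerMock-free sketch of the success case is below; it assumes org.mockito:mockito-inline (Mockito.mockStatic, available since Mockito 3.4) on the classpath and reuses the same @Mock fields and stubs as the deleted @Before method. It is an illustration, not code from this change.

    // Sketch only: assumes mockito-inline and the same @Mock fields
    // (arg0, arg1, arg2, factory, provider) as the deleted class above.
    @Test
    public void testAuthorizerSuccess() {
        try (MockedStatic<AuthorizationProviderFactory> staticFactory =
                Mockito.mockStatic(AuthorizationProviderFactory.class)) {
            // Replaces PowerMockito.mockStatic + PowerMockito.when on the factory.
            staticFactory.when(AuthorizationProviderFactory::getProviderFactory)
                    .thenReturn(factory);
            Mockito.when(factory.getProvider()).thenReturn(provider);
            Mockito.when(provider.hasPermission("fullName", "namespace.topic",
                    ":topic.namespace.Topic", "pub")).thenReturn(true);
            KafkaCustomAuthorizer authorizer = new KafkaCustomAuthorizer();
            assertTrue(authorizer.authorize(arg0, arg1, arg2));
        }
    }
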
diff --git a/src/test/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1Test.java b/src/test/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1Test.java
deleted file mode 100644
index 9383539..0000000
--- a/src/test/java/org/onap/dmaap/kafkaAuthorize/PlainLoginModule1Test.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * Modification copyright (C) 2021 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.kafkaAuthorize;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.util.Map;
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-
-@PowerMockIgnore({"jdk.internal.reflect.*"})
-@PrepareForTest({ PlainLoginModule1.class })
-public class PlainLoginModule1Test {
-
- static PlainLoginModule1 pLogin = new PlainLoginModule1();
- static Subject subject;
- @Mock
- static CallbackHandler callbackHandler;
-
- @Mock
- static Map<String, String> mymap1;
-
- @Mock
- static Map<String, ?> mymap2;
-
- @Before
- public void setUp() {
- MockitoAnnotations.initMocks(this);
- PowerMockito.when(mymap1.get("username")).thenReturn("user1");
- PowerMockito.when(mymap1.get("password")).thenReturn("pass1");
- pLogin.initialize(subject, callbackHandler, mymap1, mymap2);
- }
-
- @Test
- public void testLogin() {
- assertTrue(pLogin.login());
- }
-
- @Test
- public void testLogout() {
- assertTrue(pLogin.logout());
- }
-
- @Test
- public void testCommit() {
- assertTrue(pLogin.commit());
- }
-
- @Test
- public void testAbort() {
- assertFalse(pLogin.abort());
- }
-}
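
Note: the deleted PlainLoginModule1Test mocks java.util.Map just to return two fixed entries; plain HashMaps show the same flow more directly. A hedged sketch, assuming PlainLoginModule1 reads "username"/"password" from the map passed in the third position, exactly as the deleted test above does:

    // Sketch only: same initialize/login sequence with real maps instead of mocks.
    @Test
    public void testLoginWithPlainMaps() {
        Map<String, String> credentials = new HashMap<>();
        credentials.put("username", "user1");
        credentials.put("password", "pass1");
        PlainLoginModule1 module = new PlainLoginModule1();
        // CallbackHandler is a single-method interface, so a no-op lambda suffices.
        module.initialize(new Subject(), callbacks -> { }, credentials, new HashMap<>());
        assertTrue(module.login());
    }
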
diff --git a/src/test/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1Test.java b/src/test/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1Test.java
deleted file mode 100644
index 5d18bbd..0000000
--- a/src/test/java/org/onap/dmaap/kafkaAuthorize/PlainSaslServer1Test.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START=======================================================
- * org.onap.dmaap
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- *
- *******************************************************************************/
-package org.onap.dmaap.kafkaAuthorize;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.util.Map;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.sasl.Sasl;
-import javax.security.sasl.SaslException;
-import org.apache.kafka.common.errors.SaslAuthenticationException;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.MockitoAnnotations;
-import org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProvider;
-import org.onap.dmaap.commonauth.kafka.base.authorization.AuthorizationProviderFactory;
-import org.onap.dmaap.kafkaAuthorize.PlainSaslServer1.PlainSaslServerFactory1;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-@RunWith(PowerMockRunner.class)
-@PowerMockIgnore({"javax.security.auth.*", "jdk.internal.reflect.*", "javax.crypto.*"})
-@PrepareForTest({ AuthorizationProviderFactory.class })
-public class PlainSaslServer1Test {
-
- PlainSaslServer1 sslServer = new PlainSaslServer1();
-
- @Mock
- AuthorizationProviderFactory factory;
- @Mock
- AuthorizationProvider provider;
- @Mock
- CallbackHandler callbackHandler;
- @Mock
- static Map<String, String> props;
-
- @Before
- public void setUp() {
- MockitoAnnotations.initMocks(this);
- PowerMockito.mockStatic(AuthorizationProviderFactory.class);
- PowerMockito.when(AuthorizationProviderFactory.getProviderFactory()).thenReturn(factory);
- PowerMockito.when(factory.getProvider()).thenReturn(provider);
- }
-
- @Test
- public void testAuthentication() throws Exception {
- String response = "authorizationID\u0000username\u0000password";
- PowerMockito.when(provider.authenticate("username", "password")).thenReturn(null);
- assertNotNull(sslServer.evaluateResponse(response.getBytes()));
-
- }
-
- @Test
- public void testAuthenticationEmptyAuth() throws Exception {
- String response = "\u0000username\u0000password";
- PowerMockito.when(provider.authenticate("username", "password")).thenReturn(null);
- assertNotNull(sslServer.evaluateResponse(response.getBytes()));
- }
-
- @Test
- public void testAuthenticationEmptyUser() throws Exception {
- String response = "authorizationID\u0000\u0000password";
- PowerMockito.when(provider.authenticate("username", "password")).thenReturn(null);
- try {
- sslServer.evaluateResponse(response.getBytes());
- }
- catch (SaslAuthenticationException e) {
- assertNotNull(e);
- }
- }
-
- @Test
- public void testAuthenticationEmptyPassword() throws Exception {
- String response = "authorizationID\u0000username\u0000";
- PowerMockito.when(provider.authenticate("username", "password")).thenReturn(null);
- try {
- sslServer.evaluateResponse(response.getBytes());
- }
- catch (SaslAuthenticationException e) {
- assertNotNull(e);
- }
- }
-
- @Test
- public void testGetAuthorizationIdWithException() {
- try {
- sslServer.getAuthorizationID();
- }
- catch (IllegalStateException ise) {
- assertTrue(ise.getMessage().equalsIgnoreCase("Authentication exchange has not completed"));
- }
- }
-
- @Test
- public void testGetNegotiatedPropertyWithException() {
- try {
- sslServer.getNegotiatedProperty("test");
- }
- catch (IllegalStateException ise) {
- assertTrue(ise.getMessage().equalsIgnoreCase("Authentication exchange has not completed"));
- }
- }
-
- @Test
- public void testIsComplete() {
- try {
- sslServer.getNegotiatedProperty("test");
- }
- catch (IllegalStateException ise) {
- assertTrue(ise.getMessage().equalsIgnoreCase("Authentication exchange has not completed"));
- }
- assert(true);
- }
-
- @Test
- public void testUnwrap() {
- try {
- sslServer.unwrap(new byte[1], 0, 0);
- }
- catch (IllegalStateException ise) {
- assertTrue(ise.getMessage().equalsIgnoreCase("Authentication exchange has not completed"));
- } catch (SaslAuthenticationException e) {
- e.printStackTrace();
- }
- assert(true);
- }
-
- @Test
- public void testWrap() {
- try {
- sslServer.wrap(new byte[1], 0, 0);
- }
- catch (IllegalStateException ise) {
- assertTrue(ise.getMessage().equalsIgnoreCase("Authentication exchange has not completed"));
- } catch (SaslAuthenticationException e) {
- e.printStackTrace();
- }
- assert(true);
- }
-
- @Test
- public void testGetMech() {
- assertEquals("PLAIN", sslServer.getMechanismName());
- }
-
- @Test
- public void testIsCompleteBool() {
- assertFalse(sslServer.isComplete());
- }
-
- @Test
- public void testPlainSaslServer1() throws SaslException {
- PlainSaslServerFactory1 plainSaslServerFactory1 = new PlainSaslServerFactory1();
- PlainSaslServer1 saslServer1 = (PlainSaslServer1) plainSaslServerFactory1.createSaslServer(PlainSaslServer1.PLAIN_MECHANISM, "https", "mySaslServer", props, callbackHandler);
- assertNotNull(saslServer1);
- Mockito.when(props.get(Sasl.POLICY_NOPLAINTEXT)).thenReturn("javax.security.sasl.policy.noplaintext");
- assertArrayEquals(new String[]{"PLAIN"}, plainSaslServerFactory1.getMechanismNames(props));
- }
-}
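
Note: the \u0000-separated strings in the deleted tests above follow the SASL/PLAIN wire format (RFC 4616): authzid NUL authcid NUL passwd. A small helper makes the cases easier to read; the helper name and the explicit UTF-8 charset are ours, not from this change:

    // Sketch only: builds the client response that evaluateResponse() parses.
    static byte[] plainResponse(String authzid, String user, String password) {
        String token = authzid + '\u0000' + user + '\u0000' + password;
        return token.getBytes(java.nio.charset.StandardCharsets.UTF_8);
    }
    // e.g. testAuthenticationEmptyAuth above sends plainResponse("", "username", "password");
    // testAuthenticationEmptyUser sends plainResponse("authorizationID", "", "password").
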
diff --git a/src/test/resources/cadi.properties b/src/test/resources/cadi.properties
deleted file mode 100644
index fc14bf1..0000000
--- a/src/test/resources/cadi.properties
+++ /dev/null
@@ -1,19 +0,0 @@
-aaf_locate_url=https://aaf-locate.onap:8095
-aaf_url=https://AAF_LOCATE_URL/onap.org.osaaf.aaf.service:2.1
-aaf_env=DEV
-aaf_lur=org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm
-
-cadi_truststore=src/test/resources/org.onap.dmaap.mr.trust.jks
-cadi_truststore_password=@MP:Wc^8}%n6tG1kr]MI{:#V
-
-cadi_keyfile=src/test/resources/org.onap.dmaap.mr.keyfile
-
-cadi_alias=dmaapmr@mr.dmaap.onap.org
-cadi_keystore=src/test/resources/org.onap.dmaap.mr.p12
-cadi_keystore_password=iAHma{haRm)lJ^ah5Au{nZ;$
-cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
-
-cadi_loglevel=INFO
-cadi_protocols=TLSv1.1,TLSv1.2
-cadi_latitude=37.78187
-cadi_longitude=-122.26147
diff --git a/src/test/resources/org.onap.dmaap.mr.keyfile b/src/test/resources/org.onap.dmaap.mr.keyfile
deleted file mode 100644
index 977f63a..0000000
--- a/src/test/resources/org.onap.dmaap.mr.keyfile
+++ /dev/null
@@ -1,27 +0,0 @@
-TbnInQ-QMCbTM2Kl9R8DfsTKhwt0nv8PTHooRfzyuHDQD3bHVmU4vjGXeQaFbj1Rq_DcOz6shg8k
-mYL0F5L0a-ZxO8id4sbkKqs_BAcRMf4PLJB0X0kBEvAq7Cqt_Hafgq4tz3c_OmutLJlGsWU4AtDe
-b68ISK2TG_P1CJGO-Y4xmRC4WW3YxnrL7pWq3r1VJ59KLlCAkL796EGy253lP2Uxl3788uUHZo-Q
-h74Yq3sxyyBn2shlH6vhRFOY8PVXO2-ljmBHrQj_NpL62ensYb1cxuGD5mivayGo2g98S3pX2ec2
-jhBB1uXsRSOJ-g1oScB9mDJYkib5l52lNKGw_ZSVaTNh2BP8T-HQjtgpM4lRps6nqLpwzV37u5wi
-67a5KIAdQz4txAjBBr62zGBpwkvEOgrtG-fk3Gh6C6C8KwxfUk-mysZNP1SaWLG0U4T05ilnrZa6
-PNqr35wqh_IBfJj2iK1pLqvlFCq8-VDDg1HWNvzlTzyzmlIFNrvHRaskgoM0QNi9ulbQyZ-j6Nsw
-l0B4khpNcOt2nc2cnI_jK7yy92i9I3IF4RcubZJSwvL1UEhtbw21XgIaWOcfnCmzIDdUZ33T-eNn
-6C6nL4_YAYfSndxOtr25tuUAbq5LWvXKUij1HAaQluN4gBMJxIdY4qm_tcKDxLTsHPTsjujyA_vr
-Ut2RWwwIqvUb98Sf2P7r8aIJe6GYrbKMs4mEnbKHzWibaW5z6652EGK20-Z3gvnZaGZ103fcV_4C
-IIQUxMmZf8TbPgjMHAP-f-uLCoQ9pPSAFsm3tdQB8IRCsfIFXsg65FPpa2YW7lVpwajCa-hPcGer
-pDbT7gKvUNijmcokNFRjjCiMUv8GyXk9xJ1XUB54pb0pZO9Nvswn94FHTpJV8o-ZSeEbnWGYfbFP
-gJYtLMrjmoolSQeGOH3gZiLoi_qkscBXhVVQ8_USSouQQPVgs2CgHpYqCrEeul9tIVTEQ6Ae_-nY
-IZKHmaEWewIRa7MhP3QzdwbuQ4v5V8D2vYYGrfrTSCOogPx8nwLKhfD1uztbMFb3pZ_qfjEvvL93
-2s8M2tnAGKXOG4z-TLQZmA0KkW32B0IB7XKQBQaElHlkbv2Sibt87APkTk38H4dlGGs1eVRnjmyX
-7sIjtbPSCzU9YXr6sRzCQH6qbzdioExUJQYNmraLx8JwJZw-C5_6jUc1jYkGMB3WFGj5i8awxSGM
-aPOeH8s6PzwK0M_4hsdl_40S8KVtaMH3541hxpUeTl_wWtlYGyRefHzSB3cEE_UD3ZvKaR56DFFJ
-szIVTeNBgZewHPkRyUiUYM3OhUwgbGkLPMA5es60qXGstqGUUZWWGRNOjE8aqQDOMElPpMZOFeqi
-m-zaUNU5i0uVpgwfEGVzl5i3jr6qRRnRRYyt7Ufiq_-L4gATQ_FtpO3YR87V9MSqKFoFT1Lr9XSg
-_-RSlaZ_uUc6DeplZqD3sExqqz3RcxvyaF1pieFMAv4IUb2-8FwNVSiMymT4g_F98s3iavydu5oy
-YtnYVAMgXeMM_O3uLnWX3uyNDWVTmSmYHSm9L0yL84E55Q-KHyjRJ5k5MKqAOmj_NzpdFyJ0zvly
-wI145Rr0IErHcrVAaqk7PR1NMoRFnndd3eRWRnsP8JzajvZfJLtLIiR2KRBl8q3Hw55rx0zr7lLu
-Wf_tRnAHfhdvwaTXZiGWPDTVOm4LlXUYm4WNu2RjEJeKq0aJ8z4aRSynxAE95xBn4wPEgu76l97X
-ipIYDz8Fv2VD4k2Oe358FtQri6wxeHV_0PVJqwSGthn3X9aDpfrAl4scUU8SoOG6CGkWRM1U1ALv
-2pv7aYrdv729j-2F8UTdXYDCxg8nlXXIi0RekPviB-AhQRX9vt4z4z6ePFXKIZqf5Lt0diG4rz_z
--tN7Vyb21CsgcE-yDk_yonyp66G1dOFMaJd-FXelfyx-9-0PskvRCrD_OMspAqb7xqDyML2CSZxs
-BvDTH9V-5Ixr72FlA3jecd9SJwCE_icpdqttZnkF-Gu7DN2dHM31WIX7ivnwef2YmxtglwKL \ No newline at end of file
diff --git a/src/test/resources/org.onap.dmaap.mr.p12 b/src/test/resources/org.onap.dmaap.mr.p12
deleted file mode 100644
index 589075e..0000000
--- a/src/test/resources/org.onap.dmaap.mr.p12
+++ /dev/null
Binary files differ
diff --git a/src/test/resources/org.onap.dmaap.mr.trust.jks b/src/test/resources/org.onap.dmaap.mr.trust.jks
deleted file mode 100644
index c2e8111..0000000
--- a/src/test/resources/org.onap.dmaap.mr.trust.jks
+++ /dev/null
Binary files differ
diff --git a/version.properties b/version.properties
index f33ebd5..a805c9c 100644
--- a/version.properties
+++ b/version.properties
@@ -1,7 +1,5 @@
###
# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
# Copyright (C) 2017 AT&T Intellectual Property. All rights
# reserved.
# ================================================================================
@@ -24,8 +22,8 @@
# Note that these variables cannot be structured (e.g. : version.release or version.snapshot etc... )
# because they are used in Jenkins, whose plug-in doesn't support
-major=1
-minor=1
+major=0
+minor=0
patch=1
base_version=${major}.${minor}.${patch}