author    seanfos <sean.osullivan@est.tech>  2021-10-06 16:09:15 +0100
committer seanfos <sean.osullivan@est.tech>  2021-10-11 11:08:55 +0100
commit    a5b9e047b91a933ab1485011b459bfeac6e857ce (patch)
tree      17071362de32ff9b4855bd1ded09ec90d57e455b /src/test/java/org
parent    80adb1f3525753841a7853d245dacc894417a4f7 (diff)
[MR] Add support for configuring sasl.jaas.config at runtime
Signed-off-by: seanfos <sean.osullivan@est.tech>
Change-Id: I92a6fdb9e375db7b355e19127a5fdbe2b4d2a827
Issue-ID: DMAAP-1653
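The UtilsTest changes at the bottom of this diff exercise a Utils.addSaslProps() helper that resolves the SASL mechanism and JAAS login string at runtime. A minimal sketch of that behavior, assuming the SASLMECH and JAASLOGIN environment variables are the override mechanism and that Utils.SASL_MECH holds the "sasl.mechanism" key (both inferred from the tests below; this is not the actual Utils implementation):

    import java.util.Properties;

    public final class SaslPropsSketch {

        // Assumed value of Utils.SASL_MECH; the constant itself is not shown in this diff.
        static final String SASL_MECH = "sasl.mechanism";

        // Sketch only: the defaults mirror the PLAIN setup used by the embedded
        // test broker; the SASLMECH/JAASLOGIN overrides mirror the SCRAM test case.
        public static Properties addSaslProps() {
            Properties props = new Properties();
            String mech = System.getenv().getOrDefault("SASLMECH", "plain").toUpperCase();
            String jaas = System.getenv().getOrDefault("JAASLOGIN",
                    "org.apache.kafka.common.security.plain.PlainLoginModule required "
                    + "username='admin' password='admin_secret';");
            props.put("security.protocol", "SASL_PLAINTEXT");
            props.put(SASL_MECH, mech);
            props.put("sasl.jaas.config", jaas);
            return props;
        }
    }

Both new test cases below pass against this sketch: with no environment set it yields the PLAIN properties, and with SASLMECH=scram-sha-512 plus a JAASLOGIN string it yields the SCRAM-SHA-512 properties.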
Diffstat (limited to 'src/test/java/org')
-rw-r--r--  src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java |   2
-rw-r--r--  src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java    | 147
-rw-r--r--  src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java                   |  73
3 files changed, 124 insertions(+), 98 deletions(-)
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java
index 7a0fe78..44047e4 100644
--- a/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java
+++ b/src/test/java/org/onap/dmaap/mr/cambria/backends/kafka/KafkaPublisherTest.java
@@ -44,7 +44,7 @@ public class KafkaPublisherTest {
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
PowerMockito.mockStatic(Utils.class);
- PowerMockito.when(Utils.isCadiEnabled()).thenReturn(true);
+ PowerMockito.when(Utils.isCadiEnabled()).thenReturn(false);
}
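This hunk flips the stubbed Utils.isCadiEnabled() from true to false, so KafkaPublisher is now exercised on its non-CADI path. For context, stubbing a static method on Utils only works when PowerMock instruments the class; the plumbing this fragment relies on looks roughly like the following (the class-level annotations are assumed, since only the setUp body appears in the hunk):

    import org.junit.Before;
    import org.junit.runner.RunWith;
    import org.mockito.MockitoAnnotations;
    import org.powermock.api.mockito.PowerMockito;
    import org.powermock.core.classloader.annotations.PrepareForTest;
    import org.powermock.modules.junit4.PowerMockRunner;
    import org.onap.dmaap.dmf.mr.utils.Utils;

    @RunWith(PowerMockRunner.class)
    @PrepareForTest(Utils.class)  // required before Utils statics can be stubbed
    public class KafkaPublisherTestSketch {

        @Before
        public void setUp() throws Exception {
            MockitoAnnotations.initMocks(this);
            PowerMockito.mockStatic(Utils.class);
            // false = skip the CADI auth path; the publisher then relies on
            // whatever SASL properties the Utils helper supplies at runtime.
            PowerMockito.when(Utils.isCadiEnabled()).thenReturn(false);
        }
    }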
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java b/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java
index f49f615..9d7a931 100644
--- a/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java
+++ b/src/test/java/org/onap/dmaap/mr/cambria/embed/EmbedConfigurationReader.java
@@ -9,7 +9,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,26 +18,27 @@
* ============LICENSE_END=========================================================
*
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
- *
+ *
*******************************************************************************/
- package org.onap.dmaap.mr.cambria.embed;
+package org.onap.dmaap.mr.cambria.embed;
+import com.att.ajsc.filemonitor.AJSCPropertiesMap;
+import com.att.nsa.security.db.BaseNsaApiDbImpl;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
import java.io.File;
import java.util.Arrays;
import java.util.Map;
import java.util.Properties;
-
import org.apache.commons.io.FileUtils;
import org.apache.curator.framework.CuratorFramework;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import org.onap.dmaap.dmf.mr.backends.kafka.KafkaPublisher;
-import org.onap.dmaap.dmf.mr.backends.memory.MemoryMetaBroker;
-import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueue;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
+import org.onap.dmaap.dmf.mr.backends.kafka.KafkaPublisher;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryMetaBroker;
+import org.onap.dmaap.dmf.mr.backends.memory.MemoryQueue;
import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaConsumerFactory;
import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker;
import org.onap.dmaap.dmf.mr.beans.DMaaPMetricsSet;
@@ -49,13 +50,11 @@ import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
import org.onap.dmaap.dmf.mr.utils.DMaaPCuratorFactory;
import org.onap.dmaap.dmf.mr.utils.PropertyReader;
-import com.att.nsa.security.db.BaseNsaApiDbImpl;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-import com.att.nsa.security.db.simple.NsaSimpleApiKeyFactory;
+import org.onap.dmaap.dmf.mr.utils.Utils;
public class EmbedConfigurationReader {
- private static final String DEFAULT_KAFKA_LOG_DIR = "/kafka_embedded";
+ private static final String DEFAULT_KAFKA_LOG_DIR = "/kafka_embedded";
public static final String TEST_TOPIC = "testTopic";
private static final int BROKER_ID = 0;
private static final int BROKER_PORT = 5000;
@@ -69,49 +68,49 @@ public class EmbedConfigurationReader {
String dir;
private AdminClient fKafkaAdminClient;
KafkaLocal kafkaLocal;
-
- public void setUp() throws Exception {
-
- ClassLoader classLoader = getClass().getClassLoader();
- AJSCPropertiesMap.refresh(new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()));
-
- Properties kafkaProperties;
+
+ public void setUp() throws Exception {
+
+ ClassLoader classLoader = getClass().getClassLoader();
+ AJSCPropertiesMap.refresh(new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()));
+
+ Properties kafkaProperties;
Properties zkProperties;
try {
//load properties
- dir = new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()).getParent();
+ dir = new File(classLoader.getResource(CambriaConstants.msgRtr_prop).getFile()).getParent();
kafkaProperties = getKafkaProperties(dir + DEFAULT_KAFKA_LOG_DIR, BROKER_PORT, BROKER_ID);
zkProperties = getZookeeperProperties(ZOOKEEPER_PORT,dir + DEFAULT_ZOOKEEPER_LOG_DIR);
//start kafkaLocalServer
kafkaLocal = new KafkaLocal(kafkaProperties, zkProperties);
-
+
Map<String, String> map = AJSCPropertiesMap.getProperties(CambriaConstants.msgRtr_prop);
map.put(CambriaConstants.kSetting_ZkConfigDbServers, ZOOKEEPER_HOST);
map.put("kafka.client.zookeeper", ZOOKEEPER_HOST);
map.put("kafka.metadata.broker.list", LOCALHOST_BROKER);
-
+
DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader());
-
+
final Properties props = new Properties ();
- props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092" );
- props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
- props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
- props.put("sasl.mechanism", "PLAIN");
- fKafkaAdminClient = AdminClient.create ( props );
-
- // if(!AdminUtils.topicExists(dMaaPZkClient, TEST_TOPIC))
+ props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092" );
+ props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
+ props.put("sasl.mechanism", "PLAIN");
+ fKafkaAdminClient = AdminClient.create ( props );
+
+ // if(!AdminUtils.topicExists(dMaaPZkClient, TEST_TOPIC))
// AdminUtils.createTopic(dMaaPZkClient, TEST_TOPIC, 3, 1, new Properties());
- final NewTopic topicRequest = new NewTopic ( TEST_TOPIC, 3, new Integer(1).shortValue () );
- fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) );
+ final NewTopic topicRequest = new NewTopic ( TEST_TOPIC, 3, new Integer(1).shortValue () );
+ fKafkaAdminClient.createTopics ( Arrays.asList ( topicRequest ) );
Thread.sleep(5000);
} catch (Exception e){
e.printStackTrace(System.out);
- }
- }
-
- private static Properties getKafkaProperties(String logDir, int port, int brokerId) {
+ }
+ }
+
+ private static Properties getKafkaProperties(String logDir, int port, int brokerId) {
Properties properties = new Properties();
properties.put("port", port + "");
properties.put("broker.id", brokerId + "");
@@ -122,47 +121,47 @@ public class EmbedConfigurationReader {
properties.put("consumer.timeout.ms", -1);
return properties;
}
-
- private static Properties getZookeeperProperties(int port, String zookeeperDir) {
+
+ private static Properties getZookeeperProperties(int port, String zookeeperDir) {
Properties properties = new Properties();
properties.put("clientPort", port + "");
properties.put("dataDir", zookeeperDir);
return properties;
}
- public void tearDown() throws Exception {
- DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader());
- if(fKafkaAdminClient!=null)
- fKafkaAdminClient.deleteTopics(Arrays.asList(TEST_TOPIC));
- //AdminUtils.deleteTopic(dMaaPZkClient, TEST_TOPIC);
- //dMaaPZkClient.delete(dir + DEFAULT_KAFKA_LOG_DIR);
- //dMaaPZkClient.delete(dir + DEFAULT_ZOOKEEPER_LOG_DIR);
- kafkaLocal.stop();
- FileUtils.cleanDirectory(new File(dir + DEFAULT_KAFKA_LOG_DIR));
- }
-
-
- public ConfigurationReader buildConfigurationReader() throws Exception {
-
- setUp();
-
- PropertyReader propertyReader = new PropertyReader();
- DMaaPMetricsSet dMaaPMetricsSet = new DMaaPMetricsSet(propertyReader);
- DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(propertyReader);
- DMaaPZkConfigDb dMaaPZkConfigDb = new DMaaPZkConfigDb(dMaaPZkClient, propertyReader);
- CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader());
- DMaaPKafkaConsumerFactory dMaaPKafkaConsumerFactory = new DMaaPKafkaConsumerFactory(dMaaPMetricsSet, curatorFramework,null);
- MemoryQueue memoryQueue = new MemoryQueue();
- MemoryMetaBroker memoryMetaBroker = new MemoryMetaBroker(memoryQueue, dMaaPZkConfigDb);
- BaseNsaApiDbImpl<NsaSimpleApiKey> baseNsaApiDbImpl = new BaseNsaApiDbImpl<>(dMaaPZkConfigDb, new NsaSimpleApiKeyFactory());
- DMaaPAuthenticator<NsaSimpleApiKey> dMaaPAuthenticator = new DMaaPAuthenticatorImpl<>(baseNsaApiDbImpl);
- KafkaPublisher kafkaPublisher = new KafkaPublisher(propertyReader);
- DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker = new DMaaPKafkaMetaBroker(propertyReader, dMaaPZkClient, dMaaPZkConfigDb);
-
- return new ConfigurationReader(propertyReader,
- dMaaPMetricsSet, dMaaPZkClient, dMaaPZkConfigDb, kafkaPublisher,
- curatorFramework, dMaaPKafkaConsumerFactory, dMaaPKafkaMetaBroker,
- memoryQueue, memoryMetaBroker, baseNsaApiDbImpl, dMaaPAuthenticator);
-
- }
+ public void tearDown() throws Exception {
+ DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(new PropertyReader());
+ if(fKafkaAdminClient!=null)
+ fKafkaAdminClient.deleteTopics(Arrays.asList(TEST_TOPIC));
+ //AdminUtils.deleteTopic(dMaaPZkClient, TEST_TOPIC);
+ //dMaaPZkClient.delete(dir + DEFAULT_KAFKA_LOG_DIR);
+ //dMaaPZkClient.delete(dir + DEFAULT_ZOOKEEPER_LOG_DIR);
+ kafkaLocal.stop();
+ FileUtils.cleanDirectory(new File(dir + DEFAULT_KAFKA_LOG_DIR));
+ }
+
+
+ public ConfigurationReader buildConfigurationReader() throws Exception {
+
+ setUp();
+
+ PropertyReader propertyReader = new PropertyReader();
+ DMaaPMetricsSet dMaaPMetricsSet = new DMaaPMetricsSet(propertyReader);
+ DMaaPZkClient dMaaPZkClient = new DMaaPZkClient(propertyReader);
+ DMaaPZkConfigDb dMaaPZkConfigDb = new DMaaPZkConfigDb(dMaaPZkClient, propertyReader);
+ CuratorFramework curatorFramework = DMaaPCuratorFactory.getCurator(new PropertyReader());
+ DMaaPKafkaConsumerFactory dMaaPKafkaConsumerFactory = new DMaaPKafkaConsumerFactory(dMaaPMetricsSet, curatorFramework,null);
+ MemoryQueue memoryQueue = new MemoryQueue();
+ MemoryMetaBroker memoryMetaBroker = new MemoryMetaBroker(memoryQueue, dMaaPZkConfigDb);
+ BaseNsaApiDbImpl<NsaSimpleApiKey> baseNsaApiDbImpl = new BaseNsaApiDbImpl<>(dMaaPZkConfigDb, new NsaSimpleApiKeyFactory());
+ DMaaPAuthenticator<NsaSimpleApiKey> dMaaPAuthenticator = new DMaaPAuthenticatorImpl<>(baseNsaApiDbImpl);
+ KafkaPublisher kafkaPublisher = new KafkaPublisher(propertyReader);
+ DMaaPKafkaMetaBroker dMaaPKafkaMetaBroker = new DMaaPKafkaMetaBroker(propertyReader, dMaaPZkClient, dMaaPZkConfigDb);
+
+ return new ConfigurationReader(propertyReader,
+ dMaaPMetricsSet, dMaaPZkClient, dMaaPZkConfigDb, kafkaPublisher,
+ curatorFramework, dMaaPKafkaConsumerFactory, dMaaPKafkaMetaBroker,
+ memoryQueue, memoryMetaBroker, baseNsaApiDbImpl, dMaaPAuthenticator);
+
+ }
}
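One observation on the setUp() block above: topic creation is followed by a fixed Thread.sleep(5000), and the replication factor is built with new Integer(1).shortValue(), which boxes only to unbox. An alternative sketch, not what this commit does, would cast the literal and block on the CreateTopicsResult future instead of sleeping:

    import java.util.Collections;
    import java.util.concurrent.TimeUnit;
    import org.apache.kafka.clients.admin.NewTopic;

    // Alternative sketch: wait for the broker to acknowledge topic creation
    // rather than sleeping for a fixed five seconds.
    final NewTopic topicRequest = new NewTopic(TEST_TOPIC, 3, (short) 1);
    fKafkaAdminClient.createTopics(Collections.singletonList(topicRequest))
            .all()
            .get(30, TimeUnit.SECONDS);  // fails fast if the broker never settles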
diff --git a/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java b/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java
index 8a4009b..74f6750 100644
--- a/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java
+++ b/src/test/java/org/onap/dmaap/mr/cambria/utils/UtilsTest.java
@@ -1,5 +1,5 @@
/*******************************************************************************
-/*-
+ /*-
* ============LICENSE_START=======================================================
* ONAP Policy Engine
* ================================================================================
@@ -8,9 +8,9 @@
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,22 +18,26 @@
* limitations under the License.
* ============LICENSE_END=========================================================
*/
-
- package org.onap.dmaap.mr.cambria.utils;
+
+package org.onap.dmaap.mr.cambria.utils;
import static org.junit.Assert.*;
import java.security.Principal;
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.Properties;
import org.apache.http.auth.BasicUserPrincipal;
import org.junit.After;
import org.junit.Before;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.contrib.java.lang.system.EnvironmentVariables;
import org.springframework.mock.web.MockHttpServletRequest;
+
import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
import org.onap.dmaap.dmf.mr.utils.Utils;
@@ -41,6 +45,9 @@ public class UtilsTest {
private static final String DATE_FORMAT = "dd-MM-yyyy::hh:mm:ss:SSS";
+ @Rule
+ public EnvironmentVariables environmentVariables = new EnvironmentVariables();
+
@Before
public void setUp() throws Exception {
}
@@ -57,33 +64,33 @@ public class UtilsTest {
String expectedStr = sdf.format(now);
assertNotNull(dateStr);
assertTrue("Formatted date does not match - expected [" + expectedStr
- + "] received [" + dateStr + "]",
+ + "] received [" + dateStr + "]",
dateStr.equalsIgnoreCase(expectedStr));
}
-
+
@Test
public void testgetUserApiKey(){
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader(Utils.CAMBRIA_AUTH_HEADER, "User:Password");
assertEquals("User", Utils.getUserApiKey(request));
-
+
MockHttpServletRequest request2 = new MockHttpServletRequest();
Principal principal = new BasicUserPrincipal("User@Test");
request2.setUserPrincipal(principal);
request2.addHeader("Authorization", "test");
assertEquals("User", Utils.getUserApiKey(request2));
-
+
MockHttpServletRequest request3 = new MockHttpServletRequest();
assertNull(Utils.getUserApiKey(request3));
}
-
+
@Test
public void testgetFromattedBatchSequenceId(){
Long x = new Long(1234);
String str = Utils.getFromattedBatchSequenceId(x);
- assertEquals("001234", str);
+ assertEquals("001234", str);
}
-
+
@Test
public void testmessageLengthInBytes(){
String str = "TestString";
@@ -99,38 +106,58 @@ public class UtilsTest {
assertNull(Utils.getResponseTransactionId(null));
assertNull(Utils.getResponseTransactionId(""));
}
-
+
@Test
public void testgetSleepMsForRate(){
long x = Utils.getSleepMsForRate(1024.124);
assertEquals(1000, x);
assertEquals(0, Utils.getSleepMsForRate(-1));
}
-
+
@Test
public void testgetRemoteAddress(){
DMaaPContext dMaapContext = new DMaaPContext();
MockHttpServletRequest request = new MockHttpServletRequest();
-
+
dMaapContext.setRequest(request);
-
+
assertEquals(request.getRemoteAddr(), Utils.getRemoteAddress(dMaapContext));
-
+
request.addHeader("X-Forwarded-For", "XForward");
assertEquals("XForward", Utils.getRemoteAddress(dMaapContext));
-
-
+
+
}
-
+
@Test
public void testGetKey(){
assertNotNull(Utils.getKafkaproperty());
-
+
}
-
+
@Test
public void testCadiEnable(){
assertFalse(Utils.isCadiEnabled());
-
+
+ }
+
+ @Test
+ public void testaddSaslPropsPlain() {
+ Properties props = new Properties();
+ props.put("security.protocol", "SASL_PLAINTEXT");
+ props.put(Utils.SASL_MECH, "PLAIN");
+ props.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='admin' password='admin_secret';");
+ assertEquals(props, Utils.addSaslProps());
+ }
+
+ @Test
+ public void testaddSaslPropsScram(){
+ Properties props = new Properties();
+ environmentVariables.set("SASLMECH", "scram-sha-512");
+ environmentVariables.set("JAASLOGIN", "org.apache.kafka.common.security.scram.ScramLoginModule required username='onap-dmaap-strimzi-kafka-admin' password='qul6A3TLvidY';");
+ props.put("security.protocol", "SASL_PLAINTEXT");
+ props.put(Utils.SASL_MECH, "SCRAM-SHA-512");
+ props.put("sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username='onap-dmaap-strimzi-kafka-admin' password='qul6A3TLvidY';");
+ assertEquals(props, Utils.addSaslProps());
}
}
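The @Rule used in the SCRAM test above is the EnvironmentVariables rule from the JUnit 4 system-rules library (the org.junit.contrib.java.lang.system import): it mutates the process environment for the duration of a single test and restores the original environment afterwards, which is why the PLAIN-default test is unaffected by test ordering. A self-contained sketch of that behavior:

    import static org.junit.Assert.assertEquals;
    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.contrib.java.lang.system.EnvironmentVariables;

    public class EnvironmentVariablesSketchTest {

        @Rule
        public final EnvironmentVariables environmentVariables = new EnvironmentVariables();

        @Test
        public void overridesAreScopedToTheTest() {
            environmentVariables.set("SASLMECH", "scram-sha-512");
            assertEquals("scram-sha-512", System.getenv("SASLMECH"));
            // After this test the rule restores the prior environment, so other
            // tests in the class see the original (unset) SASLMECH value.
        }
    }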