Diffstat (limited to 'src/test/java/com/att/nsa/cambria/backends/kafka')
-rw-r--r--  src/test/java/com/att/nsa/cambria/backends/kafka/CuratorFrameworkImpl.java   | 278
-rw-r--r--  src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java         |  42
-rw-r--r--  src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java | 256
-rw-r--r--  src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java     | 153
-rw-r--r--  src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java         | 123
-rw-r--r--  src/test/java/com/att/nsa/cambria/backends/kafka/TestRunner.java             |  41
6 files changed, 0 insertions, 893 deletions
diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/CuratorFrameworkImpl.java b/src/test/java/com/att/nsa/cambria/backends/kafka/CuratorFrameworkImpl.java
deleted file mode 100644
index a12e96c..0000000
--- a/src/test/java/com/att/nsa/cambria/backends/kafka/CuratorFrameworkImpl.java
+++ /dev/null
@@ -1,278 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package com.att.nsa.cambria.backends.kafka;
-
-import java.util.concurrent.TimeUnit;
-
-import org.apache.curator.CuratorZookeeperClient;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.WatcherRemoveCuratorFramework;
-import org.apache.curator.framework.api.CreateBuilder;
-import org.apache.curator.framework.api.CuratorListener;
-import org.apache.curator.framework.api.DeleteBuilder;
-import org.apache.curator.framework.api.ExistsBuilder;
-import org.apache.curator.framework.api.GetACLBuilder;
-import org.apache.curator.framework.api.GetChildrenBuilder;
-import org.apache.curator.framework.api.GetConfigBuilder;
-import org.apache.curator.framework.api.GetDataBuilder;
-import org.apache.curator.framework.api.ReconfigBuilder;
-import org.apache.curator.framework.api.RemoveWatchesBuilder;
-import org.apache.curator.framework.api.SetACLBuilder;
-import org.apache.curator.framework.api.SetDataBuilder;
-import org.apache.curator.framework.api.SyncBuilder;
-import org.apache.curator.framework.api.UnhandledErrorListener;
-import org.apache.curator.framework.api.transaction.CuratorMultiTransaction;
-import org.apache.curator.framework.api.transaction.CuratorTransaction;
-import org.apache.curator.framework.api.transaction.TransactionOp;
-import org.apache.curator.framework.imps.CuratorFrameworkState;
-import org.apache.curator.framework.listen.Listenable;
-import org.apache.curator.framework.schema.SchemaSet;
-import org.apache.curator.framework.state.ConnectionStateErrorPolicy;
-import org.apache.curator.framework.state.ConnectionStateListener;
-import org.apache.curator.utils.EnsurePath;
-import org.apache.zookeeper.Watcher;
-import org.apache.zookeeper.server.quorum.flexible.QuorumVerifier;
-
-public class CuratorFrameworkImpl implements CuratorFramework {
-
- @Override
- public void blockUntilConnected() throws InterruptedException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public boolean blockUntilConnected(int arg0, TimeUnit arg1) throws InterruptedException {
- // TODO Auto-generated method stub
- return false;
- }
-
- @Override
- public ExistsBuilder checkExists() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void clearWatcherReferences(Watcher arg0) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void close() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public CreateBuilder create() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public DeleteBuilder delete() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public GetACLBuilder getACL() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public GetChildrenBuilder getChildren() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Listenable<ConnectionStateListener> getConnectionStateListenable() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Listenable<CuratorListener> getCuratorListenable() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public GetDataBuilder getData() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public String getNamespace() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public CuratorFrameworkState getState() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Listenable<UnhandledErrorListener> getUnhandledErrorListenable() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public CuratorZookeeperClient getZookeeperClient() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public CuratorTransaction inTransaction() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public boolean isStarted() {
- // TODO Auto-generated method stub
- return false;
- }
-
- @Override
- public EnsurePath newNamespaceAwareEnsurePath(String arg0) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public CuratorFramework nonNamespaceView() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public SetACLBuilder setACL() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public SetDataBuilder setData() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void start() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public SyncBuilder sync() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void sync(String arg0, Object arg1) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public CuratorFramework usingNamespace(String arg0) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public ReconfigBuilder reconfig() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public GetConfigBuilder getConfig() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public CuratorMultiTransaction transaction() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public TransactionOp transactionOp() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void createContainers(String path) throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public RemoveWatchesBuilder watches() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public WatcherRemoveCuratorFramework newWatcherRemoveCuratorFramework() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public ConnectionStateErrorPolicy getConnectionStateErrorPolicy() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public QuorumVerifier getCurrentConfig() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public SchemaSet getSchemaSet() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public boolean isZk34CompatibilityMode() {
- // TODO Auto-generated method stub
- return false;
- }
-
-}
diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java b/src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java
deleted file mode 100644
index 54ff469..0000000
--- a/src/test/java/com/att/nsa/cambria/backends/kafka/JUnitTestSuite.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package com.att.nsa.cambria.backends.kafka;
-
-import junit.framework.TestSuite;
-
-import org.junit.runner.RunWith;
-import org.junit.runners.Suite;
-import org.junit.runners.Suite.SuiteClasses;
-import org.apache.log4j.Logger;
-
-@RunWith(Suite.class)
-@SuiteClasses({ KafkaConsumerCacheTest.class, KafkaPublisherTest.class, })
-public class JUnitTestSuite {
- private static final Logger LOGGER = Logger.getLogger(JUnitTestSuite.class);
-
- public static void main(String[] args) {
- LOGGER.info("Running the test suite");
-
- TestSuite tstSuite = new TestSuite();
- LOGGER.info("Total Test Counts " + tstSuite.countTestCases());
- }
-
-}
diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java b/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java
deleted file mode 100644
index 06d7b58..0000000
--- a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaConsumerCacheTest.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package com.att.nsa.cambria.backends.kafka;
-
-import static org.junit.Assert.*;
-
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.modules.junit4.PowerMockRunner;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.dmf.mr.backends.kafka.Kafka011Consumer;
-import com.att.dmf.mr.backends.kafka.KafkaConsumerCache;
-import com.att.dmf.mr.backends.kafka.KafkaConsumerCache.KafkaConsumerCacheException;
-import com.att.dmf.mr.constants.CambriaConstants;
-import com.att.dmf.mr.security.DMaaPAuthenticatorImpl;
-
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({ AJSCPropertiesMap.class })
-public class KafkaConsumerCacheTest {
- private KafkaConsumerCache kafkaConsumerCache =null;
- @Mock
- private ConcurrentHashMap<String, Kafka011Consumer> fConsumers;
- @Mock
- private MetricsSet fMetrics;
-
- @Before
- public void setUp() throws Exception {
- MockitoAnnotations.initMocks(this);
-
- }
-
- @After
- public void tearDown() throws Exception {
- }
-
-
- @Test
- public void testSweep() {
- kafkaConsumerCache = new KafkaConsumerCache();
- PowerMockito.mockStatic(AJSCPropertiesMap.class);
- PowerMockito.when(AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "kSetting_TouchEveryMs")).thenReturn("100");
- kafkaConsumerCache.sweep();
-
- }
-
-
- // DOES NOT WORK
- @Test
- public void testStartCache() {
-
- /*
- * KafkaConsumerCache kafka = null;
- *
- * try { kafka = new KafkaConsumerCache("123", null);
- *
- * } catch (NoClassDefFoundError e) { try { kafka.startCache("DMAAP",
- * null); } catch (NullPointerException e1) { // TODO Auto-generated
- * catch block assertTrue(true); } catch (KafkaConsumerCacheException
- * e1) { // TODO Auto-generated catch block e1.printStackTrace(); } }
- */
-
-
- new CuratorFrameworkImpl();
- new MetricsSetImpl();
- KafkaConsumerCache kafka=null;
- try {
- kafka = new KafkaConsumerCache();
- kafka.setfApiId("1");
- kafka.startCache("DMAAP", null);
- } catch (NoClassDefFoundError e) {
-
- } catch (KafkaConsumerCacheException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- }
-
- @Test
- public void testGetCuratorFramework() {
-
- CuratorFramework curator = new CuratorFrameworkImpl();
- new MetricsSetImpl();
- try {
-
- } catch (NoClassDefFoundError e) {
-
- KafkaConsumerCache.getCuratorFramework(curator);
- }
-
- }
-
- /*
- * @Test public void testStopCache() {
- *
- * KafkaConsumerCache kafka = null; new CuratorFrameworkImpl(); new
- * MetricsSetImpl(); try { kafka = new KafkaConsumerCache("123", null);
- * kafka.stopCache(); } catch (NoClassDefFoundError e) {
- *
- * }
- *
- * }
- */
-
- @Test
- public void testGetConsumerFor() {
-
- KafkaConsumerCache kafka = null;
-
- try {
- kafka = new KafkaConsumerCache();
- kafka.getConsumerFor("testTopic", "CG1", "23");
- } catch (NoClassDefFoundError e) {
-
- } catch (KafkaConsumerCacheException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- }
-
- @Test
- public void testPutConsumerFor() {
-
- Kafka011Consumer consumer = null;
- KafkaConsumerCache kafka = null;
-
- try {
- kafka = new KafkaConsumerCache();
-
- } catch (NoClassDefFoundError e) {
- try {
- kafka.putConsumerFor("testTopic", "CG1", "23", consumer);
- } catch (NullPointerException e1) {
- // TODO Auto-generated catch block
- assertTrue(true);
- } catch (KafkaConsumerCacheException e1) {
- // TODO Auto-generated catch block
- e1.printStackTrace();
- }
- }
-
- }
-
- @Test
- public void testGetConsumers() {
-
- KafkaConsumerCache kafka = null;
-
- try {
- kafka = new KafkaConsumerCache();
-
- } catch (NoClassDefFoundError e) {
- try {
- kafka.getConsumers();
- } catch (NullPointerException e1) {
- // TODO Auto-generated catch block
- assertTrue(true);
- }
- }
-
- }
-
- @Test
- public void testDropAllConsumers() {
-
- KafkaConsumerCache kafka = null;
- try {
- kafka = new KafkaConsumerCache();
-
- } catch (NoClassDefFoundError e) {
- try {
- kafka.dropAllConsumers();
- } catch (NullPointerException e1) {
- // TODO Auto-generated catch block
- assertTrue(true);
- }
- }
-
- }
-
- @Test
- public void testSignalOwnership() {
-
- KafkaConsumerCache kafka = null;
-
- try {
- kafka = new KafkaConsumerCache();
- // kafka.signalOwnership("testTopic", "CG1", "23");
- } catch (NoClassDefFoundError e) {
- try {
- kafka.signalOwnership("testTopic", "CG1", "23");
- } catch (KafkaConsumerCacheException e1) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- } catch (NullPointerException e1) {
- // TODO Auto-generated catch block
- // assertTrue(true);
- e1.printStackTrace();
- }
-
- }
-
- // assertTrue(true);
- }
-
- @Test
- public void testDropConsumer() {
-
- KafkaConsumerCache kafka = null;
-
- try {
- kafka = new KafkaConsumerCache();
- // kafka.dropConsumer("testTopic", "CG1", "23");
- } catch (NoClassDefFoundError e) {
- try {
- kafka.dropConsumer("testTopic", "CG1", "23");
- } catch (NullPointerException e1) {
- // TODO Auto-generated catch block
- assertTrue(true);
- }
- }
-
- }
-
-}
diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java b/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java
deleted file mode 100644
index 3673845..0000000
--- a/src/test/java/com/att/nsa/cambria/backends/kafka/KafkaPublisherTest.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package com.att.nsa.cambria.backends.kafka;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-
-import com.att.ajsc.filemonitor.AJSCPropertiesMap;
-import com.att.dmf.mr.backends.Publisher.message;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-
-import kafka.common.FailedToSendMessageException;
-import kafka.producer.KeyedMessage;
-
-public class KafkaPublisherTest {
-
-
-
- /*@Before
- public void setUp() throws Exception {
- ClassLoader classLoader = getClass().getClassLoader();
- AJSCPropertiesMap.refresh(new File(classLoader.getResource("MsgRtrApi.properties").getFile()));
- }
-
- @After
- public void tearDown() throws Exception {
- }
-
- @Test
- public void testSendMessages() {
-
- String topic = "testTopic";
-
- KafkaPublisher kafka = null;
- try {
- kafka = new KafkaPublisher(null);
-
- } catch (missingReqdSetting e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- } catch (NoClassDefFoundError e) {
- try {
- kafka.sendMessage(topic, null);
- } catch (NullPointerException e1) {
- // TODO Auto-generated catch block
- assertTrue(true);
- } catch (FailedToSendMessageException e1) {
- // TODO Auto-generated catch block
- e1.printStackTrace();
- } catch (IOException e1) {
- // TODO Auto-generated catch block
- e1.printStackTrace();
- }
- } catch (FailedToSendMessageException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- }
-
- @Test
- public void testSendBatchMessage() {
-
- String topic = "testTopic";
-
- KafkaPublisher kafka = null;
- ArrayList<KeyedMessage<String, String>> kms = null;
- try {
- kafka = new KafkaPublisher(null);
-
- } catch (missingReqdSetting e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- } catch (NoClassDefFoundError e) {
- try {
- kafka.sendBatchMessage(topic, kms);
- } catch (NullPointerException e1) {
- // TODO Auto-generated catch block
- assertTrue(true);
- } catch (IOException e1) {
- // TODO Auto-generated catch block
- e1.printStackTrace();
- }
- } catch (FailedToSendMessageException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- }
-
- @Test
- public void sendMessages() {
-
- String topic = "testTopic";
-
- List<message> msgs = null;
-
- KafkaPublisher kafka = null;
- //ArrayList<KeyedMessage<String, String>> kms = null;
- try {
- kafka = new KafkaPublisher(null);
-
- } catch (missingReqdSetting e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- } catch (NoClassDefFoundError e) {
- try {
- kafka.sendMessages(topic, msgs);
- } catch (NullPointerException e1) {
- // TODO Auto-generated catch block
- assertTrue(true);
- } catch (FailedToSendMessageException e1) {
- // TODO Auto-generated catch block
- e1.printStackTrace();
- } catch (IOException e1) {
- // TODO Auto-generated catch block
- e1.printStackTrace();
- }
- } catch (FailedToSendMessageException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- }*/
-
-}
diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java b/src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java
deleted file mode 100644
index b5f7b74..0000000
--- a/src/test/java/com/att/nsa/cambria/backends/kafka/MetricsSetImpl.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package com.att.nsa.cambria.backends.kafka;
-
-import java.util.List;
-import java.util.Map;
-
-import org.json.JSONObject;
-
-import com.att.dmf.mr.backends.MetricsSet;
-import com.att.nsa.metrics.CdmMeasuredItem;
-
-public class MetricsSetImpl implements MetricsSet {
-
- @Override
- public List<? extends CdmMetricEntry> getEntries() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public CdmMeasuredItem getItem(String arg0) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Map<String, CdmMeasuredItem> getItems() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void putItem(String arg0, CdmMeasuredItem arg1) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void removeItem(String arg0) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public int size() {
- // TODO Auto-generated method stub
- return 0;
- }
-
- @Override
- public JSONObject toJson() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void setupCambriaSender() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void onRouteComplete(String name, long durationMs) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void publishTick(int amount) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void consumeTick(int amount) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void onKafkaConsumerCacheMiss() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void onKafkaConsumerCacheHit() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void onKafkaConsumerClaimed() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void onKafkaConsumerTimeout() {
- // TODO Auto-generated method stub
-
- }
-
-}
diff --git a/src/test/java/com/att/nsa/cambria/backends/kafka/TestRunner.java b/src/test/java/com/att/nsa/cambria/backends/kafka/TestRunner.java
deleted file mode 100644
index 53ea31b..0000000
--- a/src/test/java/com/att/nsa/cambria/backends/kafka/TestRunner.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP Policy Engine
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package com.att.nsa.cambria.backends.kafka;
-
-import org.junit.runner.JUnitCore;
-import org.junit.runner.Result;
-import org.junit.runner.notification.Failure;
-import org.apache.log4j.Logger;
-
-public class TestRunner {
- private static final Logger LOGGER = Logger.getLogger(TestRunner.class);
-
- public static void main(String[] args) {
- // TODO Auto-generated method stub
- Result result = JUnitCore.runClasses(JUnitTestSuite.class);
- for (Failure failure : result.getFailures()) {
- LOGGER.info(failure.toString());
-
- }
- LOGGER.info(result.wasSuccessful());
- }
-
-}