14 files changed, 592 insertions, 198 deletions
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
new file mode 100644
index 00000000..8a7460e8
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
@@ -0,0 +1,104 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+package org.onap.dmaap.datarouter.node;
+
+import org.apache.commons.lang3.reflect.FieldUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Hashtable;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+@RunWith(PowerMockRunner.class)
+@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeConfigManager")
+public class DeliveryTest {
+
+ @Mock
+ private DeliveryQueue deliveryQueue;
+
+ private File nDir = new File("tmp/n");
+ private File sDir = new File("tmp/s");
+
+ @Before
+ public void setUp() throws IOException {
+ nDir.mkdirs();
+ sDir.mkdirs();
+ File newNDir = new File("tmp/n/0");
+ newNDir.mkdirs();
+ File newNFile = new File("tmp/n/0/testN.txt");
+ newNFile.createNewFile();
+ File newSDir = new File("tmp/s/0/1");
+ newSDir.mkdirs();
+ File newSpoolFile = new File("tmp/s/0/1/testSpool.txt");
+ newSpoolFile.createNewFile();
+ }
+
+ @Test
+ public void Validate_Reset_Queue_Calls_Reset_Queue_On_Delivery_Queue_Object() throws IllegalAccessException {
+ NodeConfigManager config = mockNodeConfigManager();
+ Delivery delivery = new Delivery(config);
+ Hashtable<String, DeliveryQueue> dqs = new Hashtable<>();
+ dqs.put("spool/s/0/1", deliveryQueue);
+ FieldUtils.writeDeclaredField(delivery, "dqs", dqs, true);
+ delivery.resetQueue("spool/s/0/1");
+ verify(deliveryQueue, times(1)).resetQueue();
+ }
+
+ @After
+ public void tearDown() {
+ nDir.delete();
+ sDir.delete();
+ File tmpDir = new File("tmp");
+ tmpDir.delete();
+ }
+
+ private NodeConfigManager mockNodeConfigManager() {
+ PowerMockito.mockStatic(NodeConfigManager.class);
+ NodeConfigManager config = mock(NodeConfigManager.class);
+ PowerMockito.when(config.isConfigured()).thenReturn(true);
+ PowerMockito.when(config.getAllDests()).thenReturn(createDestInfoObjects());
+ PowerMockito.when(config.getFreeDiskStart()).thenReturn(0.49);
+ PowerMockito.when(config.getFreeDiskStop()).thenReturn(0.5);
+ PowerMockito.when(config.getDeliveryThreads()).thenReturn(0);
+ PowerMockito.when(config.getSpoolBase()).thenReturn("tmp");
+ return config;
+ }
+
+ private DestInfo[] createDestInfoObjects() {
+ DestInfo[] destInfos = new DestInfo[1];
+ DestInfo destInfo = new DestInfo("node.datarouternew.com", "spool/s/0/1", "1", "logs/", "/subs/1", "user1", "Basic dXNlcjE6cGFzc3dvcmQx", false, true);
+ destInfos[0] = destInfo;
+ return destInfos;
+ }
+}
diff --git a/datarouter-prov/pom.xml b/datarouter-prov/pom.xml
index 84731413..76137578 100755
--- a/datarouter-prov/pom.xml
+++ b/datarouter-prov/pom.xml
@@ -216,6 +216,16 @@
<version>1.2.17</version>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>com.h2database</groupId>
+ <artifactId>h2</artifactId>
+ <version>1.4.197</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-entitymanager</artifactId>
+ <version>5.2.9.Final</version>
+ </dependency>
</dependencies>
<profiles>
<profile>
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
index 2ea60d26..a021a60e 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
@@ -60,7 +60,7 @@ public class Group extends Syncable {
public static Group getGroupMatching(Group gup) {
String sql = String.format(
- "select * from GROUPS where NAME = \"%s\"",
+ "select * from GROUPS where NAME='%s'",
gup.getName()
);
List<Group> list = getGroupsForSQL(sql);
@@ -69,7 +69,7 @@ public class Group extends Syncable {
public static Group getGroupMatching(Group gup, int groupid) {
String sql = String.format(
- "select * from GROUPS where NAME = \"%s\" and GROUPID != %d ",
+ "select * from GROUPS where NAME = '%s' and GROUPID != %d ",
gup.getName(),
gup.getGroupid()
);
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java
index afb0de24..dec3cc13 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java
@@ -29,14 +29,7 @@ import java.io.Writer;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
-import java.util.Collection;
-import java.util.Enumeration;
-import java.util.LinkedHashMap;
-import java.util.Iterator;
-import java.util.Locale;
-import java.util.Map;
-import java.util.ResourceBundle;
-import java.util.Set;
+import java.util.*;

import org.json.JSONArray;
import org.json.JSONException;
@@ -140,6 +133,46 @@ public class LOGJSONObject {
}

/**
+ * Returns a hash code value for the object. This method is
+ * supported for the benefit of hash tables such as those provided by
+ * {@link HashMap}.
+ * <p>
+ * The general contract of {@code hashCode} is:
+ * <ul>
+ * <li>Whenever it is invoked on the same object more than once during
+ * an execution of a Java application, the {@code hashCode} method
+ * must consistently return the same integer, provided no information
+ * used in {@code equals} comparisons on the object is modified.
+ * This integer need not remain consistent from one execution of an
+ * application to another execution of the same application.
+ * <li>If two objects are equal according to the {@code equals(Object)}
+ * method, then calling the {@code hashCode} method on each of
+ * the two objects must produce the same integer result.
+ * <li>It is <em>not</em> required that if two objects are unequal
+ * according to the {@link Object#equals(Object)}
+ * method, then calling the {@code hashCode} method on each of the
+ * two objects must produce distinct integer results. However, the
+ * programmer should be aware that producing distinct integer results
+ * for unequal objects may improve the performance of hash tables.
+ * </ul>
+ * <p>
+ * As much as is reasonably practical, the hashCode method defined by
+ * class {@code Object} does return distinct integers for distinct
+ * objects. (This is typically implemented by converting the internal
+ * address of the object into an integer, but this implementation
+ * technique is not required by the
+ * Java™ programming language.)
+ *
+ * @return a hash code value for this object.
+ * @see Object#equals(Object)
+ * @see System#identityHashCode
+ */
+ @Override
+ public int hashCode() {
+ return super.hashCode();
+ }
+
+ /**
* Get the "null" string value.
*
* @return The string "null".
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
index a5281c06..28740c0f 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
@@ -152,41 +152,40 @@ public class DailyLatencyReport extends ReportBase {
DB db = new DB();
@SuppressWarnings("resource")
Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
- ps.setLong(1, from);
- ps.setLong(2, to);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- String id = rs.getString("PUBLISH_ID");
- int feed = rs.getInt("FEEDID");
- long etime = rs.getLong("EVENT_TIME");
- String type = rs.getString("TYPE");
- String fid = rs.getString("FEED_FILEID");
- long clen = rs.getLong("CONTENT_LENGTH");
- String date = sdf.format(new Date(getPstart(id)));
- String key = date + "," + feed;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters(date, feed);
- map.put(key, c);
+ try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+ ps.setLong(1, from);
+ ps.setLong(2, to);
+ try(ResultSet rs = ps.executeQuery()) {
+ while (rs.next()) {
+ String id = rs.getString("PUBLISH_ID");
+ int feed = rs.getInt("FEEDID");
+ long etime = rs.getLong("EVENT_TIME");
+ String type = rs.getString("TYPE");
+ String fid = rs.getString("FEED_FILEID");
+ long clen = rs.getLong("CONTENT_LENGTH");
+ String date = sdf.format(new Date(getPstart(id)));
+ String key = date + "," + feed;
+ Counters c = map.get(key);
+ if (c == null) {
+ c = new Counters(date, feed);
+ map.put(key, c);
+ }
+ c.addEvent(etime, type, id, fid, clen);
+ }
}
- c.addEvent(etime, type, id, fid, clen);
+
+ db.release(conn);
}
- rs.close();
- ps.close();
- db.release(conn);
} catch (SQLException e) {
e.printStackTrace();
}
logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
- try {
- PrintWriter os = new PrintWriter(outfile);
+ try (PrintWriter os = new PrintWriter(outfile)){
os.println("date,feedid,minsize,maxsize,avgsize,minlat,maxlat,avglat,fanout");
for (String key : new TreeSet<String>(map.keySet())) {
Counters c = map.get(key);
os.println(c.toString());
}
- os.close();
} catch (FileNotFoundException e) {
System.err.println("File cannot be written: "+outfile);
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
index ba8f15a0..549511b7 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
@@ -145,40 +145,38 @@ public class LatencyReport extends ReportBase {
DB db = new DB();
@SuppressWarnings("resource")
Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
+ try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)){
ps.setLong(1, from);
ps.setLong(2, to);
- ResultSet rs = ps.executeQuery();
- PrintWriter os = new PrintWriter(outfile);
- os.println("recordid,feedid,uri,size,min,max,avg,fanout");
- Counters c = null;
- while (rs.next()) {
- long etime = rs.getLong("EVENT_TIME");
- String type = rs.getString("TYPE");
- String id = rs.getString("PUBLISH_ID");
- String fid = rs.getString("FEED_FILEID");
- int feed = rs.getInt("FEEDID");
- long clen = rs.getLong("CONTENT_LENGTH");
- if (c != null && !id.equals(c.id)) {
- String line = id + "," + c.toString();
- os.println(line);
- c = null;
+ try(ResultSet rs = ps.executeQuery()) {
+ try(PrintWriter os = new PrintWriter(outfile)) {
+ os.println("recordid,feedid,uri,size,min,max,avg,fanout");
+ Counters c = null;
+ while (rs.next()) {
+ long etime = rs.getLong("EVENT_TIME");
+ String type = rs.getString("TYPE");
+ String id = rs.getString("PUBLISH_ID");
+ String fid = rs.getString("FEED_FILEID");
+ int feed = rs.getInt("FEEDID");
+ long clen = rs.getLong("CONTENT_LENGTH");
+ if (c != null && !id.equals(c.id)) {
+ String line = id + "," + c.toString();
+ os.println(line);
+ c = null;
+ }
+ if (c == null) {
+ c = new Counters(id, feed, clen, fid);
+ }
+ if (feed != c.feedid)
+ System.err.println("Feed ID mismatch, " + feed + " <=> " + c.feedid);
+ if (clen != c.clen)
+ System.err.println("Cont Len mismatch, " + clen + " <=> " + c.clen);
+ c.addEvent(type, etime);
+ }
}
- if (c == null) {
- c = new Counters(id, feed, clen, fid);
- }
- if (feed != c.feedid)
- System.err.println("Feed ID mismatch, " + feed + " <=> " + c.feedid);
- if (clen != c.clen)
- System.err.println("Cont Len mismatch, " + clen + " <=> " + c.clen);
-// if (fid != c.fileid)
-// System.err.println("File ID mismatch, "+fid+" <=> "+c.fileid);
- c.addEvent(type, etime);
+ db.release(conn);
+ }
}
- rs.close();
- ps.close();
- db.release(conn);
- os.close();
} catch (FileNotFoundException e) {
System.err.println("File cannot be written: " + outfile);
} catch (SQLException e) {
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
index e00c3944..51beac92 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
@@ -98,62 +98,61 @@ public class SubscriberReport extends ReportBase {
public void run() {
Map<String, Counters> map = new HashMap<String, Counters>();
long start = System.currentTimeMillis();
+
try {
DB db = new DB();
@SuppressWarnings("resource")
Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
- ps.setLong(1, from);
- ps.setLong(2, to);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- String date = rs.getString("DATE");
- int sub = rs.getInt("DELIVERY_SUBID");
- int res = rs.getInt("RESULT");
- int count = rs.getInt("COUNT");
- String key = date + "," + sub;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters(date, sub);
- map.put(key, c);
+ try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+ ps.setLong(1, from);
+ ps.setLong(2, to);
+ try(ResultSet rs = ps.executeQuery()) {
+ while (rs.next()) {
+ String date = rs.getString("DATE");
+ int sub = rs.getInt("DELIVERY_SUBID");
+ int res = rs.getInt("RESULT");
+ int count = rs.getInt("COUNT");
+ String key = date + "," + sub;
+ Counters c = map.get(key);
+ if (c == null) {
+ c = new Counters(date, sub);
+ map.put(key, c);
+ }
+ c.addCounts(res, count);
+ }
}
- c.addCounts(res, count);
}
- rs.close();
- ps.close();
- ps = conn.prepareStatement(SELECT_SQL2);
- ps.setLong(1, from);
- ps.setLong(2, to);
- rs = ps.executeQuery();
- while (rs.next()) {
- String date = rs.getString("DATE");
- int sub = rs.getInt("DELIVERY_SUBID");
- int count = rs.getInt("COUNT");
- String key = date + "," + sub;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters(date, sub);
- map.put(key, c);
- }
- c.addDlxCount(count);
- }
- rs.close();
- ps.close();
+ try( PreparedStatement ps2 = conn.prepareStatement(SELECT_SQL2)) {
+ ps2.setLong(1, from);
+ ps2.setLong(2, to);
+ try(ResultSet rs2 = ps2.executeQuery()) {
+ while (rs2.next()) {
+ String date = rs2.getString("DATE");
+ int sub = rs2.getInt("DELIVERY_SUBID");
+ int count = rs2.getInt("COUNT");
+ String key = date + "," + sub;
+ Counters c = map.get(key);
+ if (c == null) {
+ c = new Counters(date, sub);
+ map.put(key, c);
+ }
+ c.addDlxCount(count);
+ }
+ }
+ }
db.release(conn);
} catch (SQLException e) {
e.printStackTrace();
}
logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
- try {
- PrintWriter os = new PrintWriter(outfile);
+ try (PrintWriter os = new PrintWriter(outfile)){
os.println("date,subid,count100,count200,count300,count400,count500,countminus1,countdlx");
for (String key : new TreeSet<String>(map.keySet())) {
Counters c = map.get(key);
os.println(c.toString());
}
- os.close();
} catch (FileNotFoundException e) {
System.err.println("File cannot be written: " + outfile);
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
index 169db0d2..34e158a7 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
@@ -36,6 +36,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;
+import org.apache.log4j.Logger;
import org.onap.dmaap.datarouter.provisioning.utils.DB;
/**
@@ -57,7 +58,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB;
public class VolumeReport extends ReportBase {
private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +
" from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";
-
+ private Logger loggerVolumeReport=Logger.getLogger("org.onap.dmaap.datarouter.reports");
private class Counters {
public int filespublished, filesdelivered, filesexpired;
public long bytespublished, bytesdelivered, bytesexpired;
@@ -83,58 +84,64 @@ public class VolumeReport extends ReportBase {
final long stepsize = 6000000L;
boolean go_again = true;
for (long i = 0; go_again; i += stepsize) {
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
- ps.setLong(1, from);
- ps.setLong(2, to);
- ps.setLong(3, i);
- ps.setLong(4, stepsize);
- ResultSet rs = ps.executeQuery();
- go_again = false;
- while (rs.next()) {
- go_again = true;
- long etime = rs.getLong("EVENT_TIME");
- String type = rs.getString("TYPE");
- int feed = rs.getInt("FEEDID");
- long clen = rs.getLong("CONTENT_LENGTH");
- String key = sdf.format(new Date(etime)) + ":" + feed;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters();
- map.put(key, c);
- }
- if (type.equalsIgnoreCase("pub")) {
- c.filespublished++;
- c.bytespublished += clen;
- } else if (type.equalsIgnoreCase("del")) {
- // Only count successful deliveries
- int statusCode = rs.getInt("RESULT");
- if (statusCode >= 200 && statusCode < 300) {
- c.filesdelivered++;
- c.bytesdelivered += clen;
+ try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+ ps.setLong(1, from);
+ ps.setLong(2, to);
+ ps.setLong(3, i);
+ ps.setLong(4, stepsize);
+ try(ResultSet rs = ps.executeQuery()) {
+ go_again = false;
+ while (rs.next()) {
+ go_again = true;
+ long etime = rs.getLong("EVENT_TIME");
+ String type = rs.getString("TYPE");
+ int feed = rs.getInt("FEEDID");
+ long clen = rs.getLong("CONTENT_LENGTH");
+ String key = sdf.format(new Date(etime)) + ":" + feed;
+ Counters c = map.get(key);
+ if (c == null) {
+ c = new Counters();
+ map.put(key, c);
+ }
+ if (type.equalsIgnoreCase("pub")) {
+ c.filespublished++;
+ c.bytespublished += clen;
+ } else if (type.equalsIgnoreCase("del")) {
+ // Only count successful deliveries
+ int statusCode = rs.getInt("RESULT");
+ if (statusCode >= 200 && statusCode < 300) {
+ c.filesdelivered++;
+ c.bytesdelivered += clen;
+ }
+ } else if (type.equalsIgnoreCase("exp")) {
+ c.filesexpired++;
+ c.bytesexpired += clen;
+ }
}
- } else if (type.equalsIgnoreCase("exp")) {
- c.filesexpired++;
- c.bytesexpired += clen;
}
+
+ }
+ catch (SQLException sqlException)
+ {
+ loggerVolumeReport.error("SqlException",sqlException);
}
- rs.close();
- ps.close();
}
+
db.release(conn);
} catch (SQLException e) {
e.printStackTrace();
}
logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
- try {
- PrintWriter os = new PrintWriter(outfile);
+ try (PrintWriter os = new PrintWriter(outfile)) {
os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");
- for (String key : new TreeSet<String>(map.keySet())) {
+ for(String key :new TreeSet<String>(map.keySet()))
+ {
Counters c = map.get(key);
String[] p = key.split(":");
os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));
}
- os.close();
- } catch (FileNotFoundException e) {
+ }
+ catch (FileNotFoundException e) {
System.err.println("File cannot be written: " + outfile);
}
}
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
index 414fc185..c7f639ed 100644
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
@@ -38,8 +38,8 @@ public class DrServletTestBase {
public void setUp() throws Exception {
Properties props = new Properties();
props.setProperty("org.onap.dmaap.datarouter.provserver.isaddressauthenabled", "false");
- props.setProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir", "datarouter-prov/unit-test-logs");
- props.setProperty("org.onap.dmaap.datarouter.provserver.spooldir", "resources/spooldir");
+ props.setProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir", "unit-test-logs");
+ props.setProperty("org.onap.dmaap.datarouter.provserver.spooldir", "unit-test-logs/spool");
props.setProperty("org.onap.dmaap.datarouter.provserver.https.relaxation", "false");
FieldUtils.writeDeclaredStaticField(DB.class, "props", props, true);
FieldUtils.writeDeclaredStaticField(BaseServlet.class, "startmsgFlag", false, true);
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java
index 97900d4d..5f6b7ae3 100755..100644
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java
@@ -31,6 +31,7 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.onap.dmaap.datarouter.provisioning.BaseServlet.BEHALF_HEADER;

+import java.io.File;
import java.net.InetAddress;
import java.util.HashMap;
import java.util.Map;
@@ -154,6 +155,19 @@ public class InternalServletTest extends DrServletTestBase {
}

@Test
+ public void Given_Request_Is_HTTP_GET_Starts_With_Logs_In_Endpoint_And_File_Exists_Then_Request_Returns_Ok()
+ throws Exception {
+ when(request.getPathInfo()).thenReturn("/logs/testFile.txt");
+ File testFile = new File("unit-test-logs/testFile.txt");
+ testFile.createNewFile();
+ testFile.deleteOnExit();
+ ServletOutputStream outStream = mock(ServletOutputStream.class);
+ when(response.getOutputStream()).thenReturn(outStream);
+ internalServlet.doGet(request, response);
+ verify(response).setStatus(eq(HttpServletResponse.SC_OK));
+ }
+
+ @Test
public void Given_Request_Is_HTTP_GET_With_Api_In_Endpoint_Request_Succeeds() throws Exception {
when(request.getPathInfo()).thenReturn("/api/Key");
setParametersToNotContactDb(false);
@@ -167,10 +181,7 @@ public class InternalServletTest extends DrServletTestBase {
public void Given_Request_Is_HTTP_GET_With_Drlogs_In_Endpoint_Request_Succeeds()
throws Exception {
when(request.getPathInfo()).thenReturn("/drlogs/");
- PowerMockito.mockStatic(LogfileLoader.class);
- LogfileLoader logfileLoader = mock(LogfileLoader.class);
- when(logfileLoader.getBitSet()).thenReturn(new RLEBitSet());
- PowerMockito.when(LogfileLoader.getLoader()).thenReturn(logfileLoader);
+ mockLogfileLoader();
ServletOutputStream outStream = mock(ServletOutputStream.class);
when(response.getOutputStream()).thenReturn(outStream);
internalServlet.doGet(request, response);
@@ -332,6 +343,21 @@ public class InternalServletTest extends DrServletTestBase {
}

@Test
+ public void Given_Request_Is_HTTP_POST_To_Logs_Then_Request_Succeeds()
+ throws Exception {
+ when(request.getHeader("Content-Encoding")).thenReturn("gzip");
+ when(request.getPathInfo()).thenReturn("/logs/");
+ ServletInputStream inStream = mock(ServletInputStream.class);
+ when(request.getInputStream()).thenReturn(inStream);
+ File testDir = new File("unit-test-logs/spool");
+ testDir.mkdirs();
+ testDir.deleteOnExit();
+ mockLogfileLoader();
+ internalServlet.doPost(request, response);
+ verify(response).setStatus(eq(HttpServletResponse.SC_CREATED));
+ }
+
+ @Test
public void Given_Request_Is_HTTP_POST_To_Drlogs_And_Then_Unsupported_Media_Type_Response_Is_Generated()
throws Exception {
when(request.getHeader("Content-Type")).thenReturn("stub_contentType");
@@ -452,4 +478,11 @@ public class InternalServletTest extends DrServletTestBase {
Map<String, Integer> map = new HashMap<>();
FieldUtils.writeDeclaredStaticField(NodeClass.class, "map", map, true);
}
+
+ private void mockLogfileLoader() {
+ PowerMockito.mockStatic(LogfileLoader.class);
+ LogfileLoader logfileLoader = mock(LogfileLoader.class);
+ when(logfileLoader.getBitSet()).thenReturn(new RLEBitSet());
+ PowerMockito.when(LogfileLoader.getLoader()).thenReturn(logfileLoader);
+ }
}
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java
index 098765cf..91d72af7 100644
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java
@@ -22,62 +22,80 @@
 ******************************************************************************/
package org.onap.dmaap.datarouter.provisioning.beans;

-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.junit.*;
+import org.onap.dmaap.datarouter.provisioning.utils.DB;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import java.util.Collection;
import java.util.Date;
+import java.util.List;
-
-@RunWith(PowerMockRunner.class)
-@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.provisioning.beans.Group"})
public class GroupTest {
- private Group group;
+ private static EntityManagerFactory emf;
+ private static EntityManager em;
+ private Group group;
+ private DB db;
+
+ @BeforeClass
+ public static void init() {
+ emf = Persistence.createEntityManagerFactory("dr-unit-tests");
+ em = emf.createEntityManager();
+ System.setProperty(
+ "org.onap.dmaap.datarouter.provserver.properties",
+ "src/test/resources/h2Database.properties");
+ }
+
+ @AfterClass
+ public static void tearDownClass() {
+ em.clear();
+ em.close();
+ emf.close();
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ db = new DB();
+ group = new Group("GroupTest", "", "");
+ group.doInsert(db.getConnection());
+ }
+
+ @Test
+ public void Given_Group_Exists_In_Db_GetAllGroups_Returns_Correct_Group() {
+ Collection<Group> groups = Group.getAllgroups();
+ Assert.assertEquals("Group1", ((List<Group>) groups).get(0).getName());
+ }
+
+ @Test
+ public void Given_Group_Inserted_Into_Db_GetGroupMatching_Returns_Created_Group() {
+ Assert.assertEquals(group, Group.getGroupMatching(group));
+ }
+
+ @Test
+ public void Given_Group_Inserted_With_Same_Name_GetGroupMatching_With_Id_Returns_Correct_Group()
+ throws Exception {
+ Group sameGroupName = new Group("GroupTest", "This group has a description", "");
+ sameGroupName.doInsert(db.getConnection());
+ Assert.assertEquals(
+ "This group has a description", Group.getGroupMatching(group, 2).getDescription());
+ sameGroupName.doDelete(db.getConnection());
+ }

- @Test
- public void Validate_Group_Created_With_Default_Contructor() {
- group = new Group();
- Assert.assertEquals(group.getGroupid(), -1);
- Assert.assertEquals(group.getName(), "");
- }
+ @Test
+ public void Given_Group_Inserted_GetGroupById_Returns_Correct_Group() {
+ Assert.assertEquals(group, Group.getGroupById(group.getGroupid()));
+ }

- @Test
- public void Validate_Getters_And_Setters() {
- group = new Group();
- group.setGroupid(1);
- group.setAuthid("Auth");
- group.setClassification("Class");
- group.setDescription("Description");
- Date date = new Date();
- group.setLast_mod(date);
- group.setMembers("Members");
- group.setName("NewName");
- Assert.assertEquals(1, group.getGroupid());
- Assert.assertEquals("Auth", group.getAuthid());
- Assert.assertEquals("Class", group.getClassification());
- Assert.assertEquals("Description", group.getDescription());
- Assert.assertEquals(date, group.getLast_mod());
- Assert.assertEquals("Members", group.getMembers());
- }
+ @Test
+ public void Given_Group_AuthId_Updated_GetGroupByAuthId_Returns_Correct_Group() throws Exception {
+ group.setAuthid("Basic TmFtZTp6Z04wMFkyS3gybFppbXltNy94ZDhuMkdEYjA9");
+ group.doUpdate(db.getConnection());
+ Assert.assertEquals(group, Group.getGroupByAuthId("Basic TmFtZTp6Z04wMFkyS3gybFppbXltNy94ZDhuMkdEYjA9"));
+ }

- @Test
- public void Validate_Equals() {
- group = new Group();
- group.setGroupid(1);
- group.setAuthid("Auth");
- group.setClassification("Class");
- group.setDescription("Description");
- Date date = new Date();
- group.setLast_mod(date);
- group.setMembers("Members");
- group.setName("NewName");
- Group group2 = new Group("NewName", "Description", "Members");
- group2.setGroupid(1);
- group2.setAuthid("Auth");
- group2.setClassification("Class");
- group2.setLast_mod(date);
- Assert.assertEquals(group, group2);
- }
+ @After
+ public void tearDown() throws Exception {
+ group.doDelete(db.getConnection());
+ }
}
diff --git a/datarouter-prov/src/test/resources/META-INF/persistence.xml b/datarouter-prov/src/test/resources/META-INF/persistence.xml
new file mode 100755
index 00000000..6b42f8a9
--- /dev/null
+++ b/datarouter-prov/src/test/resources/META-INF/persistence.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<persistence version="2.1" xmlns="http://xmlns.jcp.org/xml/ns/persistence"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/persistence
+ http://xmlns.jcp.org/xml/ns/persistence/persistence_2_1.xsd">
+ <persistence-unit name="dr-unit-tests" transaction-type="RESOURCE_LOCAL">
+ <provider>org.hibernate.jpa.HibernatePersistenceProvider</provider>
+ <properties>
+ <!-- Configuring JDBC properties -->
+ <property name="javax.persistence.jdbc.url" value="jdbc:h2:mem:test;MODE=MySQL;INIT=RUNSCRIPT FROM 'classpath:create.sql';DB_CLOSE_DELAY=-1"/>
+ <property name="javax.persistence.jdbc.driver" value="org.h2.Driver"/>
+
+ <!-- Hibernate properties -->
+ <property name="hibernate.dialect" value="org.hibernate.dialect.H2Dialect"/>
+ <property name="hibernate.hbm2ddl.auto" value="validate"/>
+ <property name="hibernate.format_sql" value="false"/>
+ <property name="hibernate.show_sql" value="true"/>
+
+ </properties>
+ </persistence-unit>
+</persistence>
\ No newline at end of file
diff --git a/datarouter-prov/src/test/resources/create.sql b/datarouter-prov/src/test/resources/create.sql
new file mode 100755
index 00000000..6e6af1d4
--- /dev/null
+++ b/datarouter-prov/src/test/resources/create.sql
@@ -0,0 +1,146 @@
+CREATE TABLE FEEDS (
+ FEEDID INT UNSIGNED NOT NULL PRIMARY KEY,
+ GROUPID INT(10) UNSIGNED NOT NULL DEFAULT 0,
+ NAME VARCHAR(255) NOT NULL,
+ VERSION VARCHAR(20) NOT NULL,
+ DESCRIPTION VARCHAR(1000),
+ BUSINESS_DESCRIPTION VARCHAR(1000) DEFAULT NULL,
+ AUTH_CLASS VARCHAR(32) NOT NULL,
+ PUBLISHER VARCHAR(8) NOT NULL,
+ SELF_LINK VARCHAR(256),
+ PUBLISH_LINK VARCHAR(256),
+ SUBSCRIBE_LINK VARCHAR(256),
+ LOG_LINK VARCHAR(256),
+ DELETED BOOLEAN DEFAULT FALSE,
+ LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ SUSPENDED BOOLEAN DEFAULT FALSE,
+ CREATED_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE FEED_ENDPOINT_IDS (
+ FEEDID INT UNSIGNED NOT NULL,
+ USERID VARCHAR(20) NOT NULL,
+ PASSWORD VARCHAR(32) NOT NULL
+);
+
+CREATE TABLE FEED_ENDPOINT_ADDRS (
+ FEEDID INT UNSIGNED NOT NULL,
+ ADDR VARCHAR(44) NOT NULL
+);
+
+CREATE TABLE SUBSCRIPTIONS (
+ SUBID INT UNSIGNED NOT NULL PRIMARY KEY,
+ FEEDID INT UNSIGNED NOT NULL,
+ GROUPID INT(10) UNSIGNED NOT NULL DEFAULT 0,
+ DELIVERY_URL VARCHAR(256),
+ DELIVERY_USER VARCHAR(20),
+ DELIVERY_PASSWORD VARCHAR(32),
+ DELIVERY_USE100 BOOLEAN DEFAULT FALSE,
+ METADATA_ONLY BOOLEAN DEFAULT FALSE,
+ SUBSCRIBER VARCHAR(8) NOT NULL,
+ SELF_LINK VARCHAR(256),
+ LOG_LINK VARCHAR(256),
+ LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ SUSPENDED BOOLEAN DEFAULT FALSE,
+ CREATED_DATE TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+
+);
+
+CREATE TABLE PARAMETERS (
+ KEYNAME VARCHAR(32) NOT NULL PRIMARY KEY,
+ VALUE VARCHAR(4096) NOT NULL
+);
+
+CREATE TABLE LOG_RECORDS (
+ TYPE ENUM('pub', 'del', 'exp', 'pbf', 'dlx') NOT NULL,
+ EVENT_TIME BIGINT NOT NULL, /* time of the publish request */
+ PUBLISH_ID VARCHAR(64) NOT NULL, /* unique ID assigned to this publish attempt */
+ FEEDID INT UNSIGNED NOT NULL, /* pointer to feed in FEEDS */
+ REQURI VARCHAR(256) NOT NULL, /* request URI */
+ METHOD ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
+ CONTENT_TYPE VARCHAR(256) NOT NULL, /* content type of published file */
+ CONTENT_LENGTH BIGINT NOT NULL, /* content length of published file */
+
+ FEED_FILEID VARCHAR(256), /* file ID of published file */
+ REMOTE_ADDR VARCHAR(40), /* IP address of publishing endpoint */
+ USER VARCHAR(50), /* user name of publishing endpoint */
+ STATUS SMALLINT, /* status code returned to delivering agent */
+
+ DELIVERY_SUBID INT UNSIGNED, /* pointer to subscription in SUBSCRIPTIONS */
+ DELIVERY_FILEID VARCHAR(256), /* file ID of file being delivered */
+ RESULT SMALLINT, /* result received from subscribing agent */
+
+ ATTEMPTS INT, /* deliveries attempted */
+ REASON ENUM('notRetryable', 'retriesExhausted', 'diskFull', 'other'),
+
+ RECORD_ID BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
+ CONTENT_LENGTH_2 BIGINT,
+
+ INDEX (FEEDID) USING BTREE,
+ INDEX (DELIVERY_SUBID) USING BTREE,
+ INDEX (RECORD_ID) USING BTREE
+) ENGINE = MyISAM;
+
+CREATE TABLE INGRESS_ROUTES (
+ SEQUENCE INT UNSIGNED NOT NULL,
+ FEEDID INT UNSIGNED NOT NULL,
+ USERID VARCHAR(20),
+ SUBNET VARCHAR(44),
+ NODESET INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE EGRESS_ROUTES (
+ SUBID INT UNSIGNED NOT NULL PRIMARY KEY,
+ NODEID INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NETWORK_ROUTES (
+ FROMNODE INT UNSIGNED NOT NULL,
+ TONODE INT UNSIGNED NOT NULL,
+ VIANODE INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODESETS (
+ SETID INT UNSIGNED NOT NULL,
+ NODEID INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODES (
+ NODEID INT UNSIGNED NOT NULL PRIMARY KEY,
+ NAME VARCHAR(255) NOT NULL,
+ ACTIVE BOOLEAN DEFAULT TRUE
+);
+
+CREATE TABLE GROUPS (
+ GROUPID INT UNSIGNED NOT NULL PRIMARY KEY,
+ AUTHID VARCHAR(100) NOT NULL,
+ NAME VARCHAR(50) NOT NULL,
+ DESCRIPTION VARCHAR(255),
+ CLASSIFICATION VARCHAR(20) NOT NULL,
+ MEMBERS TINYTEXT,
+ LAST_MOD TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+INSERT INTO PARAMETERS VALUES
+ ('ACTIVE_POD', 'dmaap-dr-prov'),
+ ('PROV_ACTIVE_NAME', 'dmaap-dr-prov'),
+ ('STANDBY_POD', ''),
+ ('PROV_NAME', 'dmaap-dr-prov'),
+ ('NODES', 'dmaap-dr-node'),
+ ('PROV_DOMAIN', ''),
+ ('DELIVERY_INIT_RETRY_INTERVAL', '10'),
+ ('DELIVERY_MAX_AGE', '86400'),
+ ('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
+ ('DELIVERY_RETRY_RATIO', '2'),
+ ('LOGROLL_INTERVAL', '300'),
+ ('PROV_AUTH_ADDRESSES', 'dmaap-dr-prov|dmaap-dr-node'),
+ ('PROV_AUTH_SUBJECTS', ''),
+ ('PROV_MAXFEED_COUNT', '10000'),
+ ('PROV_MAXSUB_COUNT', '100000'),
+ ('PROV_REQUIRE_CERT', 'false'),
+ ('PROV_REQUIRE_SECURE', 'false'),
+ ('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
+ ;
+
+INSERT INTO GROUPS(GROUPID, AUTHID, NAME, DESCRIPTION, CLASSIFICATION, MEMBERS)
+VALUES (1, 'Basic dXNlcjE6cGFzc3dvcmQx', 'Group1', 'First Group for testing', 'Class1', 'Member1');
diff --git a/datarouter-prov/src/test/resources/h2Database.properties b/datarouter-prov/src/test/resources/h2Database.properties
new file mode 100755
index 00000000..5bc20ed4
--- /dev/null
+++ b/datarouter-prov/src/test/resources/h2Database.properties
@@ -0,0 +1,26 @@
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+
+# Database access
+org.onap.dmaap.datarouter.db.driver = org.h2.Driver
+org.onap.dmaap.datarouter.db.url = jdbc:h2:mem:test;DB_CLOSE_DELAY=-1