Diffstat (limited to 'datarouter-prov/src')
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java        72
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java     14
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java    13
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java  15
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java          70
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java                  14
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java                2
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java             3
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java               295
9 files changed, 263 insertions, 235 deletions
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java
index 05502760..4cefdf1e 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java
@@ -25,11 +25,9 @@
package org.onap.dmaap.datarouter.provisioning;
import java.io.IOException;
-import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import java.util.Properties;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
@@ -98,41 +96,47 @@ public class PublishServlet extends BaseServlet {
setIpAndFqdnForEelf("doPost");
eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
redirect(req, resp);
+
}
- private void redirect(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- String[] nodes = getNodes();
- if (nodes == null || nodes.length == 0) {
- resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "There are no nodes defined in the DR network.");
- } else {
- EventLogRecord elr = new EventLogRecord(req);
- int feedid = checkPath(req);
- if (feedid < 0) {
- String message = (feedid == -1)
- ? "Invalid request - Missing or bad feed number."
- : "Invalid request - Missing file ID.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
-
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
+ private void redirect(HttpServletRequest req, HttpServletResponse resp) {
+ try {
+ String[] nodes = getNodes();
+ if (nodes == null || nodes.length == 0) {
+ resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "There are no nodes defined in the DR network.");
} else {
- // Generate new URL
- String nextnode = getRedirectNode(feedid, req);
- nextnode = nextnode+":"+DB.HTTPS_PORT;
- String newurl = "https://" + nextnode + "/publish" + req.getPathInfo();
- String qs = req.getQueryString();
- if (qs != null)
- newurl += "?" + qs;
-
- // Log redirect in event log
- String message = "Redirected to: "+newurl;
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_MOVED_PERMANENTLY);
- eventlogger.info(elr);
-
- resp.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
- resp.setHeader("Location", newurl);
+ EventLogRecord elr = new EventLogRecord(req);
+ int feedid = checkPath(req);
+ if (feedid < 0) {
+ String message = (feedid == -1)
+ ? "Invalid request - Missing or bad feed number."
+ : "Invalid request - Missing file ID.";
+ elr.setMessage(message);
+ elr.setResult(HttpServletResponse.SC_NOT_FOUND);
+ eventlogger.info(elr);
+
+ resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
+ } else {
+ // Generate new URL
+ String nextnode = getRedirectNode(feedid, req);
+ nextnode = nextnode + ":" + DB.HTTPS_PORT;
+ String newurl = "https://" + nextnode + "/publish" + req.getPathInfo();
+ String qs = req.getQueryString();
+ if (qs != null)
+ newurl += "?" + qs;
+
+ // Log redirect in event log
+ String message = "Redirected to: " + newurl;
+ elr.setMessage(message);
+ elr.setResult(HttpServletResponse.SC_MOVED_PERMANENTLY);
+ eventlogger.info(elr);
+
+ resp.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
+ resp.setHeader("Location", newurl);
+ }
}
+ } catch (IOException ioe) {
+ intlogger.error("IOException" + ioe.getMessage());
+
}
}
private String getRedirectNode(int feedid, HttpServletRequest req) {
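
For reference, a minimal standalone sketch of the URL assembly that redirect() performs: scheme, node, HTTPS port, the /publish path and any query string. The node name, port and path below are hypothetical examples, not values from this change.

public class RedirectUrlSketch {

    // Builds the publish redirect target the same way redirect() does:
    // https://<node>:<port>/publish<pathInfo>[?<queryString>]
    static String buildPublishUrl(String node, int httpsPort, String pathInfo, String queryString) {
        String url = "https://" + node + ":" + httpsPort + "/publish" + pathInfo;
        if (queryString != null) {
            url += "?" + queryString;
        }
        return url;
    }

    public static void main(String[] args) {
        // Prints: https://dr-node-1.example.org:8443/publish/123/file.xml
        System.out.println(buildPublishUrl("dr-node-1.example.org", 8443, "/123/file.xml", null));
    }
}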
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java
index 00eb6a26..bad6f537 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java
@@ -73,7 +73,7 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
}
db.release(conn);
} catch (SQLException e) {
- e.printStackTrace();
+ intlogger.error("SQLException " + e.getMessage());
}
return set;
}
@@ -127,14 +127,14 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
} catch (SQLException e) {
rv = false;
intlogger.warn("PROV0007 doDelete: " + e.getMessage());
- e.printStackTrace();
+ intlogger.error("SQLException " + e.getMessage());
} finally {
try {
if(ps!=null) {
ps.close();
}
} catch (SQLException e) {
- e.printStackTrace();
+ intlogger.error("SQLException " + e.getMessage());
}
}
return rv;
@@ -157,14 +157,14 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
rv = true;
} catch (SQLException e) {
intlogger.warn("PROV0005 doInsert: " + e.getMessage());
- e.printStackTrace();
+ intlogger.error("SQLException " + e.getMessage());
} finally {
try {
if(ps!=null) {
ps.close();
}
} catch (SQLException e) {
- e.printStackTrace();
+ intlogger.error("SQLException " + e.getMessage());
}
}
}
@@ -185,14 +185,14 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
} catch (SQLException e) {
rv = false;
intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
- e.printStackTrace();
+ intlogger.error("SQLException " + e.getMessage());
} finally {
try {
if(ps!=null) {
ps.close();
}
} catch (SQLException e) {
- e.printStackTrace();
+ intlogger.error("SQLException " + e.getMessage());
}
}
return rv;
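
A side note on the close-in-finally blocks these hunks keep: the same guarantee can be had with try-with-resources, which also removes the nested SQLException handling around ps.close(). A minimal sketch under assumed names; the SQL text, table and column names here are illustrative, not taken from the project.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.apache.log4j.Logger;

class NetworkRouteDeleteSketch {

    private static final Logger intlogger = Logger.getLogger(NetworkRouteDeleteSketch.class);

    // try-with-resources closes the PreparedStatement automatically,
    // replacing the explicit finally/close block shown in the diff above.
    static boolean doDelete(Connection conn, String fromNode, String toNode) {
        final String sql = "delete from NETWORK_ROUTES where FROMNODE = ? and TONODE = ?";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, fromNode);
            ps.setString(2, toNode);
            ps.execute();
            return true;
        } catch (SQLException e) {
            intlogger.error("SQLException " + e.getMessage());
            return false;
        }
    }
}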
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
index 110c63de..b6ad8e43 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
@@ -214,7 +214,6 @@ public class LogfileLoader extends Thread {
}
} catch (Exception e) {
logger.warn("PROV0020: Caught exception in LogfileLoader: " + e);
- e.printStackTrace();
}
}
}
@@ -275,7 +274,7 @@ public class LogfileLoader extends Thread {
}
} catch (SQLException e) {
System.err.println(e);
- e.printStackTrace();
+ logger.error(e);
} finally {
db.release(conn);
}
@@ -297,7 +296,7 @@ public class LogfileLoader extends Thread {
}
} catch (SQLException e) {
System.err.println(e);
- e.printStackTrace();
+ logger.error(e);
} finally {
db.release(conn);
}
@@ -322,7 +321,7 @@ public class LogfileLoader extends Thread {
}
} catch (SQLException e) {
System.err.println(e);
- e.printStackTrace();
+ logger.error(e);
} finally {
db.release(conn);
}
@@ -376,7 +375,7 @@ public class LogfileLoader extends Thread {
logger.debug(String.format("initializeNextid, next ID is %d (%x)", nextid, nextid));
} catch (SQLException e) {
System.err.println(e);
- e.printStackTrace();
+ logger.error(e);
} finally {
db.release(conn);
}
@@ -417,19 +416,15 @@ public class LogfileLoader extends Thread {
} catch (SQLException e) {
logger.warn("PROV8003 Invalid value in record: " + line);
logger.debug(e);
- e.printStackTrace();
} catch (NumberFormatException e) {
logger.warn("PROV8004 Invalid number in record: " + line);
logger.debug(e);
- e.printStackTrace();
} catch (ParseException e) {
logger.warn("PROV8005 Invalid date in record: " + line);
logger.debug(e);
- e.printStackTrace();
} catch (Exception e) {
logger.warn("PROV8006 Invalid pattern in record: " + line);
logger.debug(e);
- e.printStackTrace();
}
total++;
}
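
One thing these logging swaps trade away: logger.error(e) or concatenating e.getMessage() records the message but not the stack trace that printStackTrace() used to print. log4j's Logger also accepts the throwable as a second argument, which keeps the trace. A minimal sketch; the logger name is illustrative.

import org.apache.log4j.Logger;

class LoggingSketch {

    private static final Logger logger = Logger.getLogger("org.onap.dmaap.datarouter.provisioning.internal");

    static void report(Exception e) {
        // Message only: the stack trace is lost.
        logger.error("SQLException " + e.getMessage());
        // Message plus throwable: log4j appends the full stack trace to the log entry.
        logger.error("SQLException while loading log records", e);
    }
}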
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
index 7b0e0e61..14d15197 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
@@ -27,24 +27,30 @@ package org.onap.dmaap.datarouter.provisioning.utils;
import java.io.File;
import java.util.Properties;
import java.util.TimerTask;
+import org.apache.log4j.Logger;
/**
- * This class provides a {@link TimerTask} that purges old logfiles
- * (older than the number of days specified by the org.onap.dmaap.datarouter.provserver.logretention property).
+ * This class provides a {@link TimerTask} that purges old logfiles (older than the number of days specified by the
+ * org.onap.dmaap.datarouter.provserver.logretention property).
*
* @author Robert Eby
* @version $Id: PurgeLogDirTask.java,v 1.2 2013/07/05 13:48:05 eby Exp $
*/
public class PurgeLogDirTask extends TimerTask {
+
private static final long ONEDAY = 86400000L;
private final String logdir;
private final long interval;
+ private Logger utilsLogger;
public PurgeLogDirTask() {
Properties p = (new DB()).getProperties();
logdir = p.getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir");
String s = p.getProperty("org.onap.dmaap.datarouter.provserver.logretention", "30");
+
+ this.utilsLogger = Logger.getLogger("org.onap.dmaap.datarouter.provisioning.utils");
+
long n = 30;
try {
n = Long.parseLong(s);
@@ -61,12 +67,13 @@ public class PurgeLogDirTask extends TimerTask {
if (dir.exists()) {
long exptime = System.currentTimeMillis() - interval;
for (File logfile : dir.listFiles()) {
- if (logfile.lastModified() < exptime)
+ if (logfile.lastModified() < exptime) {
logfile.delete();
+ }
}
}
} catch (Exception e) {
- e.printStackTrace();
+ utilsLogger.error("Exception: " + e.getMessage());
}
}
}
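
For context on where this task runs: a TimerTask like PurgeLogDirTask is normally driven by a java.util.Timer. A minimal scheduling sketch; the startup delay and one-day period here are assumptions for illustration, not taken from this change.

import java.util.Timer;
import java.util.TimerTask;

public class PurgeSchedulerSketch {

    private static final long ONE_DAY_MS = 86400000L;

    public static void main(String[] args) {
        TimerTask purgeTask = new TimerTask() {
            @Override
            public void run() {
                // The real PurgeLogDirTask walks the access-log directory and deletes files
                // older than the configured retention; this stub just marks the invocation.
                System.out.println("purging old log files...");
            }
        };
        // Non-daemon timer: run once after a short delay, then once per day.
        new Timer("log-purge").scheduleAtFixedRate(purgeTask, 5000L, ONE_DAY_MS);
    }
}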
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
index 28740c0f..f1e0f7cc 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
@@ -37,7 +37,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
-
import org.onap.dmaap.datarouter.provisioning.utils.DB;
/**
@@ -65,42 +64,54 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB;
* @version $Id: DailyLatencyReport.java,v 1.2 2013/11/06 16:23:54 eby Exp $
*/
public class DailyLatencyReport extends ReportBase {
+
private static final String SELECT_SQL =
"select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, CONTENT_LENGTH from LOG_RECORDS" +
- " where EVENT_TIME >= ? and EVENT_TIME <= ?";
+ " where EVENT_TIME >= ? and EVENT_TIME <= ?";
private class Job {
+
public long pubtime = 0;
public long clen = 0;
public List<Long> deltime = new ArrayList<Long>();
+
public long minLatency() {
long n = deltime.isEmpty() ? 0 : Long.MAX_VALUE;
- for (Long l : deltime)
- n = Math.min(n, l-pubtime);
+ for (Long l : deltime) {
+ n = Math.min(n, l - pubtime);
+ }
return n;
}
+
public long maxLatency() {
long n = 0;
- for (Long l : deltime)
- n = Math.max(n, l-pubtime);
+ for (Long l : deltime) {
+ n = Math.max(n, l - pubtime);
+ }
return n;
}
+
public long totalLatency() {
long n = 0;
- for (Long l : deltime)
- n += (l-pubtime);
+ for (Long l : deltime) {
+ n += (l - pubtime);
+ }
return n;
}
}
+
private class Counters {
+
public final String date;
public final int feedid;
public final Map<String, Job> jobs;
+
public Counters(String d, int fid) {
date = d;
feedid = fid;
- jobs = new HashMap<String, Job>();
+ jobs = new HashMap<>();
}
+
public void addEvent(long etime, String type, String id, String fid, long clen) {
Job j = jobs.get(id);
if (j == null) {
@@ -114,48 +125,52 @@ public class DailyLatencyReport extends ReportBase {
j.deltime.add(etime);
}
}
+
@Override
public String toString() {
long minsize = Long.MAX_VALUE, maxsize = 0, avgsize = 0;
- long minl = Long.MAX_VALUE, maxl = 0;
- long fanout = 0, totall = 0, totaln = 0;
+ long minl = Long.MAX_VALUE, maxl = 0;
+ long fanout = 0, totall = 0, totaln = 0;
for (Job j : jobs.values()) {
minsize = Math.min(minsize, j.clen);
maxsize = Math.max(maxsize, j.clen);
avgsize += j.clen;
- minl = Math.min(minl, j.minLatency());
- maxl = Math.max(maxl, j.maxLatency());
- totall += j.totalLatency();
- totaln += j.deltime.size();
- fanout += j.deltime.size();
+ minl = Math.min(minl, j.minLatency());
+ maxl = Math.max(maxl, j.maxLatency());
+ totall += j.totalLatency();
+ totaln += j.deltime.size();
+ fanout += j.deltime.size();
}
if (jobs.size() > 0) {
avgsize /= jobs.size();
- fanout /= jobs.size();
+ fanout /= jobs.size();
}
long avgl = (totaln > 0) ? (totall / totaln) : 0;
- return date + "," + feedid + "," + minsize + "," + maxsize + "," + avgsize + "," + minl + "," + maxl + "," + avgl + "," + fanout;
+ return date + "," + feedid + "," + minsize + "," + maxsize + "," + avgsize + "," + minl + "," + maxl + ","
+ + avgl + "," + fanout;
}
}
+
private long getPstart(String t) {
- if (t.indexOf('.') > 0)
+ if (t.indexOf('.') >= 0) {
t = t.substring(0, t.indexOf('.'));
+ }
return Long.parseLong(t);
}
@Override
public void run() {
- Map<String, Counters> map = new HashMap<String, Counters>();
+ Map<String, Counters> map = new HashMap<>();
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
long start = System.currentTimeMillis();
try {
DB db = new DB();
@SuppressWarnings("resource")
Connection conn = db.getConnection();
- try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+ try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
ps.setLong(1, from);
ps.setLong(2, to);
- try(ResultSet rs = ps.executeQuery()) {
+ try (ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
String id = rs.getString("PUBLISH_ID");
int feed = rs.getInt("FEEDID");
@@ -177,17 +192,18 @@ public class DailyLatencyReport extends ReportBase {
db.release(conn);
}
} catch (SQLException e) {
- e.printStackTrace();
+ logger.error("SQLException: " + e.getMessage());
}
- logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
- try (PrintWriter os = new PrintWriter(outfile)){
+ logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
+ try (PrintWriter os = new PrintWriter(outfile)) {
os.println("date,feedid,minsize,maxsize,avgsize,minlat,maxlat,avglat,fanout");
- for (String key : new TreeSet<String>(map.keySet())) {
+ for (String key : new TreeSet<>(map.keySet())) {
Counters c = map.get(key);
os.println(c.toString());
}
} catch (FileNotFoundException e) {
- System.err.println("File cannot be written: "+outfile);
+ System.err.println("File cannot be written: " + outfile);
+ logger.error("FileNotFoundException: " + e.getMessage());
}
}
}
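
To make the Job latency arithmetic concrete: with a hypothetical publish at t=1000 ms and deliveries at t=1300, 1500 and 2000 ms, minLatency() is 300, maxLatency() is 1000, and the average works out to (300+500+1000)/3 = 600 ms. A small standalone sketch of that calculation:

import java.util.Arrays;
import java.util.List;

class LatencySketch {

    public static void main(String[] args) {
        long pubtime = 1000L;                                     // hypothetical publish time (ms)
        List<Long> deltime = Arrays.asList(1300L, 1500L, 2000L);  // hypothetical delivery times (ms)

        long min = Long.MAX_VALUE, max = 0, total = 0;
        for (long d : deltime) {
            long latency = d - pubtime;
            min = Math.min(min, latency);
            max = Math.max(max, latency);
            total += latency;
        }
        long avg = total / deltime.size();
        System.out.printf("min=%d max=%d avg=%d%n", min, max, avg);  // min=300 max=1000 avg=600
    }
}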
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java
index fe9c4601..3f67efec 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java
@@ -83,7 +83,7 @@ public class FeedReport extends ReportBase {
}
db.release(conn);
} catch (SQLException e) {
- e.printStackTrace();
+ logger.error(e);
}
logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
try (PrintWriter os = new PrintWriter(outfile)) {
@@ -91,6 +91,7 @@ public class FeedReport extends ReportBase {
os.print(sb.toString());
} catch (FileNotFoundException e) {
System.err.println("File cannot be written: " + outfile);
+ logger.error(e);
}
}
@@ -129,6 +130,7 @@ public class FeedReport extends ReportBase {
feedmap.put("pubcount", n + 1);
} catch (JSONException e) {
feedmap.put("pubcount", 1);
+ logger.error(e);
}
} else if (type.equals("del")) {
String subid = "" + rs.getInt("DELIVERY_SUBID");
@@ -137,6 +139,7 @@ public class FeedReport extends ReportBase {
feedmap.put(subid, n + 1);
} catch (JSONException e) {
feedmap.put(subid, 1);
+ logger.error(e);
}
}
}
@@ -144,7 +147,7 @@ public class FeedReport extends ReportBase {
}
db.release(conn);
} catch (SQLException e) {
- e.printStackTrace();
+ logger.error(e);
}
logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
try {
@@ -153,6 +156,7 @@ public class FeedReport extends ReportBase {
os.close();
} catch (FileNotFoundException e) {
System.err.println("File cannot be written: " + outfile);
+ logger.error(e);
}
}
@@ -279,7 +283,7 @@ public class FeedReport extends ReportBase {
*
* @param args
*/
- public static void main(String[] args) {
+ public void main(String[] args) {
int rtype = 0; // 0 -> day, 1 -> week, 2 -> month, 3 -> year
String infile = null;
String outfile = null;
@@ -350,6 +354,7 @@ public class FeedReport extends ReportBase {
feedmap.put("pubcount", n + count);
} catch (JSONException e) {
feedmap.put("pubcount", count);
+ logger.error(e);
}
} else if (type.equals("del")) {
String subid = tt[3];
@@ -358,6 +363,7 @@ public class FeedReport extends ReportBase {
feedmap.put(subid, n + count);
} catch (JSONException e) {
feedmap.put(subid, count);
+ logger.error(e);
}
}
}
@@ -379,7 +385,7 @@ public class FeedReport extends ReportBase {
System.out.println(t);
} catch (Exception e) {
System.err.println(e);
- e.printStackTrace();
+ logger.error(e);
}
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
index 549511b7..f5001409 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
@@ -180,7 +180,7 @@ public class LatencyReport extends ReportBase {
} catch (FileNotFoundException e) {
System.err.println("File cannot be written: " + outfile);
} catch (SQLException e) {
- e.printStackTrace();
+ logger.error("SQLException: " + e.getMessage());
}
logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
index 51beac92..b580af77 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
@@ -144,7 +144,7 @@ public class SubscriberReport extends ReportBase {
db.release(conn);
} catch (SQLException e) {
- e.printStackTrace();
+ logger.error("SQLException: " + e.getMessage());
}
logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
try (PrintWriter os = new PrintWriter(outfile)){
@@ -155,6 +155,7 @@ public class SubscriberReport extends ReportBase {
}
} catch (FileNotFoundException e) {
System.err.println("File cannot be written: " + outfile);
+ logger.error("FileNotFoundException: " + e.getMessage());
}
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
index 34e158a7..8d5731f8 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
@@ -1,148 +1,147 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package org.onap.dmaap.datarouter.reports;
-
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.TreeSet;
-
-import org.apache.log4j.Logger;
-import org.onap.dmaap.datarouter.provisioning.utils.DB;
-
-/**
- * Generate a traffic volume report. The report is a .csv file containing the following columns:
- * <table>
- * <tr><td>date</td><td>the date for this record</td></tr>
- * <tr><td>feedid</td><td>the Feed ID for this record</td></tr>
- * <tr><td>filespublished</td><td>the number of files published on this feed and date</td></tr>
- * <tr><td>bytespublished</td><td>the number of bytes published on this feed and date</td></tr>
- * <tr><td>filesdelivered</td><td>the number of files delivered on this feed and date</td></tr>
- * <tr><td>bytesdelivered</td><td>the number of bytes delivered on this feed and date</td></tr>
- * <tr><td>filesexpired</td><td>the number of files expired on this feed and date</td></tr>
- * <tr><td>bytesexpired</td><td>the number of bytes expired on this feed and date</td></tr>
- * </table>
- *
- * @author Robert P. Eby
- * @version $Id: VolumeReport.java,v 1.3 2014/02/28 15:11:13 eby Exp $
- */
-public class VolumeReport extends ReportBase {
- private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +
- " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";
- private Logger loggerVolumeReport=Logger.getLogger("org.onap.dmaap.datarouter.reports");
- private class Counters {
- public int filespublished, filesdelivered, filesexpired;
- public long bytespublished, bytesdelivered, bytesexpired;
-
- @Override
- public String toString() {
- return String.format("%d,%d,%d,%d,%d,%d",
- filespublished, bytespublished, filesdelivered,
- bytesdelivered, filesexpired, bytesexpired);
- }
- }
-
- @Override
- public void run() {
- Map<String, Counters> map = new HashMap<String, Counters>();
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
- long start = System.currentTimeMillis();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- // We need to run this SELECT in stages, because otherwise we run out of memory!
- final long stepsize = 6000000L;
- boolean go_again = true;
- for (long i = 0; go_again; i += stepsize) {
- try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
- ps.setLong(1, from);
- ps.setLong(2, to);
- ps.setLong(3, i);
- ps.setLong(4, stepsize);
- try(ResultSet rs = ps.executeQuery()) {
- go_again = false;
- while (rs.next()) {
- go_again = true;
- long etime = rs.getLong("EVENT_TIME");
- String type = rs.getString("TYPE");
- int feed = rs.getInt("FEEDID");
- long clen = rs.getLong("CONTENT_LENGTH");
- String key = sdf.format(new Date(etime)) + ":" + feed;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters();
- map.put(key, c);
- }
- if (type.equalsIgnoreCase("pub")) {
- c.filespublished++;
- c.bytespublished += clen;
- } else if (type.equalsIgnoreCase("del")) {
- // Only count successful deliveries
- int statusCode = rs.getInt("RESULT");
- if (statusCode >= 200 && statusCode < 300) {
- c.filesdelivered++;
- c.bytesdelivered += clen;
- }
- } else if (type.equalsIgnoreCase("exp")) {
- c.filesexpired++;
- c.bytesexpired += clen;
- }
- }
- }
-
- }
- catch (SQLException sqlException)
- {
- loggerVolumeReport.error("SqlException",sqlException);
- }
- }
-
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
- try (PrintWriter os = new PrintWriter(outfile)) {
- os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");
- for(String key :new TreeSet<String>(map.keySet()))
- {
- Counters c = map.get(key);
- String[] p = key.split(":");
- os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));
- }
- }
- catch (FileNotFoundException e) {
- System.err.println("File cannot be written: " + outfile);
- }
- }
-}
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.reports;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TreeSet;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.datarouter.provisioning.utils.DB;
+
+/**
+ * Generate a traffic volume report. The report is a .csv file containing the following columns:
+ * <table>
+ * <tr><td>date</td><td>the date for this record</td></tr>
+ * <tr><td>feedid</td><td>the Feed ID for this record</td></tr>
+ * <tr><td>filespublished</td><td>the number of files published on this feed and date</td></tr>
+ * <tr><td>bytespublished</td><td>the number of bytes published on this feed and date</td></tr>
+ * <tr><td>filesdelivered</td><td>the number of files delivered on this feed and date</td></tr>
+ * <tr><td>bytesdelivered</td><td>the number of bytes delivered on this feed and date</td></tr>
+ * <tr><td>filesexpired</td><td>the number of files expired on this feed and date</td></tr>
+ * <tr><td>bytesexpired</td><td>the number of bytes expired on this feed and date</td></tr>
+ * </table>
+ *
+ * @author Robert P. Eby
+ * @version $Id: VolumeReport.java,v 1.3 2014/02/28 15:11:13 eby Exp $
+ */
+public class VolumeReport extends ReportBase {
+ private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +
+ " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";
+ private Logger loggerVolumeReport=Logger.getLogger("org.onap.dmaap.datarouter.reports");
+ private class Counters {
+ int filespublished, filesdelivered, filesexpired;
+ long bytespublished, bytesdelivered, bytesexpired;
+
+ @Override
+ public String toString() {
+ return String.format("%d,%d,%d,%d,%d,%d",
+ filespublished, bytespublished, filesdelivered,
+ bytesdelivered, filesexpired, bytesexpired);
+ }
+ }
+
+ @Override
+ public void run() {
+ Map<String, Counters> map = new HashMap<String, Counters>();
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
+ long start = System.currentTimeMillis();
+ try {
+ DB db = new DB();
+ @SuppressWarnings("resource")
+ Connection conn = db.getConnection();
+ // We need to run this SELECT in stages, because otherwise we run out of memory!
+ final long stepsize = 6000000L;
+ boolean go_again = true;
+ for (long i = 0; go_again; i += stepsize) {
+ try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+ ps.setLong(1, from);
+ ps.setLong(2, to);
+ ps.setLong(3, i);
+ ps.setLong(4, stepsize);
+ try(ResultSet rs = ps.executeQuery()) {
+ go_again = false;
+ while (rs.next()) {
+ go_again = true;
+ long etime = rs.getLong("EVENT_TIME");
+ String type = rs.getString("TYPE");
+ int feed = rs.getInt("FEEDID");
+ long clen = rs.getLong("CONTENT_LENGTH");
+ String key = sdf.format(new Date(etime)) + ":" + feed;
+ Counters c = map.get(key);
+ if (c == null) {
+ c = new Counters();
+ map.put(key, c);
+ }
+ if (type.equalsIgnoreCase("pub")) {
+ c.filespublished++;
+ c.bytespublished += clen;
+ } else if (type.equalsIgnoreCase("del")) {
+ // Only count successful deliveries
+ int statusCode = rs.getInt("RESULT");
+ if (statusCode >= 200 && statusCode < 300) {
+ c.filesdelivered++;
+ c.bytesdelivered += clen;
+ }
+ } else if (type.equalsIgnoreCase("exp")) {
+ c.filesexpired++;
+ c.bytesexpired += clen;
+ }
+ }
+ }
+ }
+ catch (SQLException sqlException)
+ {
+ loggerVolumeReport.error("SqlException",sqlException);
+ }
+ }
+
+ db.release(conn);
+ } catch (SQLException e) {
+ loggerVolumeReport.error("SQLException: " + e.getMessage());
+ }
+ logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
+ try (PrintWriter os = new PrintWriter(outfile)) {
+ os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");
+ for(String key :new TreeSet<String>(map.keySet()))
+ {
+ Counters c = map.get(key);
+ String[] p = key.split(":");
+ os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));
+ }
+ }
+ catch (FileNotFoundException e) {
+ System.err.println("File cannot be written: " + outfile);
+ }
+ }
+}
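
The staged SELECT in this file (LIMIT ?, ? advanced by a 6,000,000-row step) is plain offset paging: fetch fixed-size chunks until a query returns no rows, so the full result set never has to sit in memory at once. A standalone sketch of the same pattern; only the SQL text mirrors the file, the surrounding class and method are illustrative.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

class PagedQuerySketch {

    private static final String SQL =
        "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT"
            + " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";

    static void scan(Connection conn, long from, long to, long stepsize) throws SQLException {
        boolean goAgain = true;
        for (long offset = 0; goAgain; offset += stepsize) {
            try (PreparedStatement ps = conn.prepareStatement(SQL)) {
                ps.setLong(1, from);
                ps.setLong(2, to);
                ps.setLong(3, offset);    // LIMIT offset
                ps.setLong(4, stepsize);  // LIMIT row count
                try (ResultSet rs = ps.executeQuery()) {
                    goAgain = false;
                    while (rs.next()) {
                        goAgain = true;   // this chunk had rows, so try the next offset too
                        // ... aggregate the row here ...
                    }
                }
            }
        }
    }
}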