From 68a9ca240970fceaf12bbe91b7bad8e1d98ecd93 Mon Sep 17 00:00:00 2001 From: efiacor Date: Mon, 21 Oct 2019 13:04:29 +0100 Subject: Refactor Prov DB handling Signed-off-by: efiacor Change-Id: I8610d3a20ca0c11dafb98b161cbeb06c3ff63be6 Issue-ID: DMAAP-1284 --- datarouter-prov/pom.xml | 9 +- .../onap/dmaap/datarouter/authz/Authorizer.java | 4 +- .../dmaap/datarouter/provisioning/BaseServlet.java | 72 +-- .../datarouter/provisioning/InternalServlet.java | 17 +- .../dmaap/datarouter/provisioning/LogServlet.java | 95 ++- .../onap/dmaap/datarouter/provisioning/Main.java | 313 ---------- .../onap/dmaap/datarouter/provisioning/Poker.java | 328 ---------- .../dmaap/datarouter/provisioning/ProvRunner.java | 331 ++++++++++ .../datarouter/provisioning/ProxyServlet.java | 13 +- .../datarouter/provisioning/PublishServlet.java | 5 +- .../datarouter/provisioning/StatisticsServlet.java | 101 ++- .../provisioning/SubscriptionServlet.java | 1 + .../datarouter/provisioning/SynchronizerTask.java | 688 --------------------- .../datarouter/provisioning/beans/Deleteable.java | 2 +- .../datarouter/provisioning/beans/EgressRoute.java | 43 +- .../dmaap/datarouter/provisioning/beans/Feed.java | 370 +++++------ .../dmaap/datarouter/provisioning/beans/Group.java | 112 +--- .../provisioning/beans/IngressRoute.java | 72 +-- .../datarouter/provisioning/beans/LogRecord.java | 89 ++- .../provisioning/beans/NetworkRoute.java | 32 +- .../datarouter/provisioning/beans/NodeClass.java | 61 +- .../datarouter/provisioning/beans/Parameters.java | 45 +- .../provisioning/beans/Subscription.java | 159 ++--- .../dmaap/datarouter/provisioning/utils/DB.java | 272 -------- .../datarouter/provisioning/utils/DRRouteCLI.java | 3 +- .../provisioning/utils/LogfileLoader.java | 68 +- .../provisioning/utils/PasswordProcessor.java | 3 +- .../dmaap/datarouter/provisioning/utils/Poker.java | 330 ++++++++++ .../datarouter/provisioning/utils/ProvDbUtils.java | 168 +++++ .../provisioning/utils/PurgeLogDirTask.java | 3 +- .../provisioning/utils/SynchronizerTask.java | 683 ++++++++++++++++++++ .../datarouter/reports/DailyLatencyReport.java | 52 +- .../onap/dmaap/datarouter/reports/FeedReport.java | 148 ++--- .../dmaap/datarouter/reports/LatencyReport.java | 21 +- .../dmaap/datarouter/reports/SubscriberReport.java | 72 +-- .../dmaap/datarouter/reports/VolumeReport.java | 44 +- .../src/main/resources/docker/startup.sh | 2 +- datarouter-prov/src/main/resources/misc/drtrprov | 2 +- .../dmaap/datarouter/authz/impl/ProvAuthTest.java | 4 - .../datarouter/provisioning/BaseServletTest.java | 2 - .../provisioning/DRFeedsServletTest.java | 52 +- .../datarouter/provisioning/DrServletTestBase.java | 5 +- .../datarouter/provisioning/FeedServletTest.java | 63 +- .../datarouter/provisioning/GroupServletTest.java | 9 +- .../provisioning/InternalServletTest.java | 1 + .../datarouter/provisioning/LogServletTest.java | 26 +- .../datarouter/provisioning/ProxyServletTest.java | 1 + .../provisioning/PublishServletTest.java | 7 +- .../provisioning/StatisticsServletTest.java | 10 - .../provisioning/SubscribeServletTest.java | 20 +- .../provisioning/SubscriptionServletTest.java | 36 +- .../provisioning/SynchronizerTaskTest.java | 1 + .../provisioning/beans/EgressRouteTest.java | 11 +- .../datarouter/provisioning/beans/FeedTest.java | 48 +- .../datarouter/provisioning/beans/GroupTest.java | 53 +- .../provisioning/beans/IngressRouteTest.java | 2 +- .../provisioning/beans/LogRecordTest.java | 38 +- .../provisioning/beans/NetworkRouteTest.java | 21 +- 
.../provisioning/beans/SubscriptionTest.java | 3 - .../datarouter/provisioning/utils/DbTest.java | 4 +- 60 files changed, 2399 insertions(+), 2851 deletions(-) delete mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java delete mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProvRunner.java delete mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java delete mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/Poker.java create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ProvDbUtils.java create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/SynchronizerTask.java (limited to 'datarouter-prov') diff --git a/datarouter-prov/pom.xml b/datarouter-prov/pom.xml index e141595e..20ce7ce3 100755 --- a/datarouter-prov/pom.xml +++ b/datarouter-prov/pom.xml @@ -51,11 +51,12 @@ org.mariadb.jdbc mariadb-java-client - 2.3.0 + 2.5.0 - org.hamcrest - hamcrest-library + org.apache.commons + commons-dbcp2 + 2.7.0 org.slf4j @@ -279,7 +280,7 @@ true - org.onap.dmaap.datarouter.provisioning.Main + org.onap.dmaap.datarouter.provisioning.ProvRunner diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/Authorizer.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/Authorizer.java index c932f915..fb62f192 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/Authorizer.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/Authorizer.java @@ -46,7 +46,7 @@ public interface Authorizer { * permit/deny decision for the request and (after R1) supplemental information related to the response in the form * of advice and obligations. */ - public AuthorizationResponse decide(HttpServletRequest request); + AuthorizationResponse decide(HttpServletRequest request); /** * Determine if the API request carried in the request parameter,with additional attributes provided in @@ -58,5 +58,5 @@ public interface Authorizer { * permit/deny decision for the request and (after R1) supplemental information related to the response * in the form of advice and obligations. 
*/ - public AuthorizationResponse decide(HttpServletRequest request, Map additionalAttrs); + AuthorizationResponse decide(HttpServletRequest request, Map additionalAttrs); } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java index f8b5934e..c0290bbb 100755 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java @@ -66,8 +66,10 @@ import org.onap.dmaap.datarouter.provisioning.beans.NodeClass; import org.onap.dmaap.datarouter.provisioning.beans.Parameters; import org.onap.dmaap.datarouter.provisioning.beans.Subscription; import org.onap.dmaap.datarouter.provisioning.beans.Updateable; -import org.onap.dmaap.datarouter.provisioning.utils.DB; import org.onap.dmaap.datarouter.provisioning.utils.PasswordProcessor; +import org.onap.dmaap.datarouter.provisioning.utils.Poker; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; +import org.onap.dmaap.datarouter.provisioning.utils.SynchronizerTask; import org.onap.dmaap.datarouter.provisioning.utils.ThrottleFilter; import org.slf4j.MDC; @@ -110,21 +112,19 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { public static final String SUBFULL_CONTENT_TYPE = "application/vnd.dmaap-dr.subscription-full; version=2.0"; static final String SUBLIST_CONTENT_TYPE = "application/vnd.dmaap-dr.subscription-list; version=1.0"; - //Adding groups functionality, ...1610 static final String GROUP_BASECONTENT_TYPE = "application/vnd.dmaap-dr.group"; static final String GROUP_CONTENT_TYPE = "application/vnd.dmaap-dr.group; version=2.0"; static final String GROUPFULL_CONTENT_TYPE = "application/vnd.dmaap-dr.group-full; version=2.0"; public static final String GROUPLIST_CONTENT_TYPE = "application/vnd.dmaap-dr.fegrouped-list; version=1.0"; - public static final String LOGLIST_CONTENT_TYPE = "application/vnd.dmaap-dr.log-list; version=1.0"; public static final String PROVFULL_CONTENT_TYPE1 = "application/vnd.dmaap-dr.provfeed-full; version=1.0"; public static final String PROVFULL_CONTENT_TYPE2 = "application/vnd.dmaap-dr.provfeed-full; version=2.0"; public static final String CERT_ATTRIBUTE = "javax.servlet.request.X509Certificate"; static final String DB_PROBLEM_MSG = "There has been a problem with the DB. It is suggested you " - + "try the operation again."; + + "try the operation again."; private static final int DEFAULT_MAX_FEEDS = 10000; private static final int DEFAULT_MAX_SUBS = 100000; @@ -143,7 +143,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { public static final String API = "/api/"; static final String LOGS = "/logs/"; - static final String TEXT_CT = "text/plain"; + public static final String TEXT_CT = "text/plain"; static final String INGRESS = "/ingress/"; static final String EGRESS = "/egress/"; static final String NETWORK = "/network/"; @@ -157,7 +157,6 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { static final String END_TIME = "end_time"; static final String REASON_SQL = "reasonSQL"; - /** * A boolean to trigger one time "provisioning changed" event on startup. 
*/ @@ -259,25 +258,31 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { private InetAddress loopback; //DMAAP-597 (Tech Dept) REST request source IP auth relaxation to accommodate OOM kubernetes deploy - private static String isAddressAuthEnabled = (new DB()).getProperties() - .getProperty("org.onap.dmaap.datarouter.provserver.isaddressauthenabled", "false"); + private static String isAddressAuthEnabled = ProvRunner.getProvProperties() + .getProperty("org.onap.dmaap.datarouter.provserver.isaddressauthenabled", "false"); - static String isCadiEnabled = (new DB()).getProperties() - .getProperty("org.onap.dmaap.datarouter.provserver.cadi.enabled", "false"); + static String isCadiEnabled = ProvRunner.getProvProperties() + .getProperty("org.onap.dmaap.datarouter.provserver.cadi.enabled", "false"); /** * Initialize data common to all the provisioning server servlets. */ protected BaseServlet() { + setUpFields(); + if (authz == null) { + authz = new ProvAuthorizer(this); + } + String name = this.getClass().getName(); + intlogger.info("PROV0002 Servlet " + name + " started."); + } + + private static void setUpFields() { if (eventlogger == null) { eventlogger = EELFManager.getInstance().getLogger("EventLog"); } if (intlogger == null) { intlogger = EELFManager.getInstance().getLogger("InternalLog"); } - if (authz == null) { - authz = new ProvAuthorizer(this); - } if (startmsgFlag) { startmsgFlag = false; provisioningParametersChanged(); @@ -285,8 +290,6 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { if (synctask == null) { synctask = SynchronizerTask.getSynchronizer(); } - String name = this.getClass().getName(); - intlogger.info("PROV0002 Servlet " + name + " started."); } @Override @@ -359,7 +362,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { } private static void processPassword(String maskKey, boolean action, JSONArray endpointIds, int index, - String password) { + String password) { try { if (action) { endpointIds.getJSONObject(index).put(maskKey, PasswordProcessor.encrypt(password)); @@ -511,7 +514,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { * Something has changed in the provisioning data. Start the timers that will cause the pre-packaged JSON string to * be regenerated, and cause nodes and the other provisioning server to be notified. */ - static void provisioningDataChanged() { + public static void provisioningDataChanged() { long now = System.currentTimeMillis(); Poker pkr = Poker.getPoker(); pkr.setTimers(now + (pokeTimer1 * 1000L), now + (pokeTimer2 * 1000L)); @@ -520,7 +523,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { /** * Something in the parameters has changed, reload all parameters from the DB. 
*/ - static void provisioningParametersChanged() { + public static void provisioningParametersChanged() { Map map = Parameters.getParameters(); requireSecure = getBoolean(map, Parameters.PROV_REQUIRE_SECURE); requireCert = getBoolean(map, Parameters.PROV_REQUIRE_CERT); @@ -600,7 +603,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { return provName; } - static String getActiveProvName() { + public static String getActiveProvName() { return activeProvName; } @@ -670,18 +673,11 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { */ protected boolean doInsert(Insertable bean) { boolean rv; - DB db = new DB(); - Connection conn = null; - try { - conn = db.getConnection(); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { rv = bean.doInsert(conn); } catch (SQLException e) { rv = false; intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e); - } finally { - if (conn != null) { - db.release(conn); - } } return rv; } @@ -694,18 +690,11 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { */ protected boolean doUpdate(Updateable bean) { boolean rv; - DB db = new DB(); - Connection conn = null; - try { - conn = db.getConnection(); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { rv = bean.doUpdate(conn); } catch (SQLException e) { rv = false; intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e); - } finally { - if (conn != null) { - db.release(conn); - } } return rv; } @@ -718,18 +707,11 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { */ protected boolean doDelete(Deleteable bean) { boolean rv; - DB db = new DB(); - Connection conn = null; - try { - conn = db.getConnection(); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { rv = bean.doDelete(conn); } catch (SQLException e) { rv = false; intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e); - } finally { - if (conn != null) { - db.release(conn); - } } return rv; } @@ -987,7 +969,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { */ String getFeedPermission(String aafInstance, String userAction) { try { - Properties props = (new DB()).getProperties(); + Properties props = ProvRunner.getProvProperties(); String type = props.getProperty(AAF_CADI_FEED_TYPE, AAF_CADI_FEED); String action; switch (userAction) { @@ -1031,7 +1013,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider { */ String getSubscriberPermission(String aafInstance, String userAction) { try { - Properties props = (new DB()).getProperties(); + Properties props = ProvRunner.getProvProperties(); String type = props.getProperty(AAF_CADI_SUB_TYPE, AAF_CADI_SUB); String action; switch (userAction) { diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java index 12bd6ef4..4732183a 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java @@ -44,11 +44,12 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.json.JSONArray; +import org.onap.dmaap.datarouter.provisioning.utils.Poker; +import org.onap.dmaap.datarouter.provisioning.utils.SynchronizerTask; import org.onap.dmaap.datarouter.provisioning.beans.EventLogRecord; import 
org.onap.dmaap.datarouter.provisioning.beans.LogRecord; import org.onap.dmaap.datarouter.provisioning.beans.Parameters; import org.onap.dmaap.datarouter.provisioning.eelf.EelfMsgs; -import org.onap.dmaap.datarouter.provisioning.utils.DB; import org.onap.dmaap.datarouter.provisioning.utils.LogfileLoader; import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet; @@ -130,7 +131,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet; * /internal/route/* * * * URLs under this path are handled via the - * {@link org.onap.dmaap.datarouter.provisioning.RouteServlet} + * {@link RouteServlet} * * * @@ -231,14 +232,14 @@ public class InternalServlet extends ProxyServlet { eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER), getIdFromPath(req) + ""); String path = req.getPathInfo(); - Properties props = (new DB()).getProperties(); + Properties props = ProvRunner.getProvProperties(); if ("/halt".equals(path) && !req.isSecure()) { // request to halt the server - can ONLY come from localhost String remote = req.getRemoteAddr(); if (remote.equals(props.getProperty("org.onap.dmaap.datarouter.provserver.localhost"))) { intlogger.info("PROV0009 Request to HALT received."); resp.setStatus(HttpServletResponse.SC_OK); - Main.shutdown(); + ProvRunner.shutdown(); } else { intlogger.info("PROV0010 Disallowed request to HALT received from " + remote); resp.setStatus(HttpServletResponse.SC_FORBIDDEN); @@ -293,7 +294,7 @@ public class InternalServlet extends ProxyServlet { String logdir = props.getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir"); String logfile = path.substring(6); if (logdir != null && logfile != null && logfile.indexOf('/') < 0) { - File log = new File(logdir + "/" + logfile); + File log = new File(logdir + File.separator + logfile); if (log.exists() && log.isFile()) { resp.setStatus(HttpServletResponse.SC_OK); resp.setContentType(TEXT_CT); @@ -461,7 +462,7 @@ public class InternalServlet extends ProxyServlet { return; } String spooldir = - (new DB()).getProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir"); + ProvRunner.getProvProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir"); String spoolname = String.format("%d-%d-", System.currentTimeMillis(), Thread.currentThread().getId()); synchronized (lock) { // perhaps unnecessary, but it helps make the name unique @@ -496,7 +497,7 @@ public class InternalServlet extends ProxyServlet { } catch (Exception e) { intlogger.error("PROV0137 InternalServlet.doPost: " + e.getMessage(), e); } - if (((avail * 100) / total) < 5) { + if (total != 0 && ((avail * 100) / total) < 5) { elr.setResult(HttpServletResponse.SC_SERVICE_UNAVAILABLE); resp.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE); eventlogger.error(elr.toString()); @@ -570,7 +571,7 @@ public class InternalServlet extends ProxyServlet { private JSONArray generateLogfileList() { JSONArray ja = new JSONArray(); - Properties prop = (new DB()).getProperties(); + Properties prop = ProvRunner.getProvProperties(); String str = prop.getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir"); if (str != null) { String[] dirs = str.split(","); diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java index 0bea9f40..9cde4804 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java +++ 
b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java @@ -30,9 +30,9 @@ import com.att.eelf.configuration.EELFLogger; import com.att.eelf.configuration.EELFManager; import java.io.IOException; import java.sql.Connection; +import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; @@ -41,7 +41,6 @@ import java.util.Map; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - import org.onap.dmaap.datarouter.provisioning.beans.DeliveryRecord; import org.onap.dmaap.datarouter.provisioning.beans.EventLogRecord; import org.onap.dmaap.datarouter.provisioning.beans.ExpiryRecord; @@ -49,11 +48,8 @@ import org.onap.dmaap.datarouter.provisioning.beans.LOGJSONable; import org.onap.dmaap.datarouter.provisioning.beans.PublishRecord; import org.onap.dmaap.datarouter.provisioning.beans.Subscription; import org.onap.dmaap.datarouter.provisioning.eelf.EelfMsgs; -import org.onap.dmaap.datarouter.provisioning.utils.DB; import org.onap.dmaap.datarouter.provisioning.utils.LOGJSONObject; - - - +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** @@ -80,7 +76,7 @@ public class LogServlet extends BaseServlet { private final boolean isfeedlog; - public abstract class RowHandler { + public abstract static class RowHandler { private final ServletOutputStream out; private final String[] fields; private boolean firstrow; @@ -91,7 +87,7 @@ public class LogServlet extends BaseServlet { * @param fieldparam String field * @param bool boolean */ - public RowHandler(ServletOutputStream out, String fieldparam, boolean bool) { + RowHandler(ServletOutputStream out, String fieldparam, boolean bool) { this.out = out; this.firstrow = bool; this.fields = (fieldparam != null) ? fieldparam.split(":") : null; @@ -101,7 +97,7 @@ public class LogServlet extends BaseServlet { * Handling row from DB. 
* @param rs DB Resultset */ - public void handleRow(ResultSet rs) { + void handleRow(ResultSet rs) { try { LOGJSONable js = buildJSONable(rs); LOGJSONObject jo = js.asJSONObject(); @@ -128,8 +124,8 @@ public class LogServlet extends BaseServlet { public abstract LOGJSONable buildJSONable(ResultSet rs) throws SQLException; } - public class PublishRecordRowHandler extends RowHandler { - public PublishRecordRowHandler(ServletOutputStream out, String fields, boolean bool) { + public static class PublishRecordRowHandler extends RowHandler { + PublishRecordRowHandler(ServletOutputStream out, String fields, boolean bool) { super(out, fields, bool); } @@ -139,8 +135,8 @@ public class LogServlet extends BaseServlet { } } - public class DeliveryRecordRowHandler extends RowHandler { - public DeliveryRecordRowHandler(ServletOutputStream out, String fields, boolean bool) { + public static class DeliveryRecordRowHandler extends RowHandler { + DeliveryRecordRowHandler(ServletOutputStream out, String fields, boolean bool) { super(out, fields, bool); } @@ -150,8 +146,8 @@ public class LogServlet extends BaseServlet { } } - public class ExpiryRecordRowHandler extends RowHandler { - public ExpiryRecordRowHandler(ServletOutputStream out, String fields, boolean bool) { + public static class ExpiryRecordRowHandler extends RowHandler { + ExpiryRecordRowHandler(ServletOutputStream out, String fields, boolean bool) { super(out, fields, bool); } @@ -163,10 +159,9 @@ public class LogServlet extends BaseServlet { /** * This class must be created from either a {@link FeedLogServlet} or a {@link SubLogServlet}. - * @param isFeedLog boolean to handle those places where a feedlog request is different from - * a sublog request + * @param isFeedLog boolean to handle those places where a feedlog request is different from a sublog request */ - protected LogServlet(boolean isFeedLog) { + LogServlet(boolean isFeedLog) { this.isfeedlog = isFeedLog; } @@ -179,7 +174,7 @@ public class LogServlet extends BaseServlet { eelfLogger.info(EelfMsgs.ENTRY); try { eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, - req.getHeader(BEHALF_HEADER), getIdFromPath(req) + ""); + req.getHeader(BEHALF_HEADER), getIdFromPath(req) + ""); String message = "DELETE not allowed for the logURL."; EventLogRecord elr = new EventLogRecord(req); elr.setMessage(message); @@ -201,42 +196,36 @@ public class LogServlet extends BaseServlet { eelfLogger.info(EelfMsgs.ENTRY); try { eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, - req.getHeader(BEHALF_HEADER), getIdFromPath(req) + ""); + req.getHeader(BEHALF_HEADER), getIdFromPath(req) + ""); int id = getIdFromPath(req); if (id < 0) { sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, - "Missing or bad feed/subscription number.", eventlogger); + "Missing or bad feed/subscription number.", eventlogger); return; } Map map = buildMapFromRequest(req); if (map.get("err") != null) { sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, - "Invalid arguments: " + map.get("err"), eventlogger); + "Invalid arguments: " + map.get("err"), eventlogger); return; } // check Accept: header?? - resp.setStatus(HttpServletResponse.SC_OK); resp.setContentType(LOGLIST_CONTENT_TYPE); - try (ServletOutputStream out = resp.getOutputStream()) { final String fields = req.getParameter("fields"); - out.print("["); if (isfeedlog) { // Handle /feedlog/feedid request boolean firstrow = true; - // 1. 
Collect publish records for this feed RowHandler rh = new PublishRecordRowHandler(out, fields, firstrow); getPublishRecordsForFeed(id, rh, map); firstrow = rh.firstrow; - // 2. Collect delivery records for subscriptions to this feed rh = new DeliveryRecordRowHandler(out, fields, firstrow); getDeliveryRecordsForFeed(id, rh, map); firstrow = rh.firstrow; - // 3. Collect expiry records for subscriptions to this feed rh = new ExpiryRecordRowHandler(out, fields, firstrow); getExpiryRecordsForFeed(id, rh, map); @@ -247,11 +236,9 @@ public class LogServlet extends BaseServlet { // 1. Collect publish records for the feed this subscription feeds RowHandler rh = new PublishRecordRowHandler(out, fields, true); getPublishRecordsForFeed(sub.getFeedid(), rh, map); - // 2. Collect delivery records for this subscription rh = new DeliveryRecordRowHandler(out, fields, rh.firstrow); getDeliveryRecordsForSubscription(id, rh, map); - // 3. Collect expiry records for this subscription rh = new ExpiryRecordRowHandler(out, fields, rh.firstrow); getExpiryRecordsForSubscription(id, rh, map); @@ -275,7 +262,7 @@ public class LogServlet extends BaseServlet { eelfLogger.info(EelfMsgs.ENTRY); try { eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, - req.getHeader(BEHALF_HEADER),getIdFromPath(req) + ""); + req.getHeader(BEHALF_HEADER),getIdFromPath(req) + ""); String message = "PUT not allowed for the logURL."; EventLogRecord elr = new EventLogRecord(req); elr.setMessage(message); @@ -375,17 +362,22 @@ public class LogServlet extends BaseServlet { str = req.getParameter("expiryReason"); if (str != null) { map.put("type", "exp"); - if ("notRetryable".equals(str)) { - map.put(REASON_SQL, " AND REASON = 'notRetryable'"); - } else if ("retriesExhausted".equals(str)) { - map.put(REASON_SQL, " AND REASON = 'retriesExhausted'"); - } else if ("diskFull".equals(str)) { - map.put(REASON_SQL, " AND REASON = 'diskFull'"); - } else if ("other".equals(str)) { - map.put(REASON_SQL, " AND REASON = 'other'"); - } else { - map.put("err", "bad expiryReason"); - return map; + switch (str) { + case "notRetryable": + map.put(REASON_SQL, " AND REASON = 'notRetryable'"); + break; + case "retriesExhausted": + map.put(REASON_SQL, " AND REASON = 'retriesExhausted'"); + break; + case "diskFull": + map.put(REASON_SQL, " AND REASON = 'diskFull'"); + break; + case "other": + map.put(REASON_SQL, " AND REASON = 'other'"); + break; + default: + map.put("err", "bad expiryReason"); + return map; } } @@ -493,23 +485,14 @@ public class LogServlet extends BaseServlet { private void getRecordsForSQL(String sql, RowHandler rh) { intlogger.debug(sql); long start = System.currentTimeMillis(); - DB db = new DB(); - Connection conn = null; - try { - conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery(sql)) { - while (rs.next()) { - rh.handleRow(rs); - } - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement(sql); + ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + rh.handleRow(rs); } } catch (SQLException sqlException) { intlogger.info("Failed to get Records. 
Exception = " + sqlException.getMessage(),sqlException); - } finally { - if (conn != null) { - db.release(conn); - } } intlogger.debug("Time: " + (System.currentTimeMillis() - start) + " ms"); } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java deleted file mode 100644 index 3269c843..00000000 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java +++ /dev/null @@ -1,313 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.provisioning; - -import static java.lang.System.exit; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.File; -import java.io.IOException; -import java.security.Security; -import java.util.EnumSet; -import java.util.Properties; -import java.util.Timer; -import javax.servlet.DispatcherType; -import org.eclipse.jetty.http.HttpVersion; -import org.eclipse.jetty.server.Connector; -import org.eclipse.jetty.server.Handler; -import org.eclipse.jetty.server.HttpConfiguration; -import org.eclipse.jetty.server.HttpConnectionFactory; -import org.eclipse.jetty.server.NCSARequestLog; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.server.ServerConnector; -import org.eclipse.jetty.server.SslConnectionFactory; -import org.eclipse.jetty.server.handler.ContextHandlerCollection; -import org.eclipse.jetty.server.handler.DefaultHandler; -import org.eclipse.jetty.server.handler.HandlerCollection; -import org.eclipse.jetty.server.handler.RequestLogHandler; -import org.eclipse.jetty.servlet.FilterHolder; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; -import org.eclipse.jetty.util.ssl.SslContextFactory; -import org.eclipse.jetty.util.thread.QueuedThreadPool; -import org.onap.dmaap.datarouter.provisioning.utils.AafPropsUtils; -import org.onap.dmaap.datarouter.provisioning.utils.DB; -import org.onap.dmaap.datarouter.provisioning.utils.DRProvCadiFilter; -import org.onap.dmaap.datarouter.provisioning.utils.LogfileLoader; -import org.onap.dmaap.datarouter.provisioning.utils.PurgeLogDirTask; -import org.onap.dmaap.datarouter.provisioning.utils.ThrottleFilter; - -/** - *

- * A main class which may be used to start the provisioning server with an "embedded" Jetty server. Configuration is
- * done via the properties file provserver.properties, which should be in the CLASSPATH. The provisioning server
- * may also be packaged with a web.xml and started as a traditional webapp.
- * <p>
- * Most of the work of the provisioning server is carried out within the eight servlets (configured below) that are used
- * to handle each of the eight types of requests the server may receive. In addition, there are background threads
- * started to perform other tasks:
- * <ul>
- * <li>One background Thread runs the {@link LogfileLoader} in order to process incoming logfiles.
- * This Thread is created as a side effect of the first successful POST to the /internal/logs/ servlet.</li>
- * <li>One background Thread runs the {@link SynchronizerTask} which is used to periodically
- * synchronize the database between active and standby servers.</li>
- * <li>One background Thread runs the {@link Poker} which is used to notify the nodes whenever
- * provisioning data changes.</li>
- * <li>One task is run once a day to run {@link PurgeLogDirTask} which purges older logs from the
- * /opt/app/datartr/logs directory.</li>
- * </ul>
- * <p>
- * The provisioning server is stopped by issuing a GET to the URL http://127.0.0.1/internal/halt using curl or
- * some other such tool.
- * <p>

- * - * @author Robert Eby - * @version $Id: Main.java,v 1.12 2014/03/12 19:45:41 eby Exp $ - */ -public class Main { - - public static final EELFLogger intlogger = EELFManager.getInstance() - .getLogger("org.onap.dmaap.datarouter.provisioning.internal"); - - /** - * The one and only {@link Server} instance in this JVM. - */ - private static Server server; - static AafPropsUtils aafPropsUtils; - - /** - * Starts the Data Router Provisioning server. - * - * @param args not used - * @throws Exception if Jetty has a problem starting - */ - public static void main(String[] args) throws Exception { - Security.setProperty("networkaddress.cache.ttl", "4"); - // Check DB is accessible and contains the expected tables - if (!checkDatabase()) { - intlogger.error("Data Router Provisioning database init failure. Exiting."); - exit(1); - } - - intlogger.info("PROV0000 **** Data Router Provisioning Server starting...."); - - Security.setProperty("networkaddress.cache.ttl", "4"); - Properties provProperties = (new DB()).getProperties(); - int httpPort = Integer.parseInt(provProperties - .getProperty("org.onap.dmaap.datarouter.provserver.http.port", "8080")); - final int httpsPort = Integer.parseInt(provProperties - .getProperty("org.onap.dmaap.datarouter.provserver.https.port", "8443")); - - // Server's thread pool - QueuedThreadPool queuedThreadPool = new QueuedThreadPool(); - queuedThreadPool.setMinThreads(10); - queuedThreadPool.setMaxThreads(200); - queuedThreadPool.setDetailedDump(false); - - // The server itself - server = new Server(queuedThreadPool); - server.setStopAtShutdown(true); - server.setStopTimeout(5000); - server.setDumpAfterStart(false); - server.setDumpBeforeStop(false); - - // Request log configuration - NCSARequestLog ncsaRequestLog = new NCSARequestLog(); - ncsaRequestLog.setFilename(provProperties - .getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir") - + "/request.log.yyyy_mm_dd"); - ncsaRequestLog.setFilenameDateFormat("yyyyMMdd"); - ncsaRequestLog.setRetainDays(90); - ncsaRequestLog.setAppend(true); - ncsaRequestLog.setExtended(false); - ncsaRequestLog.setLogCookies(false); - ncsaRequestLog.setLogTimeZone("GMT"); - - RequestLogHandler requestLogHandler = new RequestLogHandler(); - requestLogHandler.setRequestLog(ncsaRequestLog); - server.setRequestLog(ncsaRequestLog); - - // HTTP configuration - HttpConfiguration httpConfiguration = new HttpConfiguration(); - httpConfiguration.setSecureScheme("https"); - httpConfiguration.setSecurePort(httpsPort); - httpConfiguration.setOutputBufferSize(32768); - httpConfiguration.setRequestHeaderSize(8192); - httpConfiguration.setResponseHeaderSize(8192); - httpConfiguration.setSendServerVersion(true); - httpConfiguration.setSendDateHeader(false); - - try { - AafPropsUtils.init(new File(provProperties.getProperty( - "org.onap.dmaap.datarouter.provserver.aafprops.path", - "/opt/app/osaaf/local/org.onap.dmaap-dr.props"))); - } catch (IOException e) { - intlogger.error("NODE0314 Failed to load AAF props. 
Exiting", e); - exit(1); - } - aafPropsUtils = AafPropsUtils.getInstance(); - - //HTTP Connector - HandlerCollection handlerCollection; - try (ServerConnector httpServerConnector = - new ServerConnector(server, new HttpConnectionFactory(httpConfiguration))) { - httpServerConnector.setPort(httpPort); - httpServerConnector.setAcceptQueueSize(2); - httpServerConnector.setIdleTimeout(300000); - - // SSL Context - SslContextFactory sslContextFactory = new SslContextFactory(); - sslContextFactory.setKeyStoreType(AafPropsUtils.KEYSTORE_TYPE_PROPERTY); - sslContextFactory.setKeyStorePath(aafPropsUtils.getKeystorePathProperty()); - sslContextFactory.setKeyStorePassword(aafPropsUtils.getKeystorePassProperty()); - sslContextFactory.setKeyManagerPassword(aafPropsUtils.getKeystorePassProperty()); - - String truststorePathProperty = aafPropsUtils.getTruststorePathProperty(); - if (truststorePathProperty != null && truststorePathProperty.length() > 0) { - intlogger.info("@@ TS -> " + truststorePathProperty); - sslContextFactory.setTrustStoreType(AafPropsUtils.TRUESTSTORE_TYPE_PROPERTY); - sslContextFactory.setTrustStorePath(truststorePathProperty); - sslContextFactory.setTrustStorePassword(aafPropsUtils.getTruststorePassProperty()); - } else { - sslContextFactory.setTrustStorePath(AafPropsUtils.DEFAULT_TRUSTSTORE); - sslContextFactory.setTrustStorePassword("changeit"); - } - - sslContextFactory.setWantClientAuth(true); - sslContextFactory.setExcludeCipherSuites( - "SSL_RSA_WITH_DES_CBC_SHA", - "SSL_DHE_RSA_WITH_DES_CBC_SHA", - "SSL_DHE_DSS_WITH_DES_CBC_SHA", - "SSL_RSA_EXPORT_WITH_RC4_40_MD5", - "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA", - "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA", - "SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA" - ); - sslContextFactory.addExcludeProtocols("SSLv3"); - sslContextFactory.setIncludeProtocols(provProperties.getProperty( - "org.onap.dmaap.datarouter.provserver.https.include.protocols", - "TLSv1.1|TLSv1.2").trim().split("\\|")); - - intlogger.info("Not supported protocols prov server:-" - + String.join(",", sslContextFactory.getExcludeProtocols())); - intlogger.info("Supported protocols prov server:-" - + String.join(",", sslContextFactory.getIncludeProtocols())); - intlogger.info("Not supported ciphers prov server:-" - + String.join(",", sslContextFactory.getExcludeCipherSuites())); - intlogger.info("Supported ciphers prov server:-" - + String.join(",", sslContextFactory.getIncludeCipherSuites())); - - // HTTPS configuration - HttpConfiguration httpsConfiguration = new HttpConfiguration(httpConfiguration); - httpsConfiguration.setRequestHeaderSize(8192); - - // HTTPS connector - try (ServerConnector httpsServerConnector = new ServerConnector(server, - new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString()), - new HttpConnectionFactory(httpsConfiguration))) { - - httpsServerConnector.setPort(httpsPort); - httpsServerConnector.setIdleTimeout(30000); - httpsServerConnector.setAcceptQueueSize(2); - - // Servlet and Filter configuration - ServletContextHandler servletContextHandler = new ServletContextHandler(0); - servletContextHandler.setContextPath("/"); - servletContextHandler.addServlet(new ServletHolder(new FeedServlet()), "/feed/*"); - servletContextHandler.addServlet(new ServletHolder(new FeedLogServlet()), "/feedlog/*"); - servletContextHandler.addServlet(new ServletHolder(new PublishServlet()), "/publish/*"); - servletContextHandler.addServlet(new ServletHolder(new SubscribeServlet()), "/subscribe/*"); - servletContextHandler.addServlet(new ServletHolder(new 
StatisticsServlet()), "/statistics/*"); - servletContextHandler.addServlet(new ServletHolder(new SubLogServlet()), "/sublog/*"); - servletContextHandler.addServlet(new ServletHolder(new GroupServlet()), "/group/*"); - servletContextHandler.addServlet(new ServletHolder(new SubscriptionServlet()), "/subs/*"); - servletContextHandler.addServlet(new ServletHolder(new InternalServlet()), "/internal/*"); - servletContextHandler.addServlet(new ServletHolder(new RouteServlet()), "/internal/route/*"); - servletContextHandler.addServlet(new ServletHolder(new DRFeedsServlet()), "/"); - servletContextHandler.addFilter(new FilterHolder(new ThrottleFilter()), - "/publish/*", EnumSet.of(DispatcherType.REQUEST)); - - //CADI Filter activation check - if (Boolean.parseBoolean(provProperties.getProperty( - "org.onap.dmaap.datarouter.provserver.cadi.enabled", "false"))) { - servletContextHandler.addFilter(new FilterHolder(new DRProvCadiFilter(true, aafPropsUtils.getPropAccess())), - "/*", EnumSet.of(DispatcherType.REQUEST)); - intlogger.info("PROV0001 AAF CADI Auth enabled for "); - } - - ContextHandlerCollection contextHandlerCollection = new ContextHandlerCollection(); - contextHandlerCollection.addHandler(servletContextHandler); - - // Server's Handler collection - handlerCollection = new HandlerCollection(); - handlerCollection.setHandlers(new Handler[]{contextHandlerCollection, new DefaultHandler()}); - handlerCollection.addHandler(requestLogHandler); - - server.setConnectors(new Connector[]{httpServerConnector, httpsServerConnector}); - } - } - server.setHandler(handlerCollection); - - // Daemon to clean up the log directory on a daily basis - Timer rolex = new Timer(); - rolex.scheduleAtFixedRate(new PurgeLogDirTask(), 0, 86400000L); // run once per day - - // Start LogfileLoader - LogfileLoader.getLoader(); - - try { - server.start(); - intlogger.info("Prov Server started-" + server.getState()); - } catch (Exception e) { - intlogger.error("Jetty failed to start. Exiting: " + e.getMessage(), e); - exit(1); - } - server.join(); - intlogger.info("PROV0001 **** AT&T Data Router Provisioning Server halted."); - } - - private static boolean checkDatabase() { - DB db = new DB(); - return db.runRetroFits(); - } - - /** - * Stop the Jetty server. - */ - static void shutdown() { - new Thread(() -> { - try { - server.stop(); - Thread.sleep(5000L); - exit(0); - } catch (Exception e) { - intlogger.error("Exception in Main.shutdown(): " + e.getMessage(), e); - } - }); - } -} diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java deleted file mode 100644 index 5f421b0f..00000000 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java +++ /dev/null @@ -1,328 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - -package org.onap.dmaap.datarouter.provisioning; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.InetAddress; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.UnknownHostException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.Timer; -import java.util.TimerTask; -import java.util.TreeSet; -import org.json.JSONException; -import org.json.JSONObject; -import org.json.JSONTokener; - -import org.onap.dmaap.datarouter.provisioning.beans.EgressRoute; -import org.onap.dmaap.datarouter.provisioning.beans.Feed; -import org.onap.dmaap.datarouter.provisioning.beans.Group; -import org.onap.dmaap.datarouter.provisioning.beans.IngressRoute; -import org.onap.dmaap.datarouter.provisioning.beans.NetworkRoute; -import org.onap.dmaap.datarouter.provisioning.beans.Parameters; -import org.onap.dmaap.datarouter.provisioning.beans.Subscription; -import org.onap.dmaap.datarouter.provisioning.utils.DB; - -/** - * This class handles the two timers (described in R1 Design Notes), and takes care of issuing the GET to each node of - * the URL to "poke". - * - * @author Robert Eby - * @version $Id: Poker.java,v 1.11 2014/01/08 16:13:47 eby Exp $ - */ - -public class Poker extends TimerTask { - - /** - * Template used to generate the URL to issue the GET against. - */ - private static final String POKE_URL_TEMPLATE = "http://%s/internal/fetchProv"; - - private static final Object lock = new Object(); - private static final String CARRIAGE_RETURN = "\n],\n"; - - /** - * This is a singleton -- there is only one Poker object in the server. - */ - private static Poker poker; - private long timer1; - private long timer2; - private String thisPod; // DNS name of this machine - private EELFLogger logger; - private String provString; - - - private Poker() { - timer1 = timer2 = 0; - logger = EELFManager.getInstance().getLogger("InternalLog"); - try { - thisPod = InetAddress.getLocalHost().getHostName(); - } catch (UnknownHostException e) { - thisPod = "*UNKNOWN_POD*"; // not a major problem - logger.info("UnknownHostException: Setting thisPod to \"*UNKNOWN_POD*\"", e); - } - provString = buildProvisioningString(); - Timer rolex = new Timer(); - rolex.scheduleAtFixedRate(this, 0L, 1000L); // Run once a second to check the timers - } - - /** - * Get the singleton Poker object. - * - * @return the Poker - */ - public static synchronized Poker getPoker() { - if (poker == null) { - poker = new Poker(); - } - return poker; - } - - /** - * This method sets the two timers described in the design notes. 
- * - * @param t1 the first timer controls how long to wait after a provisioning request before poking each node This - * timer can be reset if it has not "gone off". - * @param t2 the second timer set the outer bound on how long to wait. It cannot be reset. - */ - public void setTimers(long t1, long t2) { - synchronized (lock) { - if (timer1 == 0 || t1 > timer1) { - timer1 = t1; - } - if (timer2 == 0) { - timer2 = t2; - } - } - if (logger.isDebugEnabled()) { - logger.debug("Poker timers set to " + timer1 + " and " + timer2); - } - - - } - - /** - * Return the last provisioning string built. - * - * @return the last provisioning string built. - */ - public String getProvisioningString() { - return provString; - } - - /** - * The method to run at the predefined interval (once per second). This method checks to see if either of the two - * timers has expired, and if so, will rebuild the provisioning string, and poke all the nodes and other PODs. The - * timers are then reset to 0. - */ - @Override - public void run() { - try { - if (timer1 > 0) { - long now = System.currentTimeMillis(); - boolean fire = false; - synchronized (lock) { - if (now > timer1 || now > timer2) { - timer1 = timer2 = 0; - fire = true; - } - } - if (fire) { - pokeNodes(); - } - } - } catch (Exception e) { - logger.warn("PROV0020: Caught exception in Poker: " + e); - } - } - - private void pokeNodes() { - // Rebuild the prov string - provString = buildProvisioningString(); - // Only the active POD should poke nodes, etc. - boolean active = SynchronizerTask.getSynchronizer().isActive(); - if (active) { - // Poke all the DR nodes - for (String n : BaseServlet.getNodes()) { - pokeNode(n); - } - // Poke the pod that is not us - for (String n : BaseServlet.getPods()) { - if (n.length() > 0 && !n.equals(thisPod)) { - pokeNode(n); - } - } - } - } - - private void pokeNode(final String nodename) { - logger.debug("PROV0012 Poking node " + nodename + " ..."); - String nodeUrl = String.format(POKE_URL_TEMPLATE, nodename + ":" + DB.getHttpPort()); - Runnable runn = () -> { - try { - URL url = new URL(nodeUrl); - HttpURLConnection conn = (HttpURLConnection) url.openConnection(); - conn.setConnectTimeout(60000); //Fixes for Itrack DATARTR-3, poke timeout - conn.connect(); - conn.getContentLength(); // Force the GET through - conn.disconnect(); - } catch (MalformedURLException e) { - logger.warn( - "PROV0013 MalformedURLException Error poking node at " + nodeUrl + " : " + e - .getMessage(), e); - } catch (IOException e) { - logger.warn("PROV0013 IOException Error poking node at " + nodeUrl + " : " + e - .getMessage(), e); - } - }; - runn.run(); - } - - private String buildProvisioningString() { - StringBuilder sb = new StringBuilder("{\n"); - - // Append Feeds to the string - String pfx = "\n"; - sb.append("\"feeds\": ["); - for (Feed f : Feed.getAllFeeds()) { - sb.append(pfx); - sb.append(f.asJSONObject().toString()); - pfx = ",\n"; - } - sb.append(CARRIAGE_RETURN); - - //Append groups to the string - Rally:US708115 - 1610 - pfx = "\n"; - sb.append("\"groups\": ["); - for (Group s : Group.getAllgroups()) { - sb.append(pfx); - sb.append(s.asJSONObject().toString()); - pfx = ",\n"; - } - sb.append(CARRIAGE_RETURN); - - // Append Subscriptions to the string - pfx = "\n"; - sb.append("\"subscriptions\": ["); - for (Subscription s : Subscription.getAllSubscriptions()) { - sb.append(pfx); - if (s != null) { - sb.append(s.asJSONObject().toString()); - } - pfx = ",\n"; - } - sb.append(CARRIAGE_RETURN); - - // Append Parameters to the string 
- pfx = "\n"; - sb.append("\"parameters\": {"); - Map props = Parameters.getParameters(); - Set ivals = new HashSet<>(); - String intv = props.get("_INT_VALUES"); - if (intv != null) { - ivals.addAll(Arrays.asList(intv.split("\\|"))); - } - for (String key : new TreeSet(props.keySet())) { - String val = props.get(key); - sb.append(pfx); - sb.append(" \"").append(key).append("\": "); - if (ivals.contains(key)) { - // integer value - sb.append(val); - } else if (key.endsWith("S")) { - // Split and append array of strings - String[] pp = val.split("\\|"); - String p2 = ""; - sb.append("["); - for (String t : pp) { - sb.append(p2).append("\"").append(quote(t)).append("\""); - p2 = ","; - } - sb.append("]"); - } else { - sb.append("\"").append(quote(val)).append("\""); - } - pfx = ",\n"; - } - sb.append("\n},\n"); - - // Append Routes to the string - pfx = "\n"; - sb.append("\"ingress\": ["); - for (IngressRoute in : IngressRoute.getAllIngressRoutes()) { - sb.append(pfx); - sb.append(in.asJSONObject().toString()); - pfx = ",\n"; - } - sb.append(CARRIAGE_RETURN); - - pfx = "\n"; - sb.append("\"egress\": {"); - for (EgressRoute eg : EgressRoute.getAllEgressRoutes()) { - sb.append(pfx); - String str = eg.asJSONObject().toString(); - str = str.substring(1, str.length() - 1); - sb.append(str); - pfx = ",\n"; - } - sb.append("\n},\n"); - - pfx = "\n"; - sb.append("\"routing\": ["); - for (NetworkRoute ne : NetworkRoute.getAllNetworkRoutes()) { - sb.append(pfx); - sb.append(ne.asJSONObject().toString()); - pfx = ",\n"; - } - sb.append("\n]"); - sb.append("\n}"); - - // Convert to string and verify it is valid JSON - String tempProvString = sb.toString(); - try { - new JSONObject(new JSONTokener(tempProvString)); - } catch (JSONException e) { - logger.warn("PROV0016: Possible invalid prov string: " + e); - } - return tempProvString; - } - - private String quote(String str) { - StringBuilder sb = new StringBuilder(); - for (char ch : str.toCharArray()) { - if (ch == '\\' || ch == '"') { - sb.append('\\'); - } - sb.append(ch); - } - return sb.toString(); - } -} diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProvRunner.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProvRunner.java new file mode 100644 index 00000000..4078922e --- /dev/null +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProvRunner.java @@ -0,0 +1,331 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. 
+ * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.provisioning; + +import static java.lang.System.exit; +import static java.lang.System.getProperty; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.security.Security; +import java.util.EnumSet; +import java.util.Properties; +import java.util.Timer; +import javax.servlet.DispatcherType; +import org.eclipse.jetty.http.HttpVersion; +import org.eclipse.jetty.server.Connector; +import org.eclipse.jetty.server.Handler; +import org.eclipse.jetty.server.HttpConfiguration; +import org.eclipse.jetty.server.HttpConnectionFactory; +import org.eclipse.jetty.server.NCSARequestLog; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.SslConnectionFactory; +import org.eclipse.jetty.server.handler.ContextHandlerCollection; +import org.eclipse.jetty.server.handler.DefaultHandler; +import org.eclipse.jetty.server.handler.HandlerCollection; +import org.eclipse.jetty.server.handler.RequestLogHandler; +import org.eclipse.jetty.servlet.FilterHolder; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.servlet.ServletHolder; +import org.eclipse.jetty.util.ssl.SslContextFactory; +import org.eclipse.jetty.util.thread.QueuedThreadPool; +import org.onap.dmaap.datarouter.provisioning.utils.AafPropsUtils; +import org.onap.dmaap.datarouter.provisioning.utils.DRProvCadiFilter; +import org.onap.dmaap.datarouter.provisioning.utils.LogfileLoader; +import org.onap.dmaap.datarouter.provisioning.utils.Poker; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; +import org.onap.dmaap.datarouter.provisioning.utils.PurgeLogDirTask; +import org.onap.dmaap.datarouter.provisioning.utils.SynchronizerTask; +import org.onap.dmaap.datarouter.provisioning.utils.ThrottleFilter; + +/** + *

+ * A main class which may be used to start the provisioning server with an "embedded" Jetty server. Configuration is
+ * done via the properties file provserver.properties, which should be in the CLASSPATH. The provisioning server
+ * may also be packaged with a web.xml and started as a traditional webapp.
+ * <p>
+ * Most of the work of the provisioning server is carried out within the eight servlets (configured below) that are used
+ * to handle each of the eight types of requests the server may receive. In addition, there are background threads
+ * started to perform other tasks:
+ * <ul>
+ * <li>One background Thread runs the {@link LogfileLoader} in order to process incoming logfiles.
+ * This Thread is created as a side effect of the first successful POST to the /internal/logs/ servlet.</li>
+ * <li>One background Thread runs the {@link SynchronizerTask} which is used to periodically
+ * synchronize the database between active and standby servers.</li>
+ * <li>One background Thread runs the {@link Poker} which is used to notify the nodes whenever
+ * provisioning data changes.</li>
+ * <li>One task is run once a day to run {@link PurgeLogDirTask} which purges older logs from the
+ * /opt/app/datartr/logs directory.</li>
+ * </ul>
+ * <p>
+ * The provisioning server is stopped by issuing a GET to the URL http://127.0.0.1/internal/halt using curl or
+ * some other such tool.
+ * <p>

+ * + * @author Robert Eby + * @version $Id: Main.java,v 1.12 2014/03/12 19:45:41 eby Exp $ + */ +public class ProvRunner { + + public static final EELFLogger intlogger = EELFManager.getInstance() + .getLogger("org.onap.dmaap.datarouter.provisioning.internal"); + + /** + * The one and only {@link Server} instance in this JVM. + */ + private static Server server; + private static AafPropsUtils aafPropsUtils; + private static Properties provProperties; + + /** + * Starts the Data Router Provisioning server. + * + * @param args not used + * @throws Exception if Jetty has a problem starting + */ + public static void main(String[] args) throws Exception { + + intlogger.info("PROV0000 **** Data Router Provisioning Server starting...."); + + // Check DB is accessible and contains the expected tables + if (!ProvDbUtils.getInstance().initProvDB()) { + intlogger.error("Data Router Provisioning database init failure. Exiting."); + exit(1); + } + + int httpPort = Integer.parseInt( + getProvProperties().getProperty("org.onap.dmaap.datarouter.provserver.http.port", "8080")); + final int httpsPort = Integer.parseInt( + getProvProperties().getProperty("org.onap.dmaap.datarouter.provserver.https.port", "8443")); + + Security.setProperty("networkaddress.cache.ttl", "4"); + // Server's thread pool + QueuedThreadPool queuedThreadPool = new QueuedThreadPool(); + queuedThreadPool.setMinThreads(10); + queuedThreadPool.setMaxThreads(200); + queuedThreadPool.setDetailedDump(false); + + // The server itself + server = new Server(queuedThreadPool); + server.setStopAtShutdown(true); + server.setStopTimeout(5000); + server.setDumpAfterStart(false); + server.setDumpBeforeStop(false); + + // Request log configuration + NCSARequestLog ncsaRequestLog = new NCSARequestLog(); + ncsaRequestLog.setFilename(getProvProperties() + .getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir") + + "/request.log.yyyy_mm_dd"); + ncsaRequestLog.setFilenameDateFormat("yyyyMMdd"); + ncsaRequestLog.setRetainDays(90); + ncsaRequestLog.setAppend(true); + ncsaRequestLog.setExtended(false); + ncsaRequestLog.setLogCookies(false); + ncsaRequestLog.setLogTimeZone("GMT"); + + RequestLogHandler requestLogHandler = new RequestLogHandler(); + requestLogHandler.setRequestLog(ncsaRequestLog); + server.setRequestLog(ncsaRequestLog); + + // HTTP configuration + HttpConfiguration httpConfiguration = new HttpConfiguration(); + httpConfiguration.setSecureScheme("https"); + httpConfiguration.setSecurePort(httpsPort); + httpConfiguration.setOutputBufferSize(32768); + httpConfiguration.setRequestHeaderSize(8192); + httpConfiguration.setResponseHeaderSize(8192); + httpConfiguration.setSendServerVersion(true); + httpConfiguration.setSendDateHeader(false); + + try { + AafPropsUtils.init(new File(getProvProperties().getProperty( + "org.onap.dmaap.datarouter.provserver.aafprops.path", + "/opt/app/osaaf/local/org.onap.dmaap-dr.props"))); + } catch (IOException e) { + intlogger.error("NODE0314 Failed to load AAF props. 
Exiting", e); + exit(1); + } + aafPropsUtils = AafPropsUtils.getInstance(); + + //HTTP Connector + HandlerCollection handlerCollection; + try (ServerConnector httpServerConnector = + new ServerConnector(server, new HttpConnectionFactory(httpConfiguration))) { + httpServerConnector.setPort(httpPort); + httpServerConnector.setAcceptQueueSize(2); + httpServerConnector.setIdleTimeout(300000); + + // SSL Context + SslContextFactory sslContextFactory = new SslContextFactory(); + sslContextFactory.setKeyStoreType(AafPropsUtils.KEYSTORE_TYPE_PROPERTY); + sslContextFactory.setKeyStorePath(getAafPropsUtils().getKeystorePathProperty()); + sslContextFactory.setKeyStorePassword(getAafPropsUtils().getKeystorePassProperty()); + sslContextFactory.setKeyManagerPassword(getAafPropsUtils().getKeystorePassProperty()); + + String truststorePathProperty = getAafPropsUtils().getTruststorePathProperty(); + if (truststorePathProperty != null && truststorePathProperty.length() > 0) { + intlogger.info("@@ TS -> " + truststorePathProperty); + sslContextFactory.setTrustStoreType(AafPropsUtils.TRUESTSTORE_TYPE_PROPERTY); + sslContextFactory.setTrustStorePath(truststorePathProperty); + sslContextFactory.setTrustStorePassword(getAafPropsUtils().getTruststorePassProperty()); + } else { + sslContextFactory.setTrustStorePath(AafPropsUtils.DEFAULT_TRUSTSTORE); + sslContextFactory.setTrustStorePassword("changeit"); + } + + sslContextFactory.setWantClientAuth(true); + sslContextFactory.setExcludeCipherSuites( + "SSL_RSA_WITH_DES_CBC_SHA", + "SSL_DHE_RSA_WITH_DES_CBC_SHA", + "SSL_DHE_DSS_WITH_DES_CBC_SHA", + "SSL_RSA_EXPORT_WITH_RC4_40_MD5", + "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA", + "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA", + "SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA" + ); + sslContextFactory.addExcludeProtocols("SSLv3"); + sslContextFactory.setIncludeProtocols(getProvProperties().getProperty( + "org.onap.dmaap.datarouter.provserver.https.include.protocols", + "TLSv1.1|TLSv1.2").trim().split("\\|")); + + intlogger.info("Not supported protocols prov server:-" + + String.join(",", sslContextFactory.getExcludeProtocols())); + intlogger.info("Supported protocols prov server:-" + + String.join(",", sslContextFactory.getIncludeProtocols())); + intlogger.info("Not supported ciphers prov server:-" + + String.join(",", sslContextFactory.getExcludeCipherSuites())); + intlogger.info("Supported ciphers prov server:-" + + String.join(",", sslContextFactory.getIncludeCipherSuites())); + + // HTTPS configuration + HttpConfiguration httpsConfiguration = new HttpConfiguration(httpConfiguration); + httpsConfiguration.setRequestHeaderSize(8192); + + // HTTPS connector + try (ServerConnector httpsServerConnector = new ServerConnector(server, + new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString()), + new HttpConnectionFactory(httpsConfiguration))) { + + httpsServerConnector.setPort(httpsPort); + httpsServerConnector.setIdleTimeout(30000); + httpsServerConnector.setAcceptQueueSize(2); + + // Servlet and Filter configuration + ServletContextHandler servletContextHandler = new ServletContextHandler(0); + servletContextHandler.setContextPath("/"); + servletContextHandler.addServlet(new ServletHolder(new FeedServlet()), "/feed/*"); + servletContextHandler.addServlet(new ServletHolder(new FeedLogServlet()), "/feedlog/*"); + servletContextHandler.addServlet(new ServletHolder(new PublishServlet()), "/publish/*"); + servletContextHandler.addServlet(new ServletHolder(new SubscribeServlet()), "/subscribe/*"); + 
servletContextHandler.addServlet(new ServletHolder(new StatisticsServlet()), "/statistics/*"); + servletContextHandler.addServlet(new ServletHolder(new SubLogServlet()), "/sublog/*"); + servletContextHandler.addServlet(new ServletHolder(new GroupServlet()), "/group/*"); + servletContextHandler.addServlet(new ServletHolder(new SubscriptionServlet()), "/subs/*"); + servletContextHandler.addServlet(new ServletHolder(new InternalServlet()), "/internal/*"); + servletContextHandler.addServlet(new ServletHolder(new RouteServlet()), "/internal/route/*"); + servletContextHandler.addServlet(new ServletHolder(new DRFeedsServlet()), "/"); + servletContextHandler.addFilter(new FilterHolder(new ThrottleFilter()), + "/publish/*", EnumSet.of(DispatcherType.REQUEST)); + + //CADI Filter activation check + if (Boolean.parseBoolean(getProvProperties().getProperty( + "org.onap.dmaap.datarouter.provserver.cadi.enabled", "false"))) { + servletContextHandler.addFilter(new FilterHolder(new DRProvCadiFilter(true, getAafPropsUtils().getPropAccess())), + "/*", EnumSet.of(DispatcherType.REQUEST)); + intlogger.info("PROV0001 AAF CADI Auth enabled for "); + } + + ContextHandlerCollection contextHandlerCollection = new ContextHandlerCollection(); + contextHandlerCollection.addHandler(servletContextHandler); + + // Server's Handler collection + handlerCollection = new HandlerCollection(); + handlerCollection.setHandlers(new Handler[]{contextHandlerCollection, new DefaultHandler()}); + handlerCollection.addHandler(requestLogHandler); + + server.setConnectors(new Connector[]{httpServerConnector, httpsServerConnector}); + } + } + server.setHandler(handlerCollection); + + // Daemon to clean up the log directory on a daily basis + Timer rolex = new Timer(); + rolex.scheduleAtFixedRate(new PurgeLogDirTask(), 0, 86400000L); // run once per day + + // Start LogfileLoader + LogfileLoader.getLoader(); + + try { + server.start(); + intlogger.info("Prov Server started-" + server.getState()); + } catch (Exception e) { + intlogger.error("Jetty failed to start. Exiting: " + e.getMessage(), e); + exit(1); + } + server.join(); + intlogger.info("PROV0001 **** AT&T Data Router Provisioning Server halted."); + } + + /** + * Stop the Jetty server. 
+ */ + public static void shutdown() { + new Thread(() -> { + try { + server.stop(); + Thread.sleep(5000L); + exit(0); + } catch (Exception e) { + intlogger.error("Exception in Main.shutdown(): " + e.getMessage(), e); + } + }); + } + + public static Properties getProvProperties() { + if (provProperties == null) { + try { + provProperties = new Properties(); + provProperties.load(new FileInputStream(getProperty( + "org.onap.dmaap.datarouter.provserver.properties", + "/opt/app/datartr/etc/provserver.properties"))); + } catch (IOException e) { + intlogger.error("Failed to load PROV properties: " + e.getMessage(), e); + exit(1); + } + } + return provProperties; + } + + public static AafPropsUtils getAafPropsUtils() { + return aafPropsUtils; + } +} diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java index 72d55a4c..d84e4925 100755 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java @@ -36,7 +36,6 @@ import java.security.KeyStore; import java.security.KeyStoreException; import java.util.Collections; import java.util.List; -import java.util.Properties; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; @@ -54,7 +53,7 @@ import org.apache.http.entity.BasicHttpEntity; import org.apache.http.impl.client.AbstractHttpClient; import org.apache.http.impl.client.DefaultHttpClient; import org.onap.dmaap.datarouter.provisioning.utils.AafPropsUtils; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.SynchronizerTask; import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities; /** @@ -82,12 +81,12 @@ public class ProxyServlet extends BaseServlet { try { // Set up keystore String type = AafPropsUtils.KEYSTORE_TYPE_PROPERTY; - String store = Main.aafPropsUtils.getKeystorePathProperty(); - String pass = Main.aafPropsUtils.getKeystorePassProperty(); + String store = ProvRunner.getAafPropsUtils().getKeystorePathProperty(); + String pass = ProvRunner.getAafPropsUtils().getKeystorePassProperty(); KeyStore keyStore = readStore(store, pass, type); // Set up truststore - store = Main.aafPropsUtils.getTruststorePathProperty(); - pass = Main.aafPropsUtils.getTruststorePassProperty(); + store = ProvRunner.getAafPropsUtils().getTruststorePathProperty(); + pass = ProvRunner.getAafPropsUtils().getTruststorePassProperty(); if (store == null || store.length() == 0) { store = AafPropsUtils.DEFAULT_TRUSTSTORE; pass = "changeit"; @@ -97,7 +96,7 @@ public class ProxyServlet extends BaseServlet { // We are connecting with the node name, but the certificate will have the CNAME // So we need to accept a non-matching certificate name SSLSocketFactory socketFactory = new SSLSocketFactory(keyStore, - Main.aafPropsUtils.getKeystorePassProperty(), trustStore); + ProvRunner.getAafPropsUtils().getKeystorePassProperty(), trustStore); socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); sch = new Scheme("https", 443, socketFactory); inited = true; diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java index 2ca24539..35205aa9 100644 --- 
a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java @@ -41,10 +41,10 @@ import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONTokener; +import org.onap.dmaap.datarouter.provisioning.utils.Poker; import org.onap.dmaap.datarouter.provisioning.beans.EventLogRecord; import org.onap.dmaap.datarouter.provisioning.beans.IngressRoute; import org.onap.dmaap.datarouter.provisioning.eelf.EelfMsgs; -import org.onap.dmaap.datarouter.provisioning.utils.DB; /** * This servlet handles redirects for the <publishURL> on the provisioning server, which is generated by the @@ -158,7 +158,8 @@ public class PublishServlet extends BaseServlet { } else { // Generate new URL String nextnode = getRedirectNode(feedid, req); - nextnode = nextnode + ":" + DB.getHttpsPort(); + nextnode = nextnode + ":" + ProvRunner.getProvProperties().getProperty( + "org.onap.dmaap.datarouter.provserver.https.port", "8443"); String newurl = "https://" + nextnode + "/publish" + req.getPathInfo(); String qs = req.getQueryString(); if (qs != null) { diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java index 9f113efd..76991ca0 100755 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java @@ -42,8 +42,8 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.json.JSONException; import org.onap.dmaap.datarouter.provisioning.beans.EventLogRecord; -import org.onap.dmaap.datarouter.provisioning.utils.DB; import org.onap.dmaap.datarouter.provisioning.utils.LOGJSONObject; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** @@ -70,7 +70,7 @@ public class StatisticsServlet extends BaseServlet { private static final String SQL_TYPE_PUB = ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,"; private static final String SQL_SELECT_SUM = "(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("; private static final String SQL_PUBLISH_LENGTH = ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED,"; - private static final String SQL_SUBSCRIBER_URL = " sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL,"; + private static final String SQL_SUBSCRIBER_URL = " sum(m.content_length) as DELIVERED_LENGTH, SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL,"; private static final String SQL_SUB_ID = " e.DELIVERY_SUBID as SUBID, "; private static final String SQL_DELIVERY_TIME = " e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, "; private static final String SQL_AVERAGE_DELAY = " AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS"; @@ -101,7 +101,7 @@ public class StatisticsServlet extends BaseServlet { Map map = buildMapFromRequest(req); if (map.get("err") != null) { sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, - "Invalid arguments: " + map.get("err"), eventlogger); + "Invalid arguments: " + map.get("err"), eventlogger); return; } // check Accept: header?? 
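The StatisticsServlet and bean hunks that follow all replace the old DB helper (manual connection handling with db.release(conn) in finally blocks) with the new ProvDbUtils singleton used inside try-with-resources, so connections, statements and result sets are closed automatically. A minimal sketch of the new access pattern, assuming the ProvDbUtils class introduced in this patch is on the classpath; the query is the EgressRoute lookup that appears further down, and the wrapper class is illustrative only.

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    final class ProvDbAccessSketch {
        // Returns the node id routed for a subscription, or null if no route exists.
        static Integer nodeForSub(int subid) {
            try (Connection conn = ProvDbUtils.getInstance().getConnection();
                 PreparedStatement ps = conn.prepareStatement(
                         "select NODEID from EGRESS_ROUTES where SUBID = ?")) {
                ps.setInt(1, subid);
                try (ResultSet rs = ps.executeQuery()) {
                    return rs.next() ? rs.getInt("NODEID") : null;
                }
            } catch (SQLException e) {
                // the real code logs through EELF; a sketch can simply surface the failure
                throw new IllegalStateException("EGRESS_ROUTES lookup failed", e);
            }
        }
    }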
@@ -123,7 +123,7 @@ public class StatisticsServlet extends BaseServlet { try { groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter(GROUPID))); map.put(FEEDIDS, groupid1.toString()); - } catch (NumberFormatException | SQLException e) { + } catch (NumberFormatException e) { eventlogger.error("PROV0172 StatisticsServlet.doGet: " + e.getMessage(), e); } } @@ -134,7 +134,7 @@ public class StatisticsServlet extends BaseServlet { groupid1.append(","); groupid1.append(req.getParameter(FEEDID).replace("|", ",")); map.put(FEEDIDS, groupid1.toString()); - } catch (NumberFormatException | SQLException e) { + } catch (NumberFormatException e) { eventlogger.error("PROV0173 StatisticsServlet.doGet: " + e.getMessage(), e); } } @@ -184,7 +184,7 @@ public class StatisticsServlet extends BaseServlet { * @throws IOException input/output exception * @throws SQLException SQL exception */ - public void rsToCSV(ResultSet rs, ServletOutputStream out) throws IOException, SQLException { + private void rsToCSV(ResultSet rs, ServletOutputStream out) throws IOException, SQLException { String header = "FEEDNAME,FEEDID,FILES_PUBLISHED,PUBLISH_LENGTH, FILES_DELIVERED, " + "DELIVERED_LENGTH, SUBSCRIBER_URL, SUBID, PUBLISH_TIME,DELIVERY_TIME, AverageDelay\n"; out.write(header.getBytes()); @@ -252,41 +252,24 @@ public class StatisticsServlet extends BaseServlet { /** * getFeedIdsByGroupId - Getting FEEDID's by GROUP ID. * - * @throws SQLException Query SQLException. + * @param groupIds Integer ref of Group */ - private StringBuilder getFeedIdsByGroupId(int groupIds) throws SQLException { - DB db = null; - Connection conn = null; - ResultSet resultSet = null; - String sqlGoupid = null; + private StringBuilder getFeedIdsByGroupId(int groupIds) { StringBuilder feedIds = new StringBuilder(); - try { - db = new DB(); - conn = db.getConnection(); - sqlGoupid = " SELECT FEEDID from FEEDS WHERE GROUPID = ?"; - try (PreparedStatement prepareStatement = conn.prepareStatement(sqlGoupid)) { - prepareStatement.setInt(1, groupIds); - resultSet = prepareStatement.executeQuery(); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement prepareStatement = conn.prepareStatement( + " SELECT FEEDID from FEEDS WHERE GROUPID = ?")) { + prepareStatement.setInt(1, groupIds); + try (ResultSet resultSet = prepareStatement.executeQuery()) { while (resultSet.next()) { feedIds.append(resultSet.getInt(FEEDID)); feedIds.append(","); } - feedIds.deleteCharAt(feedIds.length() - 1); - eventlogger.info("PROV0177 StatisticsServlet.getFeedIdsByGroupId: feedIds = " + feedIds.toString()); } + feedIds.deleteCharAt(feedIds.length() - 1); + eventlogger.info("PROV0177 StatisticsServlet.getFeedIdsByGroupId: feedIds = " + feedIds.toString()); } catch (SQLException e) { eventlogger.error("PROV0175 StatisticsServlet.getFeedIdsByGroupId: " + e.getMessage(), e); - } finally { - try { - if (resultSet != null) { - resultSet.close(); - } - if (conn != null) { - db.release(conn); - } - } catch (Exception e) { - eventlogger.error("PROV0176 StatisticsServlet.getFeedIdsByGroupId: " + e.getMessage(), e); - } } return feedIds; } @@ -323,11 +306,11 @@ public class StatisticsServlet extends BaseServlet { if (endTime == null && startTime == null) { sql = SQL_SELECT_NAME + feedids + SQL_FEED_ID + SQL_SELECT_COUNT + feedids + SQL_TYPE_PUB - + SQL_SELECT_SUM - + feedids + SQL_PUBLISH_LENGTH + + SQL_SELECT_SUM + + feedids + SQL_PUBLISH_LENGTH + SQL_SUBSCRIBER_URL + SQL_SUB_ID + SQL_DELIVERY_TIME + SQL_AVERAGE_DELAY + SQL_JOIN_RECORDS - + 
feedids + ") " + subid - + SQL_STATUS_204 + SQL_GROUP_SUB_ID; + + feedids + ") " + subid + + SQL_STATUS_204 + SQL_GROUP_SUB_ID; return sql; } else if (startTime != null && endTime == null) { @@ -338,10 +321,10 @@ public class StatisticsServlet extends BaseServlet { long compareTime = currentTimeInMilli - inputTimeInMilli; sql = SQL_SELECT_NAME + feedids + SQL_FEED_ID + SQL_SELECT_COUNT + feedids + SQL_TYPE_PUB - + SQL_SELECT_SUM - + feedids + SQL_PUBLISH_LENGTH + + SQL_SELECT_SUM + + feedids + SQL_PUBLISH_LENGTH + SQL_SUBSCRIBER_URL + SQL_SUB_ID + SQL_DELIVERY_TIME + SQL_AVERAGE_DELAY + SQL_JOIN_RECORDS - + feedids + ") " + subid + + feedids + ") " + subid + SQL_STATUS_204 + " and e.event_time>=" + compareTime + SQL_GROUP_SUB_ID; return sql; @@ -354,10 +337,10 @@ public class StatisticsServlet extends BaseServlet { long endInMillis = endDate.getTime(); sql = SQL_SELECT_NAME + feedids + SQL_FEED_ID + SQL_SELECT_COUNT + feedids + SQL_TYPE_PUB - + SQL_SELECT_SUM - + feedids + SQL_PUBLISH_LENGTH + SQL_SUBSCRIBER_URL - + SQL_SUB_ID + SQL_DELIVERY_TIME + SQL_AVERAGE_DELAY + SQL_JOIN_RECORDS + feedids + ")" + subid + SQL_STATUS_204 - +" and e.event_time between " + startInMillis + " and " + endInMillis + SQL_GROUP_SUB_ID; + + SQL_SELECT_SUM + + feedids + SQL_PUBLISH_LENGTH + SQL_SUBSCRIBER_URL + + SQL_SUB_ID + SQL_DELIVERY_TIME + SQL_AVERAGE_DELAY + SQL_JOIN_RECORDS + feedids + ")" + subid + SQL_STATUS_204 + +" and e.event_time between " + startInMillis + " and " + endInMillis + SQL_GROUP_SUB_ID; return sql; } @@ -517,29 +500,27 @@ public class StatisticsServlet extends BaseServlet { } private void getRecordsForSQL(Map map, String outputType, ServletOutputStream out, - HttpServletResponse resp) { + HttpServletResponse resp) { try { String filterQuery = this.queryGeneretor(map); eventlogger.debug("SQL Query for Statistics resultset. 
" + filterQuery); intlogger.debug(filterQuery); long start = System.currentTimeMillis(); - DB db = new DB(); - try (Connection conn = db.getConnection()) { - try (ResultSet rs = conn.prepareStatement(filterQuery).executeQuery()) { - if ("csv".equals(outputType)) { - resp.setContentType("application/octet-stream"); - Date date = new Date(); - SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-YYYY HH:mm:ss"); - resp.setHeader("Content-Disposition", - "attachment; filename=\"result:" + dateFormat.format(date) + ".csv\""); - eventlogger.info("Generating CSV file from Statistics resultset"); - rsToCSV(rs, out); - } else { - eventlogger.info("Generating JSON for Statistics resultset"); - this.rsToJson(rs, out); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement(filterQuery); + ResultSet rs = ps.executeQuery()) { + if ("csv".equals(outputType)) { + resp.setContentType("application/octet-stream"); + Date date = new Date(); + SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-YYYY HH:mm:ss"); + resp.setHeader("Content-Disposition", + "attachment; filename=\"result:" + dateFormat.format(date) + ".csv\""); + eventlogger.info("Generating CSV file from Statistics resultset"); + rsToCSV(rs, out); + } else { + eventlogger.info("Generating JSON for Statistics resultset"); + this.rsToJson(rs, out); } - db.release(conn); } catch (SQLException e) { eventlogger.error("SQLException:" + e); } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java index 03bc983e..1f7c291d 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java @@ -39,6 +39,7 @@ import javax.servlet.http.HttpServletResponse; import org.json.JSONException; import org.json.JSONObject; import org.onap.dmaap.datarouter.authz.AuthorizationResponse; +import org.onap.dmaap.datarouter.provisioning.utils.SynchronizerTask; import org.onap.dmaap.datarouter.provisioning.beans.EventLogRecord; import org.onap.dmaap.datarouter.provisioning.beans.Subscription; import org.onap.dmaap.datarouter.provisioning.eelf.EelfMsgs; diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java deleted file mode 100644 index 2a907fb7..00000000 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java +++ /dev/null @@ -1,688 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.provisioning; - -import static org.onap.dmaap.datarouter.provisioning.BaseServlet.TEXT_CT; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; - -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.InputStream; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.security.KeyStore; -import java.sql.Connection; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.Timer; -import java.util.TimerTask; -import java.util.TreeSet; - -import javax.servlet.http.HttpServletResponse; - -import org.apache.http.HttpEntity; -import org.apache.http.HttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.conn.scheme.Scheme; -import org.apache.http.conn.ssl.SSLSocketFactory; -import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.impl.client.AbstractHttpClient; -import org.apache.http.impl.client.DefaultHttpClient; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; -import org.json.JSONTokener; -import org.onap.dmaap.datarouter.provisioning.beans.EgressRoute; -import org.onap.dmaap.datarouter.provisioning.beans.Feed; -import org.onap.dmaap.datarouter.provisioning.beans.Group; -import org.onap.dmaap.datarouter.provisioning.beans.IngressRoute; -import org.onap.dmaap.datarouter.provisioning.beans.NetworkRoute; -import org.onap.dmaap.datarouter.provisioning.beans.Parameters; -import org.onap.dmaap.datarouter.provisioning.beans.Subscription; -import org.onap.dmaap.datarouter.provisioning.beans.Syncable; -import org.onap.dmaap.datarouter.provisioning.utils.AafPropsUtils; -import org.onap.dmaap.datarouter.provisioning.utils.DB; -import org.onap.dmaap.datarouter.provisioning.utils.LogfileLoader; -import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet; -import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities; - -/** - * This class handles synchronization between provisioning servers (PODs). It has three primary functions: - *
- * <ol>
- * <li>Checking DNS once per minute to see which POD the DNS CNAME points to. The CNAME will point to
- * the active (master) POD.</li>
- * <li>On non-master (standby) PODs, fetches provisioning data and logs in order to keep MariaDB in sync.</li>
- * <li>Providing information to other parts of the system as to the current role (ACTIVE_POD, STANDBY_POD, UNKNOWN_POD)
- * of this POD.</li>
- * </ol>
- *

- * For this to work correctly, the following code needs to be placed at the beginning of main().

- * - * Security.setProperty("networkaddress.cache.ttl", "10"); - * - * - * @author Robert Eby - * @version $Id: SynchronizerTask.java,v 1.10 2014/03/21 13:50:10 eby Exp $ - */ - -public class SynchronizerTask extends TimerTask { - - /** - * This is a singleton -- there is only one SynchronizerTask object in the server. - */ - private static SynchronizerTask synctask; - - /** - * This POD is unknown -- not on the list of PODs. - */ - public static final int UNKNOWN_POD = 0; - /** - * This POD is active -- on the list of PODs, and the DNS CNAME points to us. - */ - public static final int ACTIVE_POD = 1; - /** - * This POD is standby -- on the list of PODs, and the DNS CNAME does not point to us. - */ - public static final int STANDBY_POD = 2; - - private static final String[] stnames = {"UNKNOWN_POD", "ACTIVE_POD", "STANDBY_POD"}; - private static final long ONE_HOUR = 60 * 60 * 1000L; - - private long nextMsg = 0; // only display the "Current podState" msg every 5 mins. - - private final EELFLogger logger; - private final Timer rolex; - private final String spooldir; - private int podState; - private boolean doFetch; - private long nextsynctime; - private AbstractHttpClient httpclient = null; - - @SuppressWarnings("deprecation") - private SynchronizerTask() { - logger = EELFManager.getInstance().getLogger("InternalLog"); - rolex = new Timer(); - spooldir = (new DB()).getProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir"); - podState = UNKNOWN_POD; - doFetch = true; // start off with a fetch - nextsynctime = 0; - - logger.info("PROV5000: Sync task starting, server podState is UNKNOWN_POD"); - try { - // Set up keystore - String type = AafPropsUtils.KEYSTORE_TYPE_PROPERTY; - String store = Main.aafPropsUtils.getKeystorePathProperty(); - String pass = Main.aafPropsUtils.getKeystorePassProperty(); - KeyStore keyStore = KeyStore.getInstance(type); - try (FileInputStream instream = new FileInputStream(new File(store))) { - keyStore.load(instream, pass.toCharArray()); - - } - // Set up truststore - store = Main.aafPropsUtils.getTruststorePathProperty(); - pass = Main.aafPropsUtils.getTruststorePassProperty(); - KeyStore trustStore = null; - if (store != null && store.length() > 0) { - trustStore = KeyStore.getInstance(AafPropsUtils.TRUESTSTORE_TYPE_PROPERTY); - try (FileInputStream instream = new FileInputStream(new File(store))) { - trustStore.load(instream, pass.toCharArray()); - - } - } - - // We are connecting with the node name, but the certificate will have the CNAME - // So we need to accept a non-matching certificate name - String keystorepass = Main.aafPropsUtils.getKeystorePassProperty(); - try (AbstractHttpClient hc = new DefaultHttpClient()) { - SSLSocketFactory socketFactory = - (trustStore == null) - ? new SSLSocketFactory(keyStore, keystorepass) - : new SSLSocketFactory(keyStore, keystorepass, trustStore); - socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); - Scheme sch = new Scheme("https", 443, socketFactory); - hc.getConnectionManager().getSchemeRegistry().register(sch); - httpclient = hc; - } - setSynchTimer(new DB().getProperties().getProperty( - "org.onap.dmaap.datarouter.provserver.sync_interval", "5000")); - } catch (Exception e) { - logger.warn("PROV5005: Problem starting the synchronizer: " + e); - } - } - - private void setSynchTimer(String strInterval) { - // Run once every 5 seconds to check DNS, etc. 
- long interval; - try { - interval = Long.parseLong(strInterval); - } catch (NumberFormatException e) { - interval = 5000L; - } - rolex.scheduleAtFixedRate(this, 0L, interval); - } - - /** - * Get the singleton SynchronizerTask object. - * - * @return the SynchronizerTask - */ - public static synchronized SynchronizerTask getSynchronizer() { - if (synctask == null) { - synctask = new SynchronizerTask(); - } - return synctask; - } - - /** - * What is the podState of this POD?. - * - * @return one of ACTIVE_POD, STANDBY_POD, UNKNOWN_POD - */ - public int getPodState() { - return podState; - } - - /** - * Is this the active POD?. - * - * @return true if we are active (the master), false otherwise - */ - public boolean isActive() { - return podState == ACTIVE_POD; - } - - /** - * This method is used to signal that another POD (the active POD) has sent us a /fetchProv request, and that we - * should re-synchronize with the master. - */ - void doFetch() { - doFetch = true; - } - - /** - * Runs once a minute in order to
- * <ol>
- * <li>lookup DNS names,</li>
- * <li>determine the podState of this POD,</li>
- * <li>if this is a standby POD, and the fetch flag is set, perform a fetch of podState from the active POD.</li>
- * <li>if this is a standby POD, check if there are any new log records to be replicated.</li>
- * </ol>
. - */ - @Override - public void run() { - try { - podState = lookupState(); - if (podState == STANDBY_POD) { - // Only copy provisioning data FROM the active server TO the standby - if (doFetch || (System.currentTimeMillis() >= nextsynctime)) { - syncProvisioningData(); - logger.info("PROV5013: Sync completed."); - nextsynctime = System.currentTimeMillis() + ONE_HOUR; - } - } else { - // Don't do fetches on non-standby PODs - doFetch = false; - } - - // Fetch DR logs as needed - server to server - LogfileLoader lfl = LogfileLoader.getLoader(); - if (lfl.isIdle()) { - // Only fetch new logs if the loader is waiting for them. - logger.trace("Checking for logs to replicate..."); - RLEBitSet local = lfl.getBitSet(); - RLEBitSet remote = readRemoteLoglist(); - remote.andNot(local); - if (!remote.isEmpty()) { - logger.debug(" Replicating logs: " + remote); - replicateDataRouterLogs(remote); - } - } - } catch (Exception e) { - logger.warn("PROV0020: Caught exception in SynchronizerTask: " + e); - } - } - - private void syncProvisioningData() { - logger.debug("Initiating a sync..."); - JSONObject jo = readProvisioningJson(); - if (jo != null) { - doFetch = false; - syncFeeds(jo.getJSONArray("feeds")); - syncSubs(jo.getJSONArray("subscriptions")); - syncGroups(jo.getJSONArray("groups")); //Rally:US708115 - 1610 - syncParams(jo.getJSONObject("parameters")); - // The following will not be present in a version=1.0 provfeed - JSONArray ja = jo.optJSONArray("ingress"); - if (ja != null) { - syncIngressRoutes(ja); - } - JSONObject j2 = jo.optJSONObject("egress"); - if (j2 != null) { - syncEgressRoutes(j2); - } - ja = jo.optJSONArray("routing"); - if (ja != null) { - syncNetworkRoutes(ja); - } - } - } - - /** - * This method is used to lookup the CNAME that points to the active server. - * It returns 0 (UNKNOWN_POD), 1(ACTIVE_POD), or (STANDBY_POD) to indicate the podState of this server. - * - * @return the current podState - */ - int lookupState() { - int newPodState = UNKNOWN_POD; - try { - InetAddress myaddr = InetAddress.getLocalHost(); - if (logger.isTraceEnabled()) { - logger.trace("My address: " + myaddr); - } - String thisPod = myaddr.getHostName(); - Set pods = new TreeSet<>(Arrays.asList(BaseServlet.getPods())); - if (pods.contains(thisPod)) { - InetAddress pserver = InetAddress.getByName(BaseServlet.getActiveProvName()); - newPodState = myaddr.equals(pserver) ? ACTIVE_POD : STANDBY_POD; - if (logger.isDebugEnabled() && System.currentTimeMillis() >= nextMsg) { - logger.debug("Active POD = " + pserver + ", Current podState is " + stnames[newPodState]); - nextMsg = System.currentTimeMillis() + (5 * 60 * 1000L); - } - } else { - logger.warn("PROV5003: My name (" + thisPod + ") is missing from the list of provisioning servers."); - } - } catch (UnknownHostException e) { - logger.warn("PROV5002: Cannot determine the name of this provisioning server.", e); - } - - if (newPodState != podState) { - logger.info(String.format("PROV5001: Server podState changed from %s to %s", - stnames[podState], stnames[newPodState])); - } - return newPodState; - } - - /** - * Synchronize the Feeds in the JSONArray, with the Feeds in the DB. 
- */ - private void syncFeeds(JSONArray ja) { - Collection coll = new ArrayList<>(); - for (int n = 0; n < ja.length(); n++) { - try { - Feed feed = new Feed(ja.getJSONObject(n)); - coll.add(feed); - } catch (Exception e) { - logger.warn("PROV5004: Invalid object in feed: " + ja.optJSONObject(n), e); - } - } - if (sync(coll, Feed.getAllFeeds())) { - BaseServlet.provisioningDataChanged(); - } - } - - /** - * Synchronize the Subscriptions in the JSONArray, with the Subscriptions in the DB. - */ - private void syncSubs(JSONArray ja) { - Collection coll = new ArrayList<>(); - for (int n = 0; n < ja.length(); n++) { - try { - //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047. - JSONObject jsonObject = ja.getJSONObject(n); - jsonObject.put("sync", "true"); - Subscription sub = new Subscription(jsonObject); - coll.add(sub); - } catch (Exception e) { - logger.warn("PROV5004: Invalid object in subscription: " + ja.optJSONObject(n), e); - } - } - if (sync(coll, Subscription.getAllSubscriptions())) { - BaseServlet.provisioningDataChanged(); - } - } - - /** - * Rally:US708115 - Synchronize the Groups in the JSONArray, with the Groups in the DB. - */ - private void syncGroups(JSONArray ja) { - Collection coll = new ArrayList<>(); - for (int n = 0; n < ja.length(); n++) { - try { - Group group = new Group(ja.getJSONObject(n)); - coll.add(group); - } catch (Exception e) { - logger.warn("PROV5004: Invalid object in group: " + ja.optJSONObject(n), e); - } - } - if (sync(coll, Group.getAllgroups())) { - BaseServlet.provisioningDataChanged(); - } - } - - - /** - * Synchronize the Parameters in the JSONObject, with the Parameters in the DB. - */ - private void syncParams(JSONObject jo) { - Collection coll = new ArrayList<>(); - for (String k : jo.keySet()) { - String val = ""; - try { - val = jo.getString(k); - } catch (JSONException e) { - logger.warn("PROV5004: Invalid object in parameters: " + jo.optJSONObject(k), e); - try { - val = "" + jo.getInt(k); - } catch (JSONException e1) { - logger.warn("PROV5004: Invalid object in parameters: " + jo.optInt(k), e1); - JSONArray ja = jo.getJSONArray(k); - for (int i = 0; i < ja.length(); i++) { - if (i > 0) { - val += "|"; - } - val += ja.getString(i); - } - } - } - coll.add(new Parameters(k, val)); - } - if (sync(coll, Parameters.getParameterCollection())) { - BaseServlet.provisioningDataChanged(); - BaseServlet.provisioningParametersChanged(); - } - } - - private void syncIngressRoutes(JSONArray ja) { - Collection coll = new ArrayList<>(); - for (int n = 0; n < ja.length(); n++) { - try { - IngressRoute in = new IngressRoute(ja.getJSONObject(n)); - coll.add(in); - } catch (NumberFormatException e) { - logger.warn("PROV5004: Invalid object in ingress routes: " + ja.optJSONObject(n)); - } - } - if (sync(coll, IngressRoute.getAllIngressRoutes())) { - BaseServlet.provisioningDataChanged(); - } - } - - private void syncEgressRoutes(JSONObject jo) { - Collection coll = new ArrayList<>(); - for (String key : jo.keySet()) { - try { - int sub = Integer.parseInt(key); - String node = jo.getString(key); - EgressRoute er = new EgressRoute(sub, node); - coll.add(er); - } catch (NumberFormatException e) { - logger.warn("PROV5004: Invalid subid in egress routes: " + key, e); - } catch (IllegalArgumentException e) { - logger.warn("PROV5004: Invalid node name in egress routes: " + key, e); - } - } - if (sync(coll, EgressRoute.getAllEgressRoutes())) { - BaseServlet.provisioningDataChanged(); - } - } - - private void syncNetworkRoutes(JSONArray ja) { - 
Collection coll = new ArrayList<>(); - for (int n = 0; n < ja.length(); n++) { - try { - NetworkRoute nr = new NetworkRoute(ja.getJSONObject(n)); - coll.add(nr); - } catch (JSONException e) { - logger.warn("PROV5004: Invalid object in network routes: " + ja.optJSONObject(n), e); - } - } - if (sync(coll, NetworkRoute.getAllNetworkRoutes())) { - BaseServlet.provisioningDataChanged(); - } - } - - private boolean sync(Collection newc, Collection oldc) { - boolean changes = false; - try { - Map newmap = getMap(newc); - Map oldmap = getMap(oldc); - Set union = new TreeSet<>(newmap.keySet()); - union.addAll(oldmap.keySet()); - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - for (String n : union) { - Syncable newobj = newmap.get(n); - Syncable oldobj = oldmap.get(n); - if (oldobj == null) { - changes = insertRecord(conn, newobj); - } else if (newobj == null) { - changes = deleteRecord(conn, oldobj); - } else if (!newobj.equals(oldobj)) { - changes = updateRecord(conn, newobj, oldobj); - } - } - db.release(conn); - } catch (SQLException e) { - logger.warn("PROV5009: problem during sync, exception: " + e); - } - return changes; - } - - private boolean updateRecord(Connection conn, Syncable newobj, Syncable oldobj) { - if (logger.isDebugEnabled()) { - logger.debug(" Updating record: " + newobj); - } - boolean changes = newobj.doUpdate(conn); - checkChangeOwner(newobj, oldobj); - - return changes; - } - - private boolean deleteRecord(Connection conn, Syncable oldobj) { - if (logger.isDebugEnabled()) { - logger.debug(" Deleting record: " + oldobj); - } - return oldobj.doDelete(conn); - } - - private boolean insertRecord(Connection conn, Syncable newobj) { - if (logger.isDebugEnabled()) { - logger.debug(" Inserting record: " + newobj); - } - return newobj.doInsert(conn); - } - - private Map getMap(Collection coll) { - Map map = new HashMap<>(); - for (Syncable v : coll) { - map.put(v.getKey(), v); - } - return map; - } - - /** - * Change owner of FEED/SUBSCRIPTION. - * Rally US708115 Change Ownership of FEED - 1610 - */ - private void checkChangeOwner(Syncable newobj, Syncable oldobj) { - if (newobj instanceof Feed) { - Feed oldfeed = (Feed) oldobj; - Feed newfeed = (Feed) newobj; - - if (!oldfeed.getPublisher().equals(newfeed.getPublisher())) { - logger.info("PROV5013 - Previous publisher: " - + oldfeed.getPublisher() + ": New publisher-" + newfeed.getPublisher()); - oldfeed.setPublisher(newfeed.getPublisher()); - oldfeed.changeOwnerShip(); - } - } else if (newobj instanceof Subscription) { - Subscription oldsub = (Subscription) oldobj; - Subscription newsub = (Subscription) newobj; - - if (!oldsub.getSubscriber().equals(newsub.getSubscriber())) { - logger.info("PROV5013 - Previous subscriber: " - + oldsub.getSubscriber() + ": New subscriber-" + newsub.getSubscriber()); - oldsub.setSubscriber(newsub.getSubscriber()); - oldsub.changeOwnerShip(); - } - } - - } - - /** - * Issue a GET on the peer POD's /internal/prov/ URL to get a copy of its provisioning data. 
- * - * @return the provisioning data (as a JONObject) - */ - private synchronized JSONObject readProvisioningJson() { - String url = URLUtilities.generatePeerProvURL(); - HttpGet get = new HttpGet(url); - try { - HttpResponse response = httpclient.execute(get); - int code = response.getStatusLine().getStatusCode(); - if (code != HttpServletResponse.SC_OK) { - logger.warn("PROV5010: readProvisioningJson failed, bad error code: " + code); - return null; - } - HttpEntity entity = response.getEntity(); - String ctype = entity.getContentType().getValue().trim(); - if (!ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE1) - && !ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE2)) { - logger.warn("PROV5011: readProvisioningJson failed, bad content type: " + ctype); - return null; - } - return new JSONObject(new JSONTokener(entity.getContent())); - } catch (Exception e) { - logger.warn("PROV5012: readProvisioningJson failed, exception: " + e); - return null; - } finally { - get.releaseConnection(); - } - } - - /** - * Issue a GET on the peer POD's /internal/drlogs/ URL to get an RELBitSet representing the log records available in - * the remote database. - * - * @return the bitset - */ - RLEBitSet readRemoteLoglist() { - RLEBitSet bs = new RLEBitSet(); - String url = URLUtilities.generatePeerLogsURL(); - - //Fixing if only one Prov is configured, not to give exception to fill logs, return empty bitset. - if ("".equals(url)) { - return bs; - } - //End of fix. - - HttpGet get = new HttpGet(url); - try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) { - HttpResponse response = httpclient.execute(get); - int code = response.getStatusLine().getStatusCode(); - if (code != HttpServletResponse.SC_OK) { - logger.warn("PROV5010: readRemoteLoglist failed, bad error code: " + code); - return bs; - } - HttpEntity entity = response.getEntity(); - String ctype = entity.getContentType().getValue().trim(); - if (!TEXT_CT.equals(ctype)) { - logger.warn("PROV5011: readRemoteLoglist failed, bad content type: " + ctype); - return bs; - } - InputStream is = entity.getContent(); - int ch; - while ((ch = is.read()) >= 0) { - bos.write(ch); - } - bs.set(bos.toString()); - is.close(); - } catch (Exception e) { - logger.warn("PROV5012: readRemoteLoglist failed, exception: " + e); - return bs; - } finally { - get.releaseConnection(); - } - return bs; - } - - /** - * Issue a POST on the peer POD's /internal/drlogs/ URL to fetch log records available in the remote database that - * we wish to copy to the local database. 
- * - * @param bs the bitset (an RELBitSet) of log records to fetch - */ - void replicateDataRouterLogs(RLEBitSet bs) { - String url = URLUtilities.generatePeerLogsURL(); - HttpPost post = new HttpPost(url); - try { - String str = bs.toString(); - HttpEntity body = new ByteArrayEntity(str.getBytes(), ContentType.create(TEXT_CT)); - post.setEntity(body); - if (logger.isDebugEnabled()) { - logger.debug("Requesting records: " + str); - } - - HttpResponse response = httpclient.execute(post); - int code = response.getStatusLine().getStatusCode(); - if (code != HttpServletResponse.SC_OK) { - logger.warn("PROV5010: replicateDataRouterLogs failed, bad error code: " + code); - return; - } - HttpEntity entity = response.getEntity(); - String ctype = entity.getContentType().getValue().trim(); - if (!TEXT_CT.equals(ctype)) { - logger.warn("PROV5011: replicateDataRouterLogs failed, bad content type: " + ctype); - return; - } - - String spoolname = "" + System.currentTimeMillis(); - Path tmppath = Paths.get(spooldir, spoolname); - Path donepath = Paths.get(spooldir, "IN." + spoolname); - Files.copy(entity.getContent(), Paths.get(spooldir, spoolname), StandardCopyOption.REPLACE_EXISTING); - Files.move(tmppath, donepath, StandardCopyOption.REPLACE_EXISTING); - logger.info("Approximately " + bs.cardinality() + " records replicated."); - } catch (Exception e) { - logger.warn("PROV5012: replicateDataRouterLogs failed, exception: " + e); - } finally { - post.releaseConnection(); - } - } -} diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Deleteable.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Deleteable.java index 6910e0d7..65a54e08 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Deleteable.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Deleteable.java @@ -37,5 +37,5 @@ public interface Deleteable { * @param conn the JDBC Connection to use * @return true if the DELETE succeeded, false otherwise */ - public boolean doDelete(Connection conn); + boolean doDelete(Connection conn); } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java index 2b6462db..8cd19866 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java @@ -35,7 +35,7 @@ import java.util.Objects; import java.util.SortedSet; import java.util.TreeSet; import org.json.JSONObject; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * The representation of one route in the Egress Route Table. 
@@ -71,16 +71,10 @@ public class EgressRoute extends NodeClass implements Comparable { */ public static SortedSet getAllEgressRoutes() { SortedSet set = new TreeSet<>(); - DB db = new DB(); - String sql = "select SUBID, NODEID from EGRESS_ROUTES"; - try (Connection conn = db.getConnection()) { - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery(sql)) { - addEgressRouteToSet(set, rs); - } - } finally { - db.release(conn); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery("select SUBID, NODEID from EGRESS_ROUTES")) { + addEgressRouteToSet(set, rs); } catch (SQLException e) { intlogger.error("PROV0008 EgressRoute.getAllEgressRoutes: " + e.getMessage(), e); } @@ -103,18 +97,13 @@ public class EgressRoute extends NodeClass implements Comparable { */ public static EgressRoute getEgressRoute(int sub) { EgressRoute er = null; - DB db = new DB(); - String sql = "select NODEID from EGRESS_ROUTES where SUBID = ?"; - try (Connection conn = db.getConnection(); - PreparedStatement ps = conn.prepareStatement(sql)) { + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("select NODEID from EGRESS_ROUTES where SUBID = ?")) { ps.setInt(1, sub); - try (ResultSet rs = ps.executeQuery()) { - if (rs.next()) { - int node = rs.getInt("NODEID"); - er = new EgressRoute(sub, node); - } - } finally { - db.release(conn); + ResultSet rs = ps.executeQuery(); + if (rs.next()) { + int node = rs.getInt("NODEID"); + er = new EgressRoute(sub, node); } } catch (SQLException e) { intlogger.error("PROV0009 EgressRoute.getEgressRoute: " + e.getMessage(), e); @@ -125,8 +114,7 @@ public class EgressRoute extends NodeClass implements Comparable { @Override public boolean doDelete(Connection conn) { boolean rv = true; - String sql = "delete from EGRESS_ROUTES where SUBID = ?"; - try (PreparedStatement ps = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement("delete from EGRESS_ROUTES where SUBID = ?")) { ps.setInt(1, subid); ps.execute(); } catch (SQLException e) { @@ -139,9 +127,7 @@ public class EgressRoute extends NodeClass implements Comparable { @Override public boolean doInsert(Connection conn) { boolean rv = false; - String sql = "insert into EGRESS_ROUTES (SUBID, NODEID) values (?, ?)"; - try (PreparedStatement ps = conn.prepareStatement(sql)) { - // Create the NETWORK_ROUTES row + try (PreparedStatement ps = conn.prepareStatement("insert into EGRESS_ROUTES (SUBID, NODEID) values (?, ?)")) { ps.setInt(1, this.subid); ps.setInt(2, this.nodeid); ps.execute(); @@ -155,8 +141,7 @@ public class EgressRoute extends NodeClass implements Comparable { @Override public boolean doUpdate(Connection conn) { boolean rv = true; - String sql = "update EGRESS_ROUTES set NODEID = ? where SUBID = ?"; - try (PreparedStatement ps = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement("update EGRESS_ROUTES set NODEID = ? 
where SUBID = ?")) { ps.setInt(1, nodeid); ps.setInt(2, subid); ps.executeUpdate(); diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java index 3dccc02c..ac1f70af 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java @@ -42,8 +42,8 @@ import java.util.Set; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; -import org.onap.dmaap.datarouter.provisioning.utils.DB; import org.onap.dmaap.datarouter.provisioning.utils.JSONUtilities; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities; @@ -58,7 +58,6 @@ public class Feed extends Syncable { private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog"); private static int nextFeedID = getMaxFeedID() + 1; - private static final String SQLEXCEPTION = "SQLException: "; private static final String FEED_ID_SQL = "FEEDID"; private static final String FEED_ID = "feedid"; private static final String DEL = "deleted"; @@ -224,20 +223,17 @@ public class Feed extends Syncable { * @return true if it is valid */ @SuppressWarnings("resource") - public static boolean isFeedValid(int id) { + static boolean isFeedValid(int id) { int count = 0; - try { - DB db = new DB(); - Connection conn = db.getConnection(); - try (PreparedStatement stmt = conn.prepareStatement("select COUNT(*) from FEEDS where FEEDID = ?")) { - stmt.setInt(1, id); - try (ResultSet rs = stmt.executeQuery()) { - if (rs.next()) { - count = rs.getInt(1); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "select COUNT(*) from FEEDS where FEEDID = ?")) { + ps.setInt(1, id); + try (ResultSet rs = ps.executeQuery()) { + if (rs.next()) { + count = rs.getInt(1); } } - db.release(conn); } catch (SQLException e) { intlogger.warn("PROV0024 Feed.isFeedValid: " + e.getMessage(), e); } @@ -276,17 +272,14 @@ public class Feed extends Syncable { */ public static int countActiveFeeds() { int count = 0; - try { - DB db = new DB(); - @SuppressWarnings("resource") Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("select count(*) from FEEDS where DELETED = 0")) { - if (rs.next()) { - count = rs.getInt(1); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "select count(*) from FEEDS where DELETED = 0")) { + try (ResultSet rs = ps.executeQuery()) { + if (rs.next()) { + count = rs.getInt(1); } } - db.release(conn); } catch (SQLException e) { intlogger.warn("PROV0025 Feed.countActiveFeeds: " + e.getMessage(), e); } @@ -299,18 +292,14 @@ public class Feed extends Syncable { */ public static int getMaxFeedID() { int max = 0; - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("select MAX(feedid) from FEEDS")) { - if (rs.next()) { - max = rs.getInt(1); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "select MAX(feedid) from FEEDS")) { + try (ResultSet rs = ps.executeQuery()) { + if (rs.next()) { + 
max = rs.getInt(1); } } - db.release(conn); } catch (SQLException e) { intlogger.warn("PROV0026 Feed.getMaxFeedID: " + e.getMessage(), e); } @@ -323,44 +312,37 @@ public class Feed extends Syncable { */ public static Collection getAllFeeds() { Map map = new HashMap<>(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("select * from FEEDS")) { - while (rs.next()) { - Feed feed = new Feed(rs); - map.put(feed.getFeedid(), feed); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + try (PreparedStatement ps = conn.prepareStatement("select * from FEEDS"); + ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + Feed feed = new Feed(rs); + map.put(feed.getFeedid(), feed); } - - String sql = "select * from FEED_ENDPOINT_IDS"; - try (ResultSet rs = stmt.executeQuery(sql)) { - while (rs.next()) { - int id = rs.getInt(FEED_ID_SQL); - Feed feed = map.get(id); - if (feed != null) { - FeedEndpointID epi = new FeedEndpointID(rs); - Collection ecoll = feed.getAuthorization().getEndpointIDS(); - ecoll.add(epi); - } + } + try (PreparedStatement ps = conn.prepareStatement("select * from FEED_ENDPOINT_IDS"); + ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + int id = rs.getInt(FEED_ID_SQL); + Feed feed = map.get(id); + if (feed != null) { + FeedEndpointID epi = new FeedEndpointID(rs); + Collection ecoll = feed.getAuthorization().getEndpointIDS(); + ecoll.add(epi); } } - - sql = "select * from FEED_ENDPOINT_ADDRS"; - try (ResultSet rs = stmt.executeQuery(sql)) { - while (rs.next()) { - int id = rs.getInt(FEED_ID_SQL); - Feed feed = map.get(id); - if (feed != null) { - Collection acoll = feed.getAuthorization().getEndpointAddrs(); - acoll.add(rs.getString("ADDR")); - } + } + try (PreparedStatement ps = conn.prepareStatement("select * from FEED_ENDPOINT_ADDRS"); + ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + int id = rs.getInt(FEED_ID_SQL); + Feed feed = map.get(id); + if (feed != null) { + Collection acoll = feed.getAuthorization().getEndpointAddrs(); + acoll.add(rs.getString("ADDR")); } } } - db.release(conn); } catch (SQLException e) { intlogger.warn("PROV0027 Feed.getAllFeeds: " + e.getMessage(), e); } @@ -382,26 +364,21 @@ public class Feed extends Syncable { sql += " and PUBLISHER = ?"; } else if (name.equals("subs")) { sql = "select distinct FEEDS.SELF_LINK from FEEDS, SUBSCRIPTIONS " - + "where DELETED = 0 " - + "and FEEDS.FEEDID = SUBSCRIPTIONS.FEEDID " - + "and SUBSCRIPTIONS.SUBSCRIBER = ?"; + + "where DELETED = 0 " + + "and FEEDS.FEEDID = SUBSCRIPTIONS.FEEDID " + + "and SUBSCRIPTIONS.SUBSCRIBER = ?"; } - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (PreparedStatement ps = conn.prepareStatement(sql)) { - if (sql.indexOf('?') >= 0) { - ps.setString(1, val); - } - try (ResultSet rs = ps.executeQuery()) { - while (rs.next()) { - String str = rs.getString(1); - list.add(str.trim()); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement(sql)) { + if (sql.indexOf('?') >= 0) { + ps.setString(1, val); + } + try (ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + String str = rs.getString(1); + list.add(str.trim()); } } - db.release(conn); } catch (SQLException e) { intlogger.warn("PROV0028 Feed.getFilteredFeedUrlList: " + e.getMessage(), e); } @@ -411,9 +388,7 @@ public class Feed 
extends Syncable { @SuppressWarnings("resource") private static Feed getFeedBySQL(String sql) { Feed feed = null; - try { - DB db = new DB(); - Connection conn = db.getConnection(); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { try (Statement stmt = conn.createStatement()) { try (ResultSet rs = stmt.executeQuery(sql)) { if (rs.next()) { @@ -438,7 +413,6 @@ public class Feed extends Syncable { } } } - db.release(conn); } catch (SQLException e) { intlogger.warn("PROV0029 Feed.getFeedBySQL: " + e.getMessage(), e); } @@ -453,7 +427,7 @@ public class Feed extends Syncable { /** * Set feedid with FeedLinks. - * @param feedid Feedid to set to + * @param feedid Feedid to set to */ public void setFeedid(int feedid) { this.feedid = feedid; @@ -470,10 +444,6 @@ public class Feed extends Syncable { return aafInstance; } - public void setAafInstance(String aafInstance) { - this.aafInstance = aafInstance; - } - //new getter setters for groups- Rally:US708115 - 1610 public int getGroupid() { return groupid; @@ -508,11 +478,11 @@ public class Feed extends Syncable { } // New field is added - Groups feature Rally:US708102 - 1610 - public String getBusinessDescription() { + String getBusinessDescription() { return businessDescription; } - public void setBusinessDescription(String businessDescription) { + void setBusinessDescription(String businessDescription) { this.businessDescription = businessDescription; } @@ -545,7 +515,7 @@ public class Feed extends Syncable { return links; } - public void setLinks(FeedLinks links) { + void setLinks(FeedLinks links) { this.links = links; } @@ -557,11 +527,11 @@ public class Feed extends Syncable { this.deleted = deleted; } - public boolean isSuspended() { + boolean isSuspended() { return suspended; } - public void setSuspended(boolean suspended) { + void setSuspended(boolean suspended) { this.suspended = suspended; } @@ -621,23 +591,12 @@ public class Feed extends Syncable { @Override public boolean doDelete(Connection conn) { boolean rv = true; - PreparedStatement ps = null; - try { - String sql = "delete from FEEDS where FEEDID = ?"; - ps = conn.prepareStatement(sql); + try (PreparedStatement ps = conn.prepareStatement("delete from FEEDS where FEEDID = ?")) { ps.setInt(1, feedid); ps.execute(); } catch (SQLException e) { rv = false; intlogger.error("PROV0007 doDelete: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } } return rv; } @@ -653,51 +612,46 @@ public class Feed extends Syncable { if (feedid > nextFeedID) { nextFeedID = feedid + 1; } - // Create FEED_ENDPOINT_IDS rows FeedAuthorization auth = getAuthorization(); - String sql = "insert into FEED_ENDPOINT_IDS values (?, ?, ?)"; - try (PreparedStatement ps2 = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement("insert into FEED_ENDPOINT_IDS values (?, ?, ?)")) { for (FeedEndpointID fid : auth.getEndpointIDS()) { - ps2.setInt(1, feedid); - ps2.setString(2, fid.getId()); - ps2.setString(3, fid.getPassword()); - ps2.executeUpdate(); + ps.setInt(1, feedid); + ps.setString(2, fid.getId()); + ps.setString(3, fid.getPassword()); + ps.executeUpdate(); } } - // Create FEED_ENDPOINT_ADDRS rows - sql = "insert into FEED_ENDPOINT_ADDRS values (?, ?)"; - try (PreparedStatement ps2 = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement("insert into FEED_ENDPOINT_ADDRS values (?, ?)")) { for (String t : 
auth.getEndpointAddrs()) { - ps2.setInt(1, feedid); - ps2.setString(2, t); - ps2.executeUpdate(); + ps.setInt(1, feedid); + ps.setString(2, t); + ps.executeUpdate(); } } - // Finally, create the FEEDS row - sql = "insert into FEEDS (FEEDID, NAME, VERSION, DESCRIPTION, AUTH_CLASS, PUBLISHER, SELF_LINK, " - + "PUBLISH_LINK, SUBSCRIBE_LINK, LOG_LINK, DELETED, SUSPENDED," - + "BUSINESS_DESCRIPTION, GROUPID, AAF_INSTANCE) " - + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; - try (PreparedStatement ps2 = conn.prepareStatement(sql)) { - ps2.setInt(1, feedid); - ps2.setString(2, getName()); - ps2.setString(3, getVersion()); - ps2.setString(4, getDescription()); - ps2.setString(5, getAuthorization().getClassification()); - ps2.setString(6, getPublisher()); - ps2.setString(7, getLinks().getSelf()); - ps2.setString(8, getLinks().getPublish()); - ps2.setString(9, getLinks().getSubscribe()); - ps2.setString(10, getLinks().getLog()); - ps2.setBoolean(11, isDeleted()); - ps2.setBoolean(12, isSuspended()); - ps2.setString(13, getBusinessDescription()); - ps2.setInt(14, groupid); - ps2.setString(15, getAafInstance()); - ps2.executeUpdate(); + try (PreparedStatement ps = conn.prepareStatement( + "insert into FEEDS (FEEDID, NAME, VERSION, DESCRIPTION, AUTH_CLASS, PUBLISHER, SELF_LINK, " + + "PUBLISH_LINK, SUBSCRIBE_LINK, LOG_LINK, DELETED, SUSPENDED," + + "BUSINESS_DESCRIPTION, GROUPID, AAF_INSTANCE) " + + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { + ps.setInt(1, feedid); + ps.setString(2, getName()); + ps.setString(3, getVersion()); + ps.setString(4, getDescription()); + ps.setString(5, getAuthorization().getClassification()); + ps.setString(6, getPublisher()); + ps.setString(7, getLinks().getSelf()); + ps.setString(8, getLinks().getPublish()); + ps.setString(9, getLinks().getSubscribe()); + ps.setString(10, getLinks().getLog()); + ps.setBoolean(11, isDeleted()); + ps.setBoolean(12, isSuspended()); + ps.setString(13, getBusinessDescription()); + ps.setInt(14, groupid); + ps.setString(15, getAafInstance()); + ps.executeUpdate(); } } catch (SQLException e) { rv = false; @@ -709,88 +663,72 @@ public class Feed extends Syncable { @Override public boolean doUpdate(Connection conn) { boolean rv = true; - Feed oldobj = getFeedById(feedid); - PreparedStatement ps = null; try { + Feed oldobj = getFeedById(feedid); Set newset = getAuthorization().getEndpointIDS(); Set oldset = oldobj.getAuthorization().getEndpointIDS(); - - // Insert new FEED_ENDPOINT_IDS rows - String sql = "insert into FEED_ENDPOINT_IDS values (?, ?, ?)"; - ps = conn.prepareStatement(sql); - for (FeedEndpointID fid : newset) { - if (!oldset.contains(fid)) { - ps.setInt(1, feedid); - ps.setString(2, fid.getId()); - ps.setString(3, fid.getPassword()); - ps.executeUpdate(); + try (PreparedStatement ps = conn.prepareStatement("insert into FEED_ENDPOINT_IDS values (?, ?, ?)")) { + // Insert new FEED_ENDPOINT_IDS rows + for (FeedEndpointID fid : newset) { + if (!oldset.contains(fid)) { + ps.setInt(1, feedid); + ps.setString(2, fid.getId()); + ps.setString(3, fid.getPassword()); + ps.executeUpdate(); + } } } - ps.close(); - // Delete old FEED_ENDPOINT_IDS rows - sql = "delete from FEED_ENDPOINT_IDS where FEEDID = ? AND USERID = ? 
AND PASSWORD = ?"; - ps = conn.prepareStatement(sql); - for (FeedEndpointID fid : oldset) { - if (!newset.contains(fid)) { - ps.setInt(1, feedid); - ps.setString(2, fid.getId()); - ps.setString(3, fid.getPassword()); - ps.executeUpdate(); + try (PreparedStatement ps = conn.prepareStatement( + "delete from FEED_ENDPOINT_IDS where FEEDID = ? AND USERID = ? AND PASSWORD = ?")) { + for (FeedEndpointID fid : oldset) { + if (!newset.contains(fid)) { + ps.setInt(1, feedid); + ps.setString(2, fid.getId()); + ps.setString(3, fid.getPassword()); + ps.executeUpdate(); + } } } - ps.close(); - - // Insert new FEED_ENDPOINT_ADDRS rows Set newset2 = getAuthorization().getEndpointAddrs(); Set oldset2 = oldobj.getAuthorization().getEndpointAddrs(); - sql = "insert into FEED_ENDPOINT_ADDRS values (?, ?)"; - ps = conn.prepareStatement(sql); - for (String t : newset2) { - if (!oldset2.contains(t)) { - ps.setInt(1, feedid); - ps.setString(2, t); - ps.executeUpdate(); + // Insert new FEED_ENDPOINT_ADDRS rows + try (PreparedStatement ps = conn.prepareStatement("insert into FEED_ENDPOINT_ADDRS values (?, ?)")) { + for (String t : newset2) { + if (!oldset2.contains(t)) { + ps.setInt(1, feedid); + ps.setString(2, t); + ps.executeUpdate(); + } } } - ps.close(); - // Delete old FEED_ENDPOINT_ADDRS rows - sql = "delete from FEED_ENDPOINT_ADDRS where FEEDID = ? AND ADDR = ?"; - ps = conn.prepareStatement(sql); - for (String t : oldset2) { - if (!newset2.contains(t)) { - ps.setInt(1, feedid); - ps.setString(2, t); - ps.executeUpdate(); + try (PreparedStatement ps = conn.prepareStatement( + "delete from FEED_ENDPOINT_ADDRS where FEEDID = ? AND ADDR = ?")) { + for (String t : oldset2) { + if (!newset2.contains(t)) { + ps.setInt(1, feedid); + ps.setString(2, t); + ps.executeUpdate(); + } } } - ps.close(); - - // Finally, update the FEEDS row - sql = "update FEEDS set DESCRIPTION = ?, AUTH_CLASS = ?, DELETED = ?, SUSPENDED = ?, " - + "BUSINESS_DESCRIPTION=?, GROUPID=? where FEEDID = ?"; - ps = conn.prepareStatement(sql); - ps.setString(1, getDescription()); - ps.setString(2, getAuthorization().getClassification()); - ps.setInt(3, deleted ? 1 : 0); - ps.setInt(4, suspended ? 1 : 0); - ps.setString(5, getBusinessDescription()); - ps.setInt(6, groupid); - ps.setInt(7, feedid); - ps.executeUpdate(); - ps.close(); + try (PreparedStatement ps = conn.prepareStatement( + "update FEEDS set DESCRIPTION = ?, AUTH_CLASS = ?, DELETED = ?, SUSPENDED = ?, " + + "BUSINESS_DESCRIPTION=?, GROUPID=? where FEEDID = ?")) { + // Finally, update the FEEDS row + ps.setString(1, getDescription()); + ps.setString(2, getAuthorization().getClassification()); + ps.setInt(3, deleted ? 1 : 0); + ps.setInt(4, suspended ? 1 : 0); + ps.setString(5, getBusinessDescription()); + ps.setInt(6, groupid); + ps.setInt(7, feedid); + ps.executeUpdate(); + } } catch (SQLException e) { rv = false; intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } } return rv; } @@ -801,29 +739,15 @@ public class Feed extends Syncable { */ public boolean changeOwnerShip() { boolean rv = true; - PreparedStatement ps = null; - try { - - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - String sql = "update FEEDS set PUBLISHER = ? 
where FEEDID = ?"; - ps = conn.prepareStatement(sql); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "update FEEDS set PUBLISHER = ? where FEEDID = ?")) { ps.setString(1, this.publisher); ps.setInt(2, feedid); ps.execute(); - ps.close(); } catch (SQLException e) { rv = false; intlogger.warn("PROV0008 changeOwnerShip: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } } return rv; } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java index 3f685900..0b7e0655 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java @@ -30,14 +30,13 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Objects; import org.json.JSONObject; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * The representation of a Subscription. Subscriptions can be retrieved from the DB, or stored/updated in the DB. @@ -51,7 +50,6 @@ public class Group extends Syncable { private static final String GROUP_ID_CONST = "groupid"; private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog"); private static int nextGroupid = getMaxGroupID() + 1; - private static final String SQLEXCEPTION = "SQLException: "; private int groupid; private String authid; @@ -125,7 +123,7 @@ public class Group extends Syncable { } catch (InvalidObjectException e) { throw e; } catch (Exception e) { - intlogger.warn("Invalid JSON: " + e.getMessage(), e); + intlogger.error("Invalid JSON: " + e.getMessage(), e); throw new InvalidObjectException("Invalid JSON: " + e.getMessage()); } } @@ -137,8 +135,8 @@ public class Group extends Syncable { */ public static Group getGroupMatching(Group gup) { String sql = String.format( - "select * from GROUPS where NAME='%s'", - gup.getName() + "select * from GROUPS where NAME='%s'", + gup.getName() ); List list = getGroupsForSQL(sql); return !list.isEmpty() ? list.get(0) : null; @@ -152,7 +150,7 @@ public class Group extends Syncable { */ public static Group getGroupMatching(Group gup, int groupid) { String sql = String.format( - "select * from GROUPS where NAME = '%s' and GROUPID != %d ", gup.getName(), gup.getGroupid()); + "select * from GROUPS where NAME = '%s' and GROUPID != %d ", gup.getName(), groupid); List list = getGroupsForSQL(sql); return !list.isEmpty() ? 
list.get(0) : null; } @@ -185,19 +183,13 @@ public class Group extends Syncable { private static List getGroupsForSQL(String sql) { List list = new ArrayList<>(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery(sql)) { - while (rs.next()) { - Group group = new Group(rs); - list.add(group); - } - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement(sql); + ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + Group group = new Group(rs); + list.add(group); } - db.release(conn); } catch (SQLException e) { intlogger.error("PROV0009 getGroupsForSQL: " + e.getMessage(), e); } @@ -206,20 +198,14 @@ public class Group extends Syncable { private static int getMaxGroupID() { int max = 0; - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("select MAX(groupid) from GROUPS")) { - if (rs.next()) { - max = rs.getInt(1); - } - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("select MAX(groupid) from GROUPS"); + ResultSet rs = ps.executeQuery()) { + if (rs.next()) { + max = rs.getInt(1); } - db.release(conn); } catch (SQLException e) { - intlogger.info("PROV0001 getMaxSubID: " + e.getMessage(), e); + intlogger.error("PROV0001 getMaxSubID: " + e.getMessage(), e); } return max; } @@ -292,21 +278,18 @@ public class Group extends Syncable { @Override public boolean doInsert(Connection conn) { boolean rv = true; - PreparedStatement ps = null; - try { + try (PreparedStatement ps = conn.prepareStatement( + "insert into GROUPS(GROUPID, AUTHID, NAME, DESCRIPTION, CLASSIFICATION, MEMBERS) " + + "values (?, ?, ?, ?, ?, ?)", new String[]{"GROUPID"})) { if (groupid == -1) { // No feed ID assigned yet, so assign the next available one setGroupid(nextGroupid++); } - // In case we insert a gropup from synchronization + // In case we insert a group from synchronization if (groupid > nextGroupid) { nextGroupid = groupid + 1; } - // Create the GROUPS row - String sql = "insert into GROUPS (GROUPID, AUTHID, NAME, DESCRIPTION, CLASSIFICATION, MEMBERS) " - + "values (?, ?, ?, ?, ?, ?)"; - ps = conn.prepareStatement(sql, new String[]{"GROUPID"}); ps.setInt(1, groupid); ps.setString(2, authid); ps.setString(3, name); @@ -314,18 +297,9 @@ public class Group extends Syncable { ps.setString(5, classification); ps.setString(6, members); ps.execute(); - ps.close(); } catch (SQLException e) { rv = false; - intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } + intlogger.error("PROV0005 doInsert: " + e.getMessage(), e); } return rv; } @@ -333,11 +307,8 @@ public class Group extends Syncable { @Override public boolean doUpdate(Connection conn) { boolean rv = true; - PreparedStatement ps = null; - try { - String sql = "update GROUPS set AUTHID = ?, NAME = ?, DESCRIPTION = ?, CLASSIFICATION = ? , MEMBERS = ? " - + "where GROUPID = ?"; - ps = conn.prepareStatement(sql); + try (PreparedStatement ps = conn.prepareStatement( + "update GROUPS set AUTHID = ?, NAME = ?, DESCRIPTION = ?, CLASSIFICATION = ? , MEMBERS = ? 
where GROUPID = ?")) { ps.setString(1, authid); ps.setString(2, name); ps.setString(3, description); @@ -347,15 +318,7 @@ public class Group extends Syncable { ps.executeUpdate(); } catch (SQLException e) { rv = false; - intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } + intlogger.error("PROV0006 doUpdate: " + e.getMessage(), e); } return rv; } @@ -363,23 +326,12 @@ public class Group extends Syncable { @Override public boolean doDelete(Connection conn) { boolean rv = true; - PreparedStatement ps = null; - try { - String sql = "delete from GROUPS where GROUPID = ?"; - ps = conn.prepareStatement(sql); + try (PreparedStatement ps = conn.prepareStatement("delete from GROUPS where GROUPID = ?")) { ps.setInt(1, groupid); ps.execute(); } catch (SQLException e) { rv = false; - intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } + intlogger.error("PROV0007 doDelete: " + e.getMessage(), e); } return rv; } @@ -398,23 +350,19 @@ public class Group extends Syncable { if (groupid != os.groupid) { return false; } - if (authid != os.authid) { + if (!authid.equals(os.authid)) { return false; } if (!name.equals(os.name)) { return false; } - if (description != os.description) { + if (!description.equals(os.description)) { return false; } if (!classification.equals(os.classification)) { return false; } - if (!members.equals(os.members)) { - return false; - } - - return true; + return members.equals(os.members); } @Override diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java index 555c5b69..d520a417 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java @@ -32,7 +32,6 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.util.Collection; import java.util.Set; import java.util.SortedSet; @@ -41,7 +40,7 @@ import javax.servlet.http.HttpServletRequest; import org.apache.commons.codec.binary.Base64; import org.json.JSONArray; import org.json.JSONObject; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * The representation of one route in the Ingress Route Table. 
@@ -137,21 +136,15 @@ public class IngressRoute extends NodeClass implements Comparable */ public static Set getIngressRoutesForSeq(int seq) { return getAllIngressRoutesForSQL( - "select SEQUENCE, FEEDID, USERID, SUBNET, NODESET from INGRESS_ROUTES where SEQUENCE = " + seq); + "select SEQUENCE, FEEDID, USERID, SUBNET, NODESET from INGRESS_ROUTES where SEQUENCE = " + seq); } private static SortedSet getAllIngressRoutesForSQL(String sql) { SortedSet set = new TreeSet<>(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery(sql)) { - addIngressRouteToSet(set, rs); - } - } - db.release(conn); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement(sql); + ResultSet rs = ps.executeQuery()) { + addIngressRouteToSet(set, rs); } catch (SQLException e) { intlogger.error("PROV0001 getAllIngressRoutesForSQL: " + e.getMessage(), e); } @@ -189,15 +182,12 @@ public class IngressRoute extends NodeClass implements Comparable private static int getMax(String sql) { int rv = 0; - DB db = new DB(); - try (Connection conn = db.getConnection(); - Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery(sql)) { - if (rs.next()) { - rv = rs.getInt("MAX"); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement(sql); + ResultSet rs = ps.executeQuery(sql)) { + if (rs.next()) { + rv = rs.getInt("MAX"); } - db.release(conn); } catch (SQLException e) { intlogger.error("PROV0002 getMax: " + e.getMessage(), e); } @@ -214,10 +204,9 @@ public class IngressRoute extends NodeClass implements Comparable */ public static IngressRoute getIngressRoute(int feedid, String user, String subnet) { IngressRoute ir = null; - DB db = new DB(); - String sql = "select SEQUENCE, NODESET from INGRESS_ROUTES where FEEDID = ? AND USERID = ? and SUBNET = ?"; - try (Connection conn = db.getConnection(); - PreparedStatement ps = conn.prepareStatement(sql)) { + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "select SEQUENCE, NODESET from INGRESS_ROUTES where FEEDID = ? AND USERID = ? and SUBNET = ?")) { ps.setInt(1, feedid); ps.setString(2, user); ps.setString(3, subnet); @@ -228,7 +217,6 @@ public class IngressRoute extends NodeClass implements Comparable ir = new IngressRoute(seq, feedid, user, subnet, nodeset); } } - db.release(conn); } catch (SQLException e) { intlogger.error("PROV0003 getIngressRoute: " + e.getMessage(), e); } @@ -283,7 +271,7 @@ public class IngressRoute extends NodeClass implements Comparable * Compare IP addresses as byte arrays to a subnet specified as a CIDR. Taken from * org.onap.dmaap.datarouter.node.SubnetMatcher and modified somewhat. 
*/ - public class SubnetMatcher { + public static class SubnetMatcher { private byte[] sn; private int len; @@ -295,7 +283,7 @@ public class IngressRoute extends NodeClass implements Comparable * * @param subnet The CIDR to match */ - public SubnetMatcher(String subnet) { + SubnetMatcher(String subnet) { int index = subnet.lastIndexOf('/'); if (index == -1) { try { @@ -359,14 +347,10 @@ public class IngressRoute extends NodeClass implements Comparable private Collection readNodes() { Collection set = new TreeSet<>(); - DB db = new DB(); - String sql = "select NODEID from NODESETS where SETID = ?"; - try (Connection conn = db.getConnection()) { - try (PreparedStatement ps = conn.prepareStatement(sql)) { - ps.setInt(1, nodelist); - addNodeToSet(set, ps); - } - db.release(conn); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("select NODEID from NODESETS where SETID = ?")) { + ps.setInt(1, nodelist); + addNodeToSet(set, ps); } catch (SQLException e) { intlogger.error(SQLEXCEPTION + e.getMessage(), e); } @@ -391,8 +375,8 @@ public class IngressRoute extends NodeClass implements Comparable public boolean doDelete(Connection conn) { boolean rv = true; try (PreparedStatement ps = conn.prepareStatement( - "delete from INGRESS_ROUTES where FEEDID = ? and USERID = ? and SUBNET = ?"); - PreparedStatement ps2 = conn.prepareStatement("delete from NODESETS where SETID = ?")) { + "delete from INGRESS_ROUTES where FEEDID = ? and USERID = ? and SUBNET = ?"); + PreparedStatement ps2 = conn.prepareStatement("delete from NODESETS where SETID = ?")) { // Delete the Ingress Route ps.setInt(1, feedid); ps.setString(2, userid); @@ -403,7 +387,7 @@ public class IngressRoute extends NodeClass implements Comparable ps2.execute(); } catch (SQLException e) { rv = false; - intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e); + intlogger.error("PROV0007 doDelete: " + e.getMessage(), e); } return rv; } @@ -412,8 +396,8 @@ public class IngressRoute extends NodeClass implements Comparable public boolean doInsert(Connection conn) { boolean rv = false; try (PreparedStatement ps = conn.prepareStatement("insert into NODESETS (SETID, NODEID) values (?,?)"); - PreparedStatement ps2 = conn.prepareStatement("insert into INGRESS_ROUTES (SEQUENCE, FEEDID, USERID," - + " SUBNET, NODESET) values (?, ?, ?, ?, ?)")) { + PreparedStatement ps2 = conn.prepareStatement("insert into INGRESS_ROUTES (SEQUENCE, FEEDID, USERID," + + " SUBNET, NODESET) values (?, ?, ?, ?, ?)")) { // Create the NODESETS rows & set nodelist this.nodelist = getMaxNodeSetID() + 1; for (String node : nodes) { @@ -431,7 +415,7 @@ public class IngressRoute extends NodeClass implements Comparable ps2.execute(); rv = true; } catch (SQLException e) { - intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e); + intlogger.error("PROV0005 doInsert: " + e.getMessage(), e); } return rv; } @@ -460,7 +444,7 @@ public class IngressRoute extends NodeClass implements Comparable @Override public String getKey() { return String - .format("%d/%s/%s/%d", feedid, (userid == null) ? "" : userid, (subnet == null) ? "" : subnet, seq); + .format("%d/%s/%s/%d", feedid, (userid == null) ? "" : userid, (subnet == null) ? "" : subnet, seq); } @Override @@ -503,6 +487,6 @@ public class IngressRoute extends NodeClass implements Comparable @Override public String toString() { return String.format("INGRESS: feed=%d, userid=%s, subnet=%s, seq=%d", feedid, (userid == null) ? "" : userid, - (subnet == null) ? 
"" : subnet, seq); + (subnet == null) ? "" : subnet, seq); } } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java index b26e551b..0d027033 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java @@ -35,8 +35,7 @@ import java.sql.SQLException; import java.sql.Types; import java.text.ParseException; import java.util.Iterator; - -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet; @@ -49,13 +48,6 @@ import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet; */ public class LogRecord extends BaseLogRecord { - /** - * Print all log records whose RECORD_IDs are in the bit set provided. - * - * @param os the {@link OutputStream} to print the records on - * @param bs the {@link RLEBitSet} listing the record IDs to print - * @throws IOException in case of I/O error - */ private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog"); private final String type; private final String feedFileID; @@ -76,7 +68,7 @@ public class LogRecord extends BaseLogRecord { * @param rs ResultSet from SQL statement * @throws SQLException in case of SQL error */ - public LogRecord(ResultSet rs) throws SQLException { + private LogRecord(ResultSet rs) throws SQLException { super(rs); this.type = rs.getString("TYPE"); this.feedFileID = rs.getString("FEED_FILEID"); @@ -122,28 +114,27 @@ public class LogRecord extends BaseLogRecord { } /** - * Get Log Records. - * @param os outputstream - * @param bs RLEBitSet object + * Print all log records whose RECORD_IDs are in the bit set provided. + * + * @param os the {@link OutputStream} to print the records on + * @param bs the {@link RLEBitSet} listing the record IDs to print * @throws IOException in case of I/O error */ public static void printLogRecords(OutputStream os, RLEBitSet bs) throws IOException { - final String sql = "select * from LOG_RECORDS where RECORD_ID >= ? AND RECORD_ID <= ?"; - DB db = new DB(); - try (Connection conn = db.getConnection()) { - Iterator iter = bs.getRangeIterator(); - try (PreparedStatement ps = conn.prepareStatement(sql)) { - while (iter.hasNext()) { - Long[] nxt = iter.next(); - ps.setLong(1, nxt[0]); - ps.setLong(2, nxt[1]); - try (ResultSet rs = ps.executeQuery()) { - while (rs.next()) { - LogRecord lr = new LogRecord(rs); - os.write(lr.toString().getBytes()); - } - ps.clearParameters(); + Iterator iter = bs.getRangeIterator(); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "select * from LOG_RECORDS where RECORD_ID >= ? 
AND RECORD_ID <= ?")) { + while (iter.hasNext()) { + Long[] nxt = iter.next(); + ps.setLong(1, nxt[0]); + ps.setLong(2, nxt[1]); + try (ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + LogRecord lr = new LogRecord(rs); + os.write(lr.toString().getBytes()); } + ps.clearParameters(); } } } catch (SQLException e) { @@ -158,27 +149,27 @@ public class LogRecord extends BaseLogRecord { @Override public String toString() { return - sdf.format(getEventTime()) + "|" - + "LOG|" - + getPublishId() + "|" - + getFeedid() + "|" - + getRequestUri() + "|" - + getMethod() + "|" - + getContentType() + "|" - + getContentLength() + "|" - + type + "|" - + feedFileID + "|" - + remoteAddr + "|" - + user + "|" - + status + "|" - + subID + "|" - + fileID + "|" - + result + "|" - + attempts + "|" - + reason + "|" - + recordId + "|" - + clength2 - + "\n"; + sdf.format(getEventTime()) + "|" + + "LOG|" + + getPublishId() + "|" + + getFeedid() + "|" + + getRequestUri() + "|" + + getMethod() + "|" + + getContentType() + "|" + + getContentLength() + "|" + + type + "|" + + feedFileID + "|" + + remoteAddr + "|" + + user + "|" + + status + "|" + + subID + "|" + + fileID + "|" + + result + "|" + + attempts + "|" + + reason + "|" + + recordId + "|" + + clength2 + + "\n"; } @Override diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java index dd9a624e..2ada1ff9 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java @@ -30,12 +30,11 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.util.Objects; import java.util.SortedSet; import java.util.TreeSet; import org.json.JSONObject; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * The representation of one route in the Network Route Table. @@ -104,17 +103,10 @@ public class NetworkRoute extends NodeClass implements Comparable */ public static SortedSet getAllNetworkRoutes() { SortedSet set = new TreeSet<>(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("select FROMNODE, TONODE, VIANODE from NETWORK_ROUTES")) { - addNetworkRouteToSet(set, rs); - } - } finally { - db.release(conn); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("select FROMNODE, TONODE, VIANODE from NETWORK_ROUTES"); + ResultSet rs = ps.executeQuery()) { + addNetworkRouteToSet(set, rs); } catch (SQLException e) { intlogger.error(SQLEXCEPTION + e.getMessage(), e); } @@ -138,15 +130,15 @@ public class NetworkRoute extends NodeClass implements Comparable return tonode; } - public int getVianode() { + int getVianode() { return vianode; } @Override public boolean doDelete(Connection conn) { boolean rv = true; - String sql = "delete from NETWORK_ROUTES where FROMNODE = ? AND TONODE = ?"; - try (PreparedStatement ps = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement( + "delete from NETWORK_ROUTES where FROMNODE = ? 
AND TONODE = ?")) { ps.setInt(1, fromnode); ps.setInt(2, tonode); ps.execute(); @@ -160,9 +152,9 @@ public class NetworkRoute extends NodeClass implements Comparable @Override public boolean doInsert(Connection conn) { boolean rv = false; - String sql = "insert into NETWORK_ROUTES (FROMNODE, TONODE, VIANODE) values (?, ?, ?)"; if (this.vianode >= 0) { - try (PreparedStatement ps = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement( + "insert into NETWORK_ROUTES (FROMNODE, TONODE, VIANODE) values (?, ?, ?)")) { // Create the NETWORK_ROUTES row ps.setInt(1, this.fromnode); ps.setInt(2, this.tonode); @@ -179,8 +171,8 @@ public class NetworkRoute extends NodeClass implements Comparable @Override public boolean doUpdate(Connection conn) { boolean rv = true; - String sql = "update NETWORK_ROUTES set VIANODE = ? where FROMNODE = ? and TONODE = ?"; - try (PreparedStatement ps = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement( + "update NETWORK_ROUTES set VIANODE = ? where FROMNODE = ? and TONODE = ?")) { ps.setInt(1, vianode); ps.setInt(2, fromnode); ps.setInt(3, tonode); diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NodeClass.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NodeClass.java index ef491cab..19cbf55c 100755 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NodeClass.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NodeClass.java @@ -3,7 +3,7 @@ * * org.onap.dmaap * * =========================================================================== * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== + * * =========================================================================== * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at @@ -34,7 +34,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.TreeSet; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * This class is used to aid in the mapping of node names from/to node IDs. 
@@ -46,12 +46,12 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB; public abstract class NodeClass extends Syncable { private static final String PROV_0005_DO_INSERT = "PROV0005 doInsert: "; - private static Map map; + private static Map nodesMap; private static EELFLogger intLogger = EELFManager.getInstance().getLogger("InternalLog"); NodeClass() { // init on first use - if (map == null) { + if (nodesMap == null) { reload(); } } @@ -63,11 +63,11 @@ public abstract class NodeClass extends Syncable { * @param nodes a pipe separated list of the current nodes */ public static void setNodes(String[] nodes) { - if (map == null) { + if (nodesMap == null) { reload(); } int nextid = 0; - for (Integer n : map.values()) { + for (Integer n : nodesMap.values()) { if (n >= nextid) { nextid = n + 1; } @@ -76,9 +76,9 @@ public abstract class NodeClass extends Syncable { for (String node : nodes) { node = normalizeNodename(node); - if (!map.containsKey(node)) { + if (!nodesMap.containsKey(node)) { intLogger.info("..adding " + node + " to NODES with index " + nextid); - map.put(node, nextid); + nodesMap.put(node, nextid); insertNodesToTable(nextid, node); nextid++; } @@ -86,44 +86,35 @@ public abstract class NodeClass extends Syncable { } private static void insertNodesToTable(int nextid, String node) { - DB db = new DB(); - try (Connection conn = db.getConnection()) { - try (PreparedStatement ps = conn - .prepareStatement("insert into NODES (NODEID, NAME, ACTIVE) values (?, ?, 1)")) { - ps.setInt(1, nextid); - ps.setString(2, node); - ps.execute(); - } finally { - db.release(conn); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "insert into NODES (NODEID, NAME, ACTIVE) values (?, ?, 1)")) { + ps.setInt(1, nextid); + ps.setString(2, node); + ps.execute(); } catch (SQLException e) { intLogger.error(PROV_0005_DO_INSERT + e.getMessage(), e); } } private static void reload() { - Map hmap = new HashMap<>(); - String sql = "select NODEID, NAME from NODES"; - DB db = new DB(); - try (Connection conn = db.getConnection(); - PreparedStatement ps = conn.prepareStatement(sql)) { - try (ResultSet rs = ps.executeQuery()) { - while (rs.next()) { - int id = rs.getInt("NODEID"); - String name = rs.getString("NAME"); - hmap.put(name, id); - } - } finally { - db.release(conn); + Map tmpNodesMap = new HashMap<>(); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("select NODEID, NAME from NODES"); + ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + int id = rs.getInt("NODEID"); + String name = rs.getString("NAME"); + tmpNodesMap.put(name, id); } } catch (SQLException e) { intLogger.error(PROV_0005_DO_INSERT + e.getMessage(),e); } - map = hmap; + nodesMap = tmpNodesMap; } static Integer lookupNodeName(final String name) { - Integer nodeName = map.get(name); + Integer nodeName = nodesMap.get(name); if (nodeName == null) { throw new IllegalArgumentException("Invalid node name: " + name); } @@ -137,7 +128,7 @@ public abstract class NodeClass extends Syncable { */ public static Collection lookupNodeNames(String patt) { Collection coll = new TreeSet<>(); - final Set keyset = map.keySet(); + final Set keyset = nodesMap.keySet(); for (String s : patt.toLowerCase().split(",")) { if (s.endsWith("*")) { addNodeToCollection(coll, keyset, s); @@ -181,7 +172,7 @@ public abstract class NodeClass extends Syncable { } String lookupNodeID(int node) { - for (Map.Entry entry : 
map.entrySet()) { + for (Map.Entry entry : nodesMap.entrySet()) { if (entry.getValue() == node) { return entry.getKey(); } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java index e89cfd47..14a0a9dc 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java @@ -29,14 +29,13 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Objects; import org.json.JSONObject; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * Methods to provide access to Provisioning parameters in the DB. This class also provides constants of the standard @@ -54,7 +53,7 @@ public class Parameters extends Syncable { public static final String PROV_AUTH_SUBJECTS = "PROV_AUTH_SUBJECTS"; public static final String PROV_NAME = "PROV_NAME"; public static final String PROV_ACTIVE_NAME = "PROV_ACTIVE_NAME"; - public static final String PROV_DOMAIN = "PROV_DOMAIN"; + static final String PROV_DOMAIN = "PROV_DOMAIN"; public static final String PROV_MAXFEED_COUNT = "PROV_MAXFEED_COUNT"; public static final String PROV_MAXSUB_COUNT = "PROV_MAXSUB_COUNT"; public static final String PROV_POKETIMER1 = "PROV_POKETIMER1"; @@ -108,17 +107,13 @@ public class Parameters extends Syncable { */ public static Collection getParameterCollection() { Collection coll = new ArrayList<>(); - DB db = new DB(); - String sql = "select * from PARAMETERS"; - try (Connection conn = db.getConnection(); - Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery(sql)) { - while (rs.next()) { - Parameters param = new Parameters(rs); - coll.add(param); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("select * from PARAMETERS")) { + ResultSet rs = ps.executeQuery(); + while (rs.next()) { + Parameters param = new Parameters(rs); + coll.add(param); } - db.release(conn); } catch (SQLException e) { intlogger.error(SQLEXCEPTION + e.getMessage(), e); } @@ -133,17 +128,14 @@ public class Parameters extends Syncable { */ public static Parameters getParameter(String key) { Parameters val = null; - DB db = new DB(); - String sql = "select KEYNAME, VALUE from PARAMETERS where KEYNAME = ?"; - try (Connection conn = db.getConnection(); - PreparedStatement stmt = conn.prepareStatement(sql)) { + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement stmt = conn.prepareStatement( + "select KEYNAME, VALUE from PARAMETERS where KEYNAME = ?")) { stmt.setString(1, key); - try (ResultSet rs = stmt.executeQuery()) { - if (rs.next()) { - val = new Parameters(rs); - } + ResultSet rs = stmt.executeQuery(); + if (rs.next()) { + val = new Parameters(rs); } - db.release(conn); } catch (SQLException e) { intlogger.error(SQLEXCEPTION + e.getMessage(), e); } @@ -173,8 +165,7 @@ public class Parameters extends Syncable { @Override public boolean doInsert(Connection conn) { boolean rv = true; - String sql = "insert into PARAMETERS values (?, ?)"; - try (PreparedStatement ps = 
conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement("insert into PARAMETERS values (?, ?)")) { ps.setString(1, getKeyname()); ps.setString(2, getValue()); ps.execute(); @@ -188,8 +179,7 @@ public class Parameters extends Syncable { @Override public boolean doUpdate(Connection conn) { boolean rv = true; - String sql = "update PARAMETERS set VALUE = ? where KEYNAME = ?"; - try (PreparedStatement ps = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement("update PARAMETERS set VALUE = ? where KEYNAME = ?")) { ps.setString(1, getValue()); ps.setString(2, getKeyname()); ps.executeUpdate(); @@ -203,8 +193,7 @@ public class Parameters extends Syncable { @Override public boolean doDelete(Connection conn) { boolean rv = true; - String sql = "delete from PARAMETERS where KEYNAME = ?"; - try (PreparedStatement ps = conn.prepareStatement(sql)) { + try (PreparedStatement ps = conn.prepareStatement("delete from PARAMETERS where KEYNAME = ?")) { ps.setString(1, getKeyname()); ps.execute(); } catch (SQLException e) { diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java index 749980e5..5741881c 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java @@ -30,14 +30,14 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Properties; import org.json.JSONObject; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.ProvRunner; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities; @@ -116,7 +116,7 @@ public class Subscription extends Syncable { this.followRedirect = rs.getBoolean("FOLLOW_REDIRECTS"); this.subscriber = rs.getString("SUBSCRIBER"); this.links = new SubLinks(rs.getString("SELF_LINK"), URLUtilities.generateFeedURL(feedid), - rs.getString("LOG_LINK")); + rs.getString("LOG_LINK")); this.suspended = rs.getBoolean("SUSPENDED"); this.lastMod = rs.getDate("LAST_MOD"); this.createdDate = rs.getDate("CREATED_DATE"); @@ -148,7 +148,7 @@ public class Subscription extends Syncable { final boolean use100 = jdeli.getBoolean("use100"); //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047. - Properties prop = (new DB()).getProperties(); + Properties prop = ProvRunner.getProvProperties(); if (!url.startsWith("https://") && isHttpsRelaxationFalseAndHasSyncKey(jo, prop)) { throw new InvalidObjectException("delivery URL is not HTTPS"); } @@ -187,16 +187,16 @@ public class Subscription extends Syncable { public static Subscription getSubscriptionMatching(Subscription sub) { SubDelivery deli = sub.getDelivery(); String sql = String.format( - "select * from SUBSCRIPTIONS where FEEDID = %d and DELIVERY_URL = \"%s\" and DELIVERY_USER = \"%s\" " - + "and DELIVERY_PASSWORD = \"%s\" and DELIVERY_USE100 = %d and METADATA_ONLY = %d " - + "and FOLLOW_REDIRECTS = %d", - sub.getFeedid(), - deli.getUrl(), - deli.getUser(), - deli.getPassword(), - deli.isUse100() ? 1 : 0, - sub.isMetadataOnly() ? 
1 : 0, - sub.isFollowRedirect() ? 1 : 0 + "select * from SUBSCRIPTIONS where FEEDID = %d and DELIVERY_URL = \"%s\" and DELIVERY_USER = \"%s\" " + + "and DELIVERY_PASSWORD = \"%s\" and DELIVERY_USE100 = %d and METADATA_ONLY = %d " + + "and FOLLOW_REDIRECTS = %d", + sub.getFeedid(), + deli.getUrl(), + deli.getUser(), + deli.getPassword(), + deli.isUse100() ? 1 : 0, + sub.isMetadataOnly() ? 1 : 0, + sub.isFollowRedirect() ? 1 : 0 ); List list = getSubscriptionsForSQL(sql); return !list.isEmpty() ? list.get(0) : null; @@ -224,19 +224,13 @@ public class Subscription extends Syncable { */ private static List getSubscriptionsForSQL(String sql) { List list = new ArrayList<>(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery(sql)) { - while (rs.next()) { - Subscription sub = new Subscription(rs); - list.add(sub); - } - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement(sql); + ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + Subscription sub = new Subscription(rs); + list.add(sub); } - db.release(conn); } catch (SQLException e) { intlogger.error("PROV0001 getSubscriptionsForSQL: " + e.toString(), e); } @@ -249,18 +243,12 @@ public class Subscription extends Syncable { */ public static int getMaxSubID() { int max = 0; - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("select MAX(subid) from SUBSCRIPTIONS")) { - if (rs.next()) { - max = rs.getInt(1); - } - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("select MAX(subid) from SUBSCRIPTIONS"); + ResultSet rs = ps.executeQuery()) { + if (rs.next()) { + max = rs.getInt(1); } - db.release(conn); } catch (SQLException e) { intlogger.info("getMaxSubID: " + e.getMessage(), e); } @@ -274,22 +262,15 @@ public class Subscription extends Syncable { */ public static Collection getSubscriptionUrlList(int feedid) { List list = new ArrayList<>(); - String sql = "select SUBID from SUBSCRIPTIONS where FEEDID = ?"; - - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (PreparedStatement stmt = conn.prepareStatement(sql)) { - stmt.setString(1, String.valueOf(feedid)); - try (ResultSet rs = stmt.executeQuery()) { - while (rs.next()) { - int subid = rs.getInt(SUBID_COL); - list.add(URLUtilities.generateSubscriptionURL(subid)); - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement stmt = conn.prepareStatement("select SUBID from SUBSCRIPTIONS where FEEDID = ?")) { + stmt.setString(1, String.valueOf(feedid)); + try (ResultSet rs = stmt.executeQuery()) { + while (rs.next()) { + int subid = rs.getInt(SUBID_COL); + list.add(URLUtilities.generateSubscriptionURL(subid)); } } - db.release(conn); } catch (SQLException e) { intlogger.error(SQLEXCEPTION + e.getMessage(), e); } @@ -303,18 +284,12 @@ public class Subscription extends Syncable { */ public static int countActiveSubscriptions() { int count = 0; - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("select count(*) from SUBSCRIPTIONS")) { - if (rs.next()) { - count = 
rs.getInt(1); - } - } + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("select count(*) from SUBSCRIPTIONS"); + ResultSet rs = ps.executeQuery()) { + if (rs.next()) { + count = rs.getInt(1); } - db.release(conn); } catch (SQLException e) { intlogger.warn("PROV0008 countActiveSubscriptions: " + e.getMessage(), e); } @@ -323,7 +298,7 @@ public class Subscription extends Syncable { private boolean isHttpsRelaxationFalseAndHasSyncKey(JSONObject jo, Properties prop) { return prop.get("org.onap.dmaap.datarouter.provserver.https.relaxation").toString().equals("false") && !jo - .has("sync"); + .has("sync"); } public int getSubid() { @@ -514,9 +489,9 @@ public class Subscription extends Syncable { // Create the SUBSCRIPTIONS row String sql = "insert into SUBSCRIPTIONS (SUBID, FEEDID, DELIVERY_URL, DELIVERY_USER, DELIVERY_PASSWORD, " - + "DELIVERY_USE100, METADATA_ONLY, SUBSCRIBER, SUSPENDED, GROUPID, " - + "PRIVILEGED_SUBSCRIBER, FOLLOW_REDIRECTS, DECOMPRESS, AAF_INSTANCE) " - + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; + + "DELIVERY_USE100, METADATA_ONLY, SUBSCRIBER, SUSPENDED, GROUPID, " + + "PRIVILEGED_SUBSCRIBER, FOLLOW_REDIRECTS, DECOMPRESS, AAF_INSTANCE) " + + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; ps = conn.prepareStatement(sql, new String[]{SUBID_COL}); ps.setInt(1, subid); ps.setInt(2, feedid); @@ -560,13 +535,10 @@ public class Subscription extends Syncable { @Override public boolean doUpdate(Connection conn) { boolean rv = true; - PreparedStatement ps = null; - try { - String sql = "update SUBSCRIPTIONS set DELIVERY_URL = ?, DELIVERY_USER = ?, DELIVERY_PASSWORD = ?, " - + "DELIVERY_USE100 = ?, METADATA_ONLY = ?, " + "SUSPENDED = ?, GROUPID = ?, " - + "PRIVILEGED_SUBSCRIBER = ?, " - + "FOLLOW_REDIRECTS = ?, DECOMPRESS = ? where SUBID = ?"; - ps = conn.prepareStatement(sql); + try (PreparedStatement ps = conn.prepareStatement( + "update SUBSCRIPTIONS set DELIVERY_URL = ?, DELIVERY_USER = ?, DELIVERY_PASSWORD = ?, " + + "DELIVERY_USE100 = ?, METADATA_ONLY = ?, SUSPENDED = ?, GROUPID = ?, PRIVILEGED_SUBSCRIBER = ?, " + + "FOLLOW_REDIRECTS = ?, DECOMPRESS = ? where SUBID = ?")) { ps.setString(1, delivery.getUrl()); ps.setString(2, delivery.getUser()); ps.setString(3, delivery.getPassword()); @@ -582,14 +554,6 @@ public class Subscription extends Syncable { } catch (SQLException e) { rv = false; intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } } return rv; } @@ -600,29 +564,15 @@ public class Subscription extends Syncable { */ public boolean changeOwnerShip() { boolean rv = true; - PreparedStatement ps = null; - try { - - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - String sql = "update SUBSCRIPTIONS set SUBSCRIBER = ? where SUBID = ?"; - ps = conn.prepareStatement(sql); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "update SUBSCRIPTIONS set SUBSCRIBER = ? 
where SUBID = ?")) { ps.setString(1, this.subscriber); ps.setInt(2, subid); ps.execute(); - ps.close(); } catch (SQLException e) { rv = false; intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } } return rv; } @@ -631,23 +581,12 @@ public class Subscription extends Syncable { @Override public boolean doDelete(Connection conn) { boolean rv = true; - PreparedStatement ps = null; - try { - String sql = "delete from SUBSCRIPTIONS where SUBID = ?"; - ps = conn.prepareStatement(sql); + try (PreparedStatement ps = conn.prepareStatement("delete from SUBSCRIPTIONS where SUBID = ?")) { ps.setInt(1, subid); ps.execute(); } catch (SQLException e) { rv = false; intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e); - } finally { - try { - if (ps != null) { - ps.close(); - } - } catch (SQLException e) { - intlogger.error(SQLEXCEPTION + e.getMessage(), e); - } } return rv; } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java deleted file mode 100644 index a83f81a5..00000000 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java +++ /dev/null @@ -1,272 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - -package org.onap.dmaap.datarouter.provisioning.utils; - -import static java.lang.System.exit; -import static java.lang.System.getProperty; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileReader; -import java.io.IOException; -import java.io.LineNumberReader; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.NoSuchElementException; -import java.util.Properties; -import java.util.Queue; -import java.util.Set; - -/** - * Load the DB JDBC driver, and manage a simple pool of connections to the DB. 
- * - * @author Robert Eby - * @version $Id$ - */ -public class DB { - - private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog"); - - private static String dbUrl; - private static String dbLogin; - private static String dbPassword; - private static Properties props; - private static final Queue queue = new LinkedList<>(); - - private static String httpsPort; - private static String httpPort; - - /** - * Construct a DB object. If this is the very first creation of this object, it will load a copy of the properties - * for the server, and attempt to load the JDBC driver for the database. If a fatal error occurs (e.g. either the - * properties file or the DB driver is missing), the JVM will exit. - */ - public DB() { - if (props == null) { - props = new Properties(); - try { - props.load(new FileInputStream(getProperty( - "org.onap.dmaap.datarouter.provserver.properties", - "/opt/app/datartr/etc/provserver.properties"))); - dbUrl = (String) props.get("org.onap.dmaap.datarouter.db.url"); - dbLogin = (String) props.get("org.onap.dmaap.datarouter.db.login"); - dbPassword = (String) props.get("org.onap.dmaap.datarouter.db.password"); - httpsPort = (String) props.get("org.onap.dmaap.datarouter.provserver.https.port"); - httpPort = (String) props.get("org.onap.dmaap.datarouter.provserver.http.port"); - String dbDriver = (String) props.get("org.onap.dmaap.datarouter.db.driver"); - Class.forName(dbDriver); - } catch (IOException e) { - intlogger.error("PROV9003 Opening properties: " + e.getMessage(), e); - exit(1); - } catch (ClassNotFoundException e) { - intlogger.error("PROV9004 cannot find the DB driver: " + e); - exit(1); - } - } - } - - /** - * Get the provisioning server properties (loaded from provserver.properties). - * - * @return the Properties object - */ - public Properties getProperties() { - return props; - } - - /** - * Get a JDBC connection to the DB from the pool. Creates a new one if none are available. - * - * @return the Connection - */ - public Connection getConnection() throws SQLException { - Connection connection = null; - while (connection == null) { - synchronized (queue) { - try { - connection = queue.remove(); - } catch (NoSuchElementException ignore) { - int num = 0; - do { - // Try up to 3 times to get a connection - try { - connection = DriverManager.getConnection(dbUrl, dbLogin, dbPassword); - } catch (SQLException sqlEx) { - if (++num >= 3) { - throw sqlEx; - } - } - } - while (connection == null); - } - } - if (connection != null && !connection.isValid(1)) { - connection.close(); - connection = null; - } - } - return connection; - } - - /** - * Returns a JDBC connection to the pool. - * - * @param connection the Connection to return - */ - public void release(Connection connection) { - if (connection != null) { - synchronized (queue) { - if (!queue.contains(connection)) { - queue.add(connection); - } - } - } - } - - /** - * Run all necessary retrofits required to bring the database up to the level required for this version of the - * provisioning server. This should be run before the server itself is started. - * - * @return true if all retrofits worked, false otherwise - */ - public boolean runRetroFits() { - return retroFit1(); - } - - - public static String getHttpsPort() { - return httpsPort; - } - - public static String getHttpPort() { - return httpPort; - } - - /** - * Retrofit 1 - Make sure the expected tables are in DB and are initialized. Uses sql_init_01.sql to setup the DB. 
- * - * @return true if the retrofit worked, false otherwise - */ - private boolean retroFit1() { - final String[] expectedTables = { - "FEEDS", "FEED_ENDPOINT_ADDRS", "FEED_ENDPOINT_IDS", "PARAMETERS", - "SUBSCRIPTIONS", "LOG_RECORDS", "INGRESS_ROUTES", "EGRESS_ROUTES", - "NETWORK_ROUTES", "NODESETS", "NODES", "GROUPS" - }; - Connection connection = null; - try { - connection = getConnection(); - Set actualTables = getTableSet(connection); - boolean initialize = false; - for (String tableName : expectedTables) { - initialize |= !actualTables.contains(tableName); - } - if (initialize) { - intlogger.info("PROV9001: First time startup; The database is being initialized."); - runInitScript(connection, 1); - } - } catch (SQLException e) { - intlogger.error("PROV9000: The database credentials are not working: " + e.getMessage(), e); - return false; - } finally { - if (connection != null) { - release(connection); - } - } - return true; - } - - /** - * Get a set of all table names in the DB. - * - * @param connection a DB connection - * @return the set of table names - */ - private Set getTableSet(Connection connection) { - Set tables = new HashSet<>(); - try { - DatabaseMetaData md = connection.getMetaData(); - ResultSet rs = md.getTables(null, null, "%", null); - if (rs != null) { - while (rs.next()) { - tables.add(rs.getString("TABLE_NAME").toUpperCase()); - } - rs.close(); - } - } catch (SQLException e) { - intlogger.error("PROV9010: Failed to get TABLE data from DB: " + e.getMessage(), e); - } - return tables; - } - - /** - * Initialize the tables by running the initialization scripts located in the directory specified by the property - * org.onap.dmaap.datarouter.provserver.dbscripts. Scripts have names of the form - * sql_init_NN.sql - * - * @param connection a DB connection - * @param scriptId the number of the sql_init_NN.sql script to run - */ - private void runInitScript(Connection connection, int scriptId) { - String scriptDir = (String) props.get("org.onap.dmaap.datarouter.provserver.dbscripts"); - String scriptFile = String.format("%s/sql_init_%02d.sql", scriptDir, scriptId); - if (!(new File(scriptFile)).exists()) { - intlogger.error("PROV9005 Failed to load sql script from : " + scriptFile); - exit(1); - } - try (LineNumberReader lineReader = new LineNumberReader(new FileReader(scriptFile)); - Statement statement = connection.createStatement()) { - StringBuilder strBuilder = new StringBuilder(); - String line; - while ((line = lineReader.readLine()) != null) { - if (!line.startsWith("--")) { - line = line.trim(); - strBuilder.append(line); - executeDdlStatement(statement, strBuilder, line); - } - } - strBuilder.setLength(0); - } catch (Exception e) { - intlogger.error("PROV9002 Error when initializing table: " + e.getMessage(), e); - exit(1); - } - } - - private void executeDdlStatement(Statement statement, StringBuilder strBuilder, String line) throws SQLException { - if (line.endsWith(";")) { - // Execute one DDL statement - String sql = strBuilder.toString(); - strBuilder.setLength(0); - statement.execute(sql); - } - } -} diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java index f078d80e..187364f9 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java @@ -53,6 +53,7 @@ import 
org.apache.http.util.EntityUtils; import org.json.JSONArray; import org.json.JSONObject; import org.json.JSONTokener; +import org.onap.dmaap.datarouter.provisioning.ProvRunner; /** * This class provides a Command Line Interface for the routing tables in the DR Release 2.0 DB. @@ -117,7 +118,7 @@ public class DRRouteCLI { this.server = server; this.httpclient = new DefaultHttpClient(); - Properties provProperties = (new DB()).getProperties(); + Properties provProperties = ProvRunner.getProvProperties(); try { AafPropsUtils.init(new File(provProperties.getProperty( "org.onap.dmaap.datarouter.provserver.aafprops.path", diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java index afbadcd2..18ffea0f 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java @@ -47,6 +47,7 @@ import java.util.Map; import java.util.TreeSet; import java.util.zip.GZIPInputStream; import org.onap.dmaap.datarouter.provisioning.BaseServlet; +import org.onap.dmaap.datarouter.provisioning.ProvRunner; import org.onap.dmaap.datarouter.provisioning.beans.DeliveryExtraRecord; import org.onap.dmaap.datarouter.provisioning.beans.DeliveryRecord; import org.onap.dmaap.datarouter.provisioning.beans.ExpiryRecord; @@ -79,18 +80,12 @@ public class LogfileLoader extends Thread { */ private static LogfileLoader logfileLoader; - /** - * The PreparedStatement which is loaded by a Loadable. - */ - private static final String INSERT_SQL = "insert into LOG_RECORDS " - + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; /** * Each server can assign this many IDs. */ private static final long SET_SIZE = (1L << 56); private final EELFLogger logger; - private final DB db; private final String spooldir; private final long setStart; private final long setEnd; @@ -100,8 +95,7 @@ public class LogfileLoader extends Thread { private LogfileLoader() { this.logger = EELFManager.getInstance().getLogger("InternalLog"); - this.db = new DB(); - this.spooldir = db.getProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir"); + this.spooldir = ProvRunner.getProvProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir"); this.setStart = getIdRange(); this.setEnd = setStart + SET_SIZE - 1; this.seqSet = new RLEBitSet(); @@ -257,12 +251,10 @@ public class LogfileLoader extends Thread { cutoff *= 86400000L; logger.debug(" Pruning records older than=" + (cutoff / 86400000L) + " (" + new Date(cutoff) + ")"); - Connection conn = null; - try { + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { // Limit to a million at a time to avoid typing up the DB for too long. - conn = db.getConnection(); try (PreparedStatement ps = conn.prepareStatement( - "DELETE from LOG_RECORDS where EVENT_TIME < ? limit 1000000")) { + "DELETE from LOG_RECORDS where EVENT_TIME < ? 
limit 1000000")) { ps.setLong(1, cutoff); while (count > 0) { if (!ps.execute()) { @@ -283,8 +275,6 @@ public class LogfileLoader extends Thread { } } catch (SQLException e) { logger.error("LogfileLoader.pruneRecords: " + e.getMessage(), e); - } finally { - db.release(conn); } } return did1; @@ -292,14 +282,11 @@ public class LogfileLoader extends Thread { private long countRecords() { long count = 0; - try (Connection conn = db.getConnection(); - Statement stmt = conn.createStatement()) { - try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) as COUNT from LOG_RECORDS")) { - if (rs.next()) { - count = rs.getLong("COUNT"); - } - } finally { - db.release(conn); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement("SELECT COUNT(*) as COUNT from LOG_RECORDS"); + ResultSet rs = ps.executeQuery()) { + if (rs.next()) { + count = rs.getLong("COUNT"); } } catch (SQLException e) { logger.error("LogfileLoader.countRecords: " + e.getMessage(), e); @@ -309,19 +296,16 @@ public class LogfileLoader extends Thread { private Map getHistogram() { Map map = new HashMap<>(); - try (Connection conn = db.getConnection(); - Statement stmt = conn.createStatement()) { + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "SELECT FLOOR(EVENT_TIME/86400000) AS DAY, COUNT(*) AS COUNT FROM LOG_RECORDS GROUP BY DAY"); + ResultSet rs = ps.executeQuery()) { logger.debug(" LOG_RECORD table histogram..."); - try (ResultSet rs = stmt.executeQuery( - "SELECT FLOOR(EVENT_TIME/86400000) AS DAY, COUNT(*) AS COUNT FROM LOG_RECORDS GROUP BY DAY")) { - while (rs.next()) { - long day = rs.getLong("DAY"); - long cnt = rs.getLong("COUNT"); - map.put(day, cnt); - logger.debug(" " + day + " " + cnt); - } - } finally { - db.release(conn); + while (rs.next()) { + long day = rs.getLong("DAY"); + long cnt = rs.getLong("COUNT"); + map.put(day, cnt); + logger.debug(" " + day + " " + cnt); } } catch (SQLException e) { logger.error("LogfileLoader.getHistogram: " + e.getMessage(), e); @@ -330,9 +314,7 @@ public class LogfileLoader extends Thread { } private void initializeNextid() { - Connection conn = null; - try { - conn = db.getConnection(); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { RLEBitSet nbs = new RLEBitSet(); try (Statement stmt = conn.createStatement()) { // Build a bitset of all records in the LOG_RECORDS table @@ -376,8 +358,6 @@ public class LogfileLoader extends Thread { logger.debug(String.format("LogfileLoader.initializeNextid, next ID is %d (%x)", nextId, nextId)); } catch (SQLException e) { logger.error("LogfileLoader.initializeNextid: " + e.getMessage(), e); - } finally { - db.release(conn); } } @@ -385,9 +365,9 @@ public class LogfileLoader extends Thread { int[] process(File file) { int ok = 0; int total = 0; - try { - Connection conn = db.getConnection(); - PreparedStatement ps = conn.prepareStatement(INSERT_SQL); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "insert into LOG_RECORDS values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { Reader reader = file.getPath().endsWith(".gz") ? 
new InputStreamReader(new GZIPInputStream(new FileInputStream(file))) : new FileReader(file); @@ -428,8 +408,6 @@ public class LogfileLoader extends Thread { total++; } } - ps.close(); - db.release(conn); } catch (SQLException | IOException e) { logger.warn("PROV8007 Exception reading " + file + ": " + e); } @@ -462,7 +440,7 @@ public class LogfileLoader extends Thread { ExpiryRecord expiryRecord = new ExpiryRecord(pp); if ("other".equals(expiryRecord.getReason())) { logger.info("Invalid reason '" + pp[9] + "' changed to 'other' for record: " - + expiryRecord.getPublishId()); + + expiryRecord.getPublishId()); } return new Loadable[]{expiryRecord}; } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java index b8b668dd..a6a3e2b5 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java @@ -30,6 +30,7 @@ import javax.crypto.SecretKey; import javax.crypto.SecretKeyFactory; import javax.crypto.spec.PBEKeySpec; import javax.crypto.spec.PBEParameterSpec; +import org.onap.dmaap.datarouter.provisioning.ProvRunner; /** * The Processing of a Password. Password can be encrypted and decrypted. @@ -40,7 +41,7 @@ public class PasswordProcessor { private static final String SECRET_KEY_FACTORY_TYPE = "PBEWithMD5AndDES"; private static final String PASSWORD_ENCRYPTION_STRING = - (new DB()).getProperties().getProperty("org.onap.dmaap.datarouter.provserver.passwordencryption"); + ProvRunner.getProvProperties().getProperty("org.onap.dmaap.datarouter.provserver.passwordencryption"); private static final char[] PASSWORD = PASSWORD_ENCRYPTION_STRING.toCharArray(); private static final byte[] SALT = {(byte) 0xde, (byte) 0x33, (byte) 0x10, (byte) 0x12, (byte) 0xde, (byte) 0x33, (byte) 0x10, (byte) 0x12,}; diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/Poker.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/Poker.java new file mode 100644 index 00000000..84c6e317 --- /dev/null +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/Poker.java @@ -0,0 +1,330 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * * + ******************************************************************************/ + +package org.onap.dmaap.datarouter.provisioning.utils; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.InetAddress; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.UnknownHostException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.Timer; +import java.util.TimerTask; +import java.util.TreeSet; +import org.json.JSONException; +import org.json.JSONObject; +import org.json.JSONTokener; + +import org.onap.dmaap.datarouter.provisioning.ProvRunner; +import org.onap.dmaap.datarouter.provisioning.beans.EgressRoute; +import org.onap.dmaap.datarouter.provisioning.beans.Feed; +import org.onap.dmaap.datarouter.provisioning.beans.Group; +import org.onap.dmaap.datarouter.provisioning.beans.IngressRoute; +import org.onap.dmaap.datarouter.provisioning.beans.NetworkRoute; +import org.onap.dmaap.datarouter.provisioning.beans.Parameters; +import org.onap.dmaap.datarouter.provisioning.beans.Subscription; +import org.onap.dmaap.datarouter.provisioning.BaseServlet; + +/** + * This class handles the two timers (described in R1 Design Notes), and takes care of issuing the GET to each node of + * the URL to "poke". + * + * @author Robert Eby + * @version $Id: Poker.java,v 1.11 2014/01/08 16:13:47 eby Exp $ + */ + +public class Poker extends TimerTask { + + /** + * Template used to generate the URL to issue the GET against. + */ + private static final String POKE_URL_TEMPLATE = "http://%s/internal/fetchProv"; + + private static final Object lock = new Object(); + private static final String CARRIAGE_RETURN = "\n],\n"; + + /** + * This is a singleton -- there is only one Poker object in the server. + */ + private static Poker poker; + private long timer1; + private long timer2; + private String thisPod; // DNS name of this machine + private EELFLogger logger; + private String provString; + + + private Poker() { + timer1 = timer2 = 0; + logger = EELFManager.getInstance().getLogger("InternalLog"); + try { + thisPod = InetAddress.getLocalHost().getHostName(); + } catch (UnknownHostException e) { + thisPod = "*UNKNOWN_POD*"; // not a major problem + logger.info("UnknownHostException: Setting thisPod to \"*UNKNOWN_POD*\"", e); + } + provString = buildProvisioningString(); + Timer rolex = new Timer(); + rolex.scheduleAtFixedRate(this, 0L, 1000L); // Run once a second to check the timers + } + + /** + * Get the singleton Poker object. + * + * @return the Poker + */ + public static synchronized Poker getPoker() { + if (poker == null) { + poker = new Poker(); + } + return poker; + } + + /** + * This method sets the two timers described in the design notes. + * + * @param t1 the first timer controls how long to wait after a provisioning request before poking each node This + * timer can be reset if it has not "gone off". + * @param t2 the second timer set the outer bound on how long to wait. It cannot be reset. + */ + public void setTimers(long t1, long t2) { + synchronized (lock) { + if (timer1 == 0 || t1 > timer1) { + timer1 = t1; + } + if (timer2 == 0) { + timer2 = t2; + } + } + if (logger.isDebugEnabled()) { + logger.debug("Poker timers set to " + timer1 + " and " + timer2); + } + + + } + + /** + * Return the last provisioning string built. + * + * @return the last provisioning string built. 
+ */ + public String getProvisioningString() { + return provString; + } + + /** + * The method to run at the predefined interval (once per second). This method checks to see if either of the two + * timers has expired, and if so, will rebuild the provisioning string, and poke all the nodes and other PODs. The + * timers are then reset to 0. + */ + @Override + public void run() { + try { + if (timer1 > 0) { + long now = System.currentTimeMillis(); + boolean fire = false; + synchronized (lock) { + if (now > timer1 || now > timer2) { + timer1 = timer2 = 0; + fire = true; + } + } + if (fire) { + pokeNodes(); + } + } + } catch (Exception e) { + logger.warn("PROV0020: Caught exception in Poker: " + e); + } + } + + private void pokeNodes() { + // Rebuild the prov string + provString = buildProvisioningString(); + // Only the active POD should poke nodes, etc. + boolean active = SynchronizerTask.getSynchronizer().isActive(); + if (active) { + // Poke all the DR nodes + for (String n : BaseServlet.getNodes()) { + pokeNode(n); + } + // Poke the pod that is not us + for (String n : BaseServlet.getPods()) { + if (n.length() > 0 && !n.equals(thisPod)) { + pokeNode(n); + } + } + } + } + + private void pokeNode(final String nodename) { + logger.debug("PROV0012 Poking node " + nodename + " ..."); + String nodeUrl = String.format(POKE_URL_TEMPLATE, nodename + ":" + ProvRunner.getProvProperties().get( + "org.onap.dmaap.datarouter.provserver.http.port"), "8080"); + Runnable runn = () -> { + try { + URL url = new URL(nodeUrl); + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + conn.setConnectTimeout(60000); //Fixes for Itrack DATARTR-3, poke timeout + conn.connect(); + conn.getContentLength(); // Force the GET through + conn.disconnect(); + } catch (MalformedURLException e) { + logger.warn( + "PROV0013 MalformedURLException Error poking node at " + nodeUrl + " : " + e + .getMessage(), e); + } catch (IOException e) { + logger.warn("PROV0013 IOException Error poking node at " + nodeUrl + " : " + e + .getMessage(), e); + } + }; + runn.run(); + } + + private String buildProvisioningString() { + StringBuilder sb = new StringBuilder("{\n"); + + // Append Feeds to the string + String pfx = "\n"; + sb.append("\"feeds\": ["); + for (Feed f : Feed.getAllFeeds()) { + sb.append(pfx); + sb.append(f.asJSONObject().toString()); + pfx = ",\n"; + } + sb.append(CARRIAGE_RETURN); + + //Append groups to the string - Rally:US708115 - 1610 + pfx = "\n"; + sb.append("\"groups\": ["); + for (Group s : Group.getAllgroups()) { + sb.append(pfx); + sb.append(s.asJSONObject().toString()); + pfx = ",\n"; + } + sb.append(CARRIAGE_RETURN); + + // Append Subscriptions to the string + pfx = "\n"; + sb.append("\"subscriptions\": ["); + for (Subscription s : Subscription.getAllSubscriptions()) { + sb.append(pfx); + if (s != null) { + sb.append(s.asJSONObject().toString()); + } + pfx = ",\n"; + } + sb.append(CARRIAGE_RETURN); + + // Append Parameters to the string + pfx = "\n"; + sb.append("\"parameters\": {"); + Map props = Parameters.getParameters(); + Set ivals = new HashSet<>(); + String intv = props.get("_INT_VALUES"); + if (intv != null) { + ivals.addAll(Arrays.asList(intv.split("\\|"))); + } + for (String key : new TreeSet(props.keySet())) { + String val = props.get(key); + sb.append(pfx); + sb.append(" \"").append(key).append("\": "); + if (ivals.contains(key)) { + // integer value + sb.append(val); + } else if (key.endsWith("S")) { + // Split and append array of strings + String[] pp = val.split("\\|"); + String p2 = 
""; + sb.append("["); + for (String t : pp) { + sb.append(p2).append("\"").append(quote(t)).append("\""); + p2 = ","; + } + sb.append("]"); + } else { + sb.append("\"").append(quote(val)).append("\""); + } + pfx = ",\n"; + } + sb.append("\n},\n"); + + // Append Routes to the string + pfx = "\n"; + sb.append("\"ingress\": ["); + for (IngressRoute in : IngressRoute.getAllIngressRoutes()) { + sb.append(pfx); + sb.append(in.asJSONObject().toString()); + pfx = ",\n"; + } + sb.append(CARRIAGE_RETURN); + + pfx = "\n"; + sb.append("\"egress\": {"); + for (EgressRoute eg : EgressRoute.getAllEgressRoutes()) { + sb.append(pfx); + String str = eg.asJSONObject().toString(); + str = str.substring(1, str.length() - 1); + sb.append(str); + pfx = ",\n"; + } + sb.append("\n},\n"); + + pfx = "\n"; + sb.append("\"routing\": ["); + for (NetworkRoute ne : NetworkRoute.getAllNetworkRoutes()) { + sb.append(pfx); + sb.append(ne.asJSONObject().toString()); + pfx = ",\n"; + } + sb.append("\n]"); + sb.append("\n}"); + + // Convert to string and verify it is valid JSON + String tempProvString = sb.toString(); + try { + new JSONObject(new JSONTokener(tempProvString)); + } catch (JSONException e) { + logger.warn("PROV0016: Possible invalid prov string: " + e); + } + return tempProvString; + } + + private String quote(String str) { + StringBuilder sb = new StringBuilder(); + for (char ch : str.toCharArray()) { + if (ch == '\\' || ch == '"') { + sb.append('\\'); + } + sb.append(ch); + } + return sb.toString(); + } +} diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ProvDbUtils.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ProvDbUtils.java new file mode 100644 index 00000000..34f05f46 --- /dev/null +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ProvDbUtils.java @@ -0,0 +1,168 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2019 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.datarouter.provisioning.utils; + +import static java.lang.System.exit; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.File; +import java.io.FileReader; +import java.io.LineNumberReader; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.HashSet; +import java.util.Properties; +import java.util.Set; +import javax.sql.DataSource; +import org.apache.commons.dbcp2.BasicDataSource; +import org.onap.dmaap.datarouter.provisioning.ProvRunner; + +public class ProvDbUtils { + + private static EELFLogger intLogger = EELFManager.getInstance().getLogger("InternalLog"); + private static DataSource dataSource; + private static ProvDbUtils provDbUtils; + + private ProvDbUtils() { + } + + public static ProvDbUtils getInstance() { + if (provDbUtils == null) { + try { + provDbUtils = new ProvDbUtils(); + dataSource = setupDataSource(ProvRunner.getProvProperties()); + } catch (ClassNotFoundException e) { + intLogger.error("PROV9010: Failed to load DB Driver Class: " + e.getMessage(), e); + exit(1); + } + } + return provDbUtils; + } + + private static DataSource setupDataSource(Properties props) throws ClassNotFoundException { + intLogger.info("PROV9009: Setting up DB dataSource"); + Class.forName((String) props.get("org.onap.dmaap.datarouter.db.driver")); + BasicDataSource dataSource = new BasicDataSource(); + dataSource.setUrl((String) props.get("org.onap.dmaap.datarouter.db.url")); + dataSource.setUsername((String) props.get("org.onap.dmaap.datarouter.db.login")); + dataSource.setPassword((String) props.get("org.onap.dmaap.datarouter.db.password")); + dataSource.setMinIdle(5); + dataSource.setMaxIdle(15); + dataSource.setMaxOpenPreparedStatements(100); + return dataSource; + } + + public Connection getConnection() throws SQLException { + return dataSource.getConnection(); + } + + public boolean initProvDB() { + final String[] expectedTables = { + "FEEDS", "FEED_ENDPOINT_ADDRS", "FEED_ENDPOINT_IDS", "PARAMETERS", + "SUBSCRIPTIONS", "LOG_RECORDS", "INGRESS_ROUTES", "EGRESS_ROUTES", + "NETWORK_ROUTES", "NODESETS", "NODES", "GROUPS" + }; + try (Connection connection = getConnection()) { + Set actualTables = getTableSet(connection); + boolean initialize = false; + for (String tableName : expectedTables) { + initialize |= !actualTables.contains(tableName); + } + if (initialize) { + intLogger.info("PROV9001: First time startup; The database is being initialized."); + runInitScript(connection, 1); + } + } catch (SQLException e) { + intLogger.error("PROV9000: The database credentials are not working: " + e.getMessage(), e); + return false; + } + return true; + } + + /** + * Get a set of all table names in the DB. 
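+     * <p>This set is what initProvDB() above relies on when deciding whether the schema still needs to be
+     * created; roughly (a sketch of the existing check, not additional behaviour):</p>
+     * <pre>
+     *     Set actualTables = getTableSet(connection);
+     *     boolean initialize = false;
+     *     for (String tableName : expectedTables) {
+     *         initialize |= !actualTables.contains(tableName);
+     *     }
+     *     if (initialize) {
+     *         runInitScript(connection, 1);
+     *     }
+     * </pre>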
+ * + * @param connection a DB connection + * @return the set of table names + */ + private Set getTableSet(Connection connection) { + Set tables = new HashSet<>(); + try { + DatabaseMetaData md = connection.getMetaData(); + ResultSet rs = md.getTables(null, null, "%", null); + if (rs != null) { + while (rs.next()) { + tables.add(rs.getString("TABLE_NAME").toUpperCase()); + } + rs.close(); + } + } catch (SQLException e) { + intLogger.error("PROV9010: Failed to get TABLE data from DB: " + e.getMessage(), e); + } + return tables; + } + + /** + * Initialize the tables by running the initialization scripts located in the directory specified by the property + * org.onap.dmaap.datarouter.provserver.dbscripts. Scripts have names of the form + * sql_init_NN.sql + * + * @param connection a DB connection + * @param scriptId the number of the sql_init_NN.sql script to run + */ + private void runInitScript(Connection connection, int scriptId) { + String scriptDir = ProvRunner.getProvProperties().getProperty("org.onap.dmaap.datarouter.provserver.dbscripts"); + String scriptFile = String.format("%s/sql_init_%02d.sql", scriptDir, scriptId); + if (!(new File(scriptFile)).exists()) { + intLogger.error("PROV9005 Failed to load sql script from : " + scriptFile); + exit(1); + } + try (LineNumberReader lineReader = new LineNumberReader(new FileReader(scriptFile)); + Statement statement = connection.createStatement()) { + StringBuilder strBuilder = new StringBuilder(); + String line; + while ((line = lineReader.readLine()) != null) { + if (!line.startsWith("--")) { + line = line.trim(); + strBuilder.append(line); + executeDdlStatement(statement, strBuilder, line); + } + } + strBuilder.setLength(0); + } catch (Exception e) { + intLogger.error("PROV9002 Error when initializing table: " + e.getMessage(), e); + exit(1); + } + } + + + private void executeDdlStatement(Statement statement, StringBuilder strBuilder, String line) throws SQLException { + if (line.endsWith(";")) { + String sql = strBuilder.toString(); + strBuilder.setLength(0); + statement.execute(sql); + } + } +} \ No newline at end of file diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java index 92ab86da..c4994bb3 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java @@ -32,6 +32,7 @@ import java.nio.file.Files; import java.util.Objects; import java.util.Properties; import java.util.TimerTask; +import org.onap.dmaap.datarouter.provisioning.ProvRunner; /** * This class provides a {@link TimerTask} that purges old logfiles (older than the number of days specified by the @@ -52,7 +53,7 @@ public class PurgeLogDirTask extends TimerTask { * PurgeLogDirTask constructor. 
*/ public PurgeLogDirTask() { - Properties prop = (new DB()).getProperties(); + Properties prop = ProvRunner.getProvProperties(); logdir = prop.getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir"); String str = prop.getProperty("org.onap.dmaap.datarouter.provserver.logretention", "30"); this.utilsLogger = EELFManager.getInstance().getLogger("UtilsLog"); diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/SynchronizerTask.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/SynchronizerTask.java new file mode 100644 index 00000000..5eeb45a2 --- /dev/null +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/SynchronizerTask.java @@ -0,0 +1,683 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.provisioning.utils; + +import static org.onap.dmaap.datarouter.provisioning.BaseServlet.TEXT_CT; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.security.KeyStore; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.Timer; +import java.util.TimerTask; +import java.util.TreeSet; +import javax.servlet.http.HttpServletResponse; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.conn.scheme.Scheme; +import org.apache.http.conn.ssl.SSLSocketFactory; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.impl.client.AbstractHttpClient; +import org.apache.http.impl.client.DefaultHttpClient; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.json.JSONTokener; +import org.onap.dmaap.datarouter.provisioning.BaseServlet; +import org.onap.dmaap.datarouter.provisioning.ProvRunner; +import org.onap.dmaap.datarouter.provisioning.beans.EgressRoute; +import org.onap.dmaap.datarouter.provisioning.beans.Feed; +import org.onap.dmaap.datarouter.provisioning.beans.Group; +import org.onap.dmaap.datarouter.provisioning.beans.IngressRoute; +import org.onap.dmaap.datarouter.provisioning.beans.NetworkRoute; +import org.onap.dmaap.datarouter.provisioning.beans.Parameters; +import org.onap.dmaap.datarouter.provisioning.beans.Subscription; +import org.onap.dmaap.datarouter.provisioning.beans.Syncable; + +/** + * This class handles synchronization between provisioning servers (PODs). It has three primary functions: + *
+ * <ol>
+ * <li>Checking DNS once per minute to see which POD the DNS CNAME points to. The CNAME will point to
+ * the active (master) POD.</li>
+ * <li>On non-master (standby) PODs, fetches provisioning data and logs in order to keep MariaDB in sync.</li>
+ * <li>Providing information to other parts of the system as to the current role (ACTIVE_POD, STANDBY_POD,
+ * UNKNOWN_POD) of this POD.</li>
+ * </ol>
+ * <p>For this to work correctly, the following code needs to be placed at the beginning of main().</p>
+ * + * Security.setProperty("networkaddress.cache.ttl", "10"); + * + * + * @author Robert Eby + * @version $Id: SynchronizerTask.java,v 1.10 2014/03/21 13:50:10 eby Exp $ + */ + +public class SynchronizerTask extends TimerTask { + + /** + * This is a singleton -- there is only one SynchronizerTask object in the server. + */ + private static SynchronizerTask synctask; + + /** + * This POD is unknown -- not on the list of PODs. + */ + public static final int UNKNOWN_POD = 0; + /** + * This POD is active -- on the list of PODs, and the DNS CNAME points to us. + */ + public static final int ACTIVE_POD = 1; + /** + * This POD is standby -- on the list of PODs, and the DNS CNAME does not point to us. + */ + public static final int STANDBY_POD = 2; + + private static final String[] stnames = {"UNKNOWN_POD", "ACTIVE_POD", "STANDBY_POD"}; + private static final long ONE_HOUR = 60 * 60 * 1000L; + + private long nextMsg = 0; // only display the "Current podState" msg every 5 mins. + + private final EELFLogger logger; + private final Timer rolex; + private final String spooldir; + private int podState; + private boolean doFetch; + private long nextsynctime; + private AbstractHttpClient httpclient = null; + + @SuppressWarnings("deprecation") + private SynchronizerTask() { + logger = EELFManager.getInstance().getLogger("InternalLog"); + rolex = new Timer(); + spooldir = ProvRunner.getProvProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir"); + podState = UNKNOWN_POD; + doFetch = true; // start off with a fetch + nextsynctime = 0; + + logger.info("PROV5000: Sync task starting, server podState is UNKNOWN_POD"); + try { + // Set up keystore + String type = AafPropsUtils.KEYSTORE_TYPE_PROPERTY; + String store = ProvRunner.getAafPropsUtils().getKeystorePathProperty(); + String pass = ProvRunner.getAafPropsUtils().getKeystorePassProperty(); + KeyStore keyStore = KeyStore.getInstance(type); + try (FileInputStream instream = new FileInputStream(new File(store))) { + keyStore.load(instream, pass.toCharArray()); + + } + // Set up truststore + store = ProvRunner.getAafPropsUtils().getTruststorePathProperty(); + pass = ProvRunner.getAafPropsUtils().getTruststorePassProperty(); + KeyStore trustStore = null; + if (store != null && store.length() > 0) { + trustStore = KeyStore.getInstance(AafPropsUtils.TRUESTSTORE_TYPE_PROPERTY); + try (FileInputStream instream = new FileInputStream(new File(store))) { + trustStore.load(instream, pass.toCharArray()); + + } + } + + // We are connecting with the node name, but the certificate will have the CNAME + // So we need to accept a non-matching certificate name + String keystorepass = ProvRunner.getAafPropsUtils().getKeystorePassProperty(); + try (AbstractHttpClient hc = new DefaultHttpClient()) { + SSLSocketFactory socketFactory = + (trustStore == null) + ? new SSLSocketFactory(keyStore, keystorepass) + : new SSLSocketFactory(keyStore, keystorepass, trustStore); + socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); + Scheme sch = new Scheme("https", 443, socketFactory); + hc.getConnectionManager().getSchemeRegistry().register(sch); + httpclient = hc; + } + setSynchTimer(ProvRunner.getProvProperties().getProperty( + "org.onap.dmaap.datarouter.provserver.sync_interval", "5000")); + } catch (Exception e) { + logger.warn("PROV5005: Problem starting the synchronizer: " + e); + } + } + + private void setSynchTimer(String strInterval) { + // Run once every 5 seconds to check DNS, etc. 
+ long interval; + try { + interval = Long.parseLong(strInterval); + } catch (NumberFormatException e) { + interval = 5000L; + } + rolex.scheduleAtFixedRate(this, 0L, interval); + } + + /** + * Get the singleton SynchronizerTask object. + * + * @return the SynchronizerTask + */ + public static synchronized SynchronizerTask getSynchronizer() { + if (synctask == null) { + synctask = new SynchronizerTask(); + } + return synctask; + } + + /** + * What is the podState of this POD?. + * + * @return one of ACTIVE_POD, STANDBY_POD, UNKNOWN_POD + */ + public int getPodState() { + return podState; + } + + /** + * Is this the active POD?. + * + * @return true if we are active (the master), false otherwise + */ + public boolean isActive() { + return podState == ACTIVE_POD; + } + + /** + * This method is used to signal that another POD (the active POD) has sent us a /fetchProv request, and that we + * should re-synchronize with the master. + */ + public void doFetch() { + doFetch = true; + } + + /** + * Runs once a minute in order to
+     * <ol>
+     * <li>lookup DNS names,</li>
+     * <li>determine the podState of this POD,</li>
+     * <li>if this is a standby POD, and the fetch flag is set, perform a fetch of podState from the active POD,</li>
+     * <li>if this is a standby POD, check if there are any new log records to be replicated.</li>
+     * </ol>
. + */ + @Override + public void run() { + try { + podState = lookupState(); + if (podState == STANDBY_POD) { + // Only copy provisioning data FROM the active server TO the standby + if (doFetch || (System.currentTimeMillis() >= nextsynctime)) { + syncProvisioningData(); + logger.info("PROV5013: Sync completed."); + nextsynctime = System.currentTimeMillis() + ONE_HOUR; + } + } else { + // Don't do fetches on non-standby PODs + doFetch = false; + } + + // Fetch DR logs as needed - server to server + LogfileLoader lfl = LogfileLoader.getLoader(); + if (lfl.isIdle()) { + // Only fetch new logs if the loader is waiting for them. + logger.trace("Checking for logs to replicate..."); + RLEBitSet local = lfl.getBitSet(); + RLEBitSet remote = readRemoteLoglist(); + remote.andNot(local); + if (!remote.isEmpty()) { + logger.debug(" Replicating logs: " + remote); + replicateDataRouterLogs(remote); + } + } + } catch (Exception e) { + logger.warn("PROV0020: Caught exception in SynchronizerTask: " + e); + } + } + + private void syncProvisioningData() { + logger.debug("Initiating a sync..."); + JSONObject jo = readProvisioningJson(); + if (jo != null) { + doFetch = false; + syncFeeds(jo.getJSONArray("feeds")); + syncSubs(jo.getJSONArray("subscriptions")); + syncGroups(jo.getJSONArray("groups")); //Rally:US708115 - 1610 + syncParams(jo.getJSONObject("parameters")); + // The following will not be present in a version=1.0 provfeed + JSONArray ja = jo.optJSONArray("ingress"); + if (ja != null) { + syncIngressRoutes(ja); + } + JSONObject j2 = jo.optJSONObject("egress"); + if (j2 != null) { + syncEgressRoutes(j2); + } + ja = jo.optJSONArray("routing"); + if (ja != null) { + syncNetworkRoutes(ja); + } + } + } + + /** + * This method is used to lookup the CNAME that points to the active server. + * It returns 0 (UNKNOWN_POD), 1(ACTIVE_POD), or (STANDBY_POD) to indicate the podState of this server. + * + * @return the current podState + */ + public int lookupState() { + int newPodState = UNKNOWN_POD; + try { + InetAddress myaddr = InetAddress.getLocalHost(); + if (logger.isTraceEnabled()) { + logger.trace("My address: " + myaddr); + } + String thisPod = myaddr.getHostName(); + Set pods = new TreeSet<>(Arrays.asList(BaseServlet.getPods())); + if (pods.contains(thisPod)) { + InetAddress pserver = InetAddress.getByName(BaseServlet.getActiveProvName()); + newPodState = myaddr.equals(pserver) ? ACTIVE_POD : STANDBY_POD; + if (logger.isDebugEnabled() && System.currentTimeMillis() >= nextMsg) { + logger.debug("Active POD = " + pserver + ", Current podState is " + stnames[newPodState]); + nextMsg = System.currentTimeMillis() + (5 * 60 * 1000L); + } + } else { + logger.warn("PROV5003: My name (" + thisPod + ") is missing from the list of provisioning servers."); + } + } catch (UnknownHostException e) { + logger.warn("PROV5002: Cannot determine the name of this provisioning server.", e); + } + + if (newPodState != podState) { + logger.info(String.format("PROV5001: Server podState changed from %s to %s", + stnames[podState], stnames[newPodState])); + } + return newPodState; + } + + /** + * Synchronize the Feeds in the JSONArray, with the Feeds in the DB. 
+ */ + private void syncFeeds(JSONArray ja) { + Collection coll = new ArrayList<>(); + for (int n = 0; n < ja.length(); n++) { + try { + Feed feed = new Feed(ja.getJSONObject(n)); + coll.add(feed); + } catch (Exception e) { + logger.warn("PROV5004: Invalid object in feed: " + ja.optJSONObject(n), e); + } + } + if (sync(coll, Feed.getAllFeeds())) { + BaseServlet.provisioningDataChanged(); + } + } + + /** + * Synchronize the Subscriptions in the JSONArray, with the Subscriptions in the DB. + */ + private void syncSubs(JSONArray ja) { + Collection coll = new ArrayList<>(); + for (int n = 0; n < ja.length(); n++) { + try { + //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047. + JSONObject jsonObject = ja.getJSONObject(n); + jsonObject.put("sync", "true"); + Subscription sub = new Subscription(jsonObject); + coll.add(sub); + } catch (Exception e) { + logger.warn("PROV5004: Invalid object in subscription: " + ja.optJSONObject(n), e); + } + } + if (sync(coll, Subscription.getAllSubscriptions())) { + BaseServlet.provisioningDataChanged(); + } + } + + /** + * Rally:US708115 - Synchronize the Groups in the JSONArray, with the Groups in the DB. + */ + private void syncGroups(JSONArray ja) { + Collection coll = new ArrayList<>(); + for (int n = 0; n < ja.length(); n++) { + try { + Group group = new Group(ja.getJSONObject(n)); + coll.add(group); + } catch (Exception e) { + logger.warn("PROV5004: Invalid object in group: " + ja.optJSONObject(n), e); + } + } + if (sync(coll, Group.getAllgroups())) { + BaseServlet.provisioningDataChanged(); + } + } + + + /** + * Synchronize the Parameters in the JSONObject, with the Parameters in the DB. + */ + private void syncParams(JSONObject jo) { + Collection coll = new ArrayList<>(); + for (String k : jo.keySet()) { + String val = ""; + try { + val = jo.getString(k); + } catch (JSONException e) { + logger.warn("PROV5004: Invalid object in parameters: " + jo.optJSONObject(k), e); + try { + val = "" + jo.getInt(k); + } catch (JSONException e1) { + logger.warn("PROV5004: Invalid object in parameters: " + jo.optInt(k), e1); + JSONArray ja = jo.getJSONArray(k); + for (int i = 0; i < ja.length(); i++) { + if (i > 0) { + val += "|"; + } + val += ja.getString(i); + } + } + } + coll.add(new Parameters(k, val)); + } + if (sync(coll, Parameters.getParameterCollection())) { + BaseServlet.provisioningDataChanged(); + BaseServlet.provisioningParametersChanged(); + } + } + + private void syncIngressRoutes(JSONArray ja) { + Collection coll = new ArrayList<>(); + for (int n = 0; n < ja.length(); n++) { + try { + IngressRoute in = new IngressRoute(ja.getJSONObject(n)); + coll.add(in); + } catch (NumberFormatException e) { + logger.warn("PROV5004: Invalid object in ingress routes: " + ja.optJSONObject(n)); + } + } + if (sync(coll, IngressRoute.getAllIngressRoutes())) { + BaseServlet.provisioningDataChanged(); + } + } + + private void syncEgressRoutes(JSONObject jo) { + Collection coll = new ArrayList<>(); + for (String key : jo.keySet()) { + try { + int sub = Integer.parseInt(key); + String node = jo.getString(key); + EgressRoute er = new EgressRoute(sub, node); + coll.add(er); + } catch (NumberFormatException e) { + logger.warn("PROV5004: Invalid subid in egress routes: " + key, e); + } catch (IllegalArgumentException e) { + logger.warn("PROV5004: Invalid node name in egress routes: " + key, e); + } + } + if (sync(coll, EgressRoute.getAllEgressRoutes())) { + BaseServlet.provisioningDataChanged(); + } + } + + private void syncNetworkRoutes(JSONArray ja) { + 
Collection coll = new ArrayList<>(); + for (int n = 0; n < ja.length(); n++) { + try { + NetworkRoute nr = new NetworkRoute(ja.getJSONObject(n)); + coll.add(nr); + } catch (JSONException e) { + logger.warn("PROV5004: Invalid object in network routes: " + ja.optJSONObject(n), e); + } + } + if (sync(coll, NetworkRoute.getAllNetworkRoutes())) { + BaseServlet.provisioningDataChanged(); + } + } + + private boolean sync(Collection newc, Collection oldc) { + boolean changes = false; + try { + Map newmap = getMap(newc); + Map oldmap = getMap(oldc); + Set union = new TreeSet<>(newmap.keySet()); + union.addAll(oldmap.keySet()); + for (String n : union) { + Syncable newobj = newmap.get(n); + Syncable oldobj = oldmap.get(n); + if (oldobj == null) { + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + changes = insertRecord(conn, newobj); + } + } else if (newobj == null) { + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + changes = deleteRecord(conn, oldobj); + } + } else if (!newobj.equals(oldobj)) { + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + changes = updateRecord(conn, newobj, oldobj); + } + } + } + } catch (SQLException e) { + logger.warn("PROV5009: problem during sync, exception: " + e); + } + return changes; + } + + private boolean updateRecord(Connection conn, Syncable newobj, Syncable oldobj) { + if (logger.isDebugEnabled()) { + logger.debug(" Updating record: " + newobj); + } + boolean changes = newobj.doUpdate(conn); + checkChangeOwner(newobj, oldobj); + + return changes; + } + + private boolean deleteRecord(Connection conn, Syncable oldobj) { + if (logger.isDebugEnabled()) { + logger.debug(" Deleting record: " + oldobj); + } + return oldobj.doDelete(conn); + } + + private boolean insertRecord(Connection conn, Syncable newobj) { + if (logger.isDebugEnabled()) { + logger.debug(" Inserting record: " + newobj); + } + return newobj.doInsert(conn); + } + + private Map getMap(Collection coll) { + Map map = new HashMap<>(); + for (Syncable v : coll) { + map.put(v.getKey(), v); + } + return map; + } + + /** + * Change owner of FEED/SUBSCRIPTION. + * Rally US708115 Change Ownership of FEED - 1610 + */ + private void checkChangeOwner(Syncable newobj, Syncable oldobj) { + if (newobj instanceof Feed) { + Feed oldfeed = (Feed) oldobj; + Feed newfeed = (Feed) newobj; + + if (!oldfeed.getPublisher().equals(newfeed.getPublisher())) { + logger.info("PROV5013 - Previous publisher: " + + oldfeed.getPublisher() + ": New publisher-" + newfeed.getPublisher()); + oldfeed.setPublisher(newfeed.getPublisher()); + oldfeed.changeOwnerShip(); + } + } else if (newobj instanceof Subscription) { + Subscription oldsub = (Subscription) oldobj; + Subscription newsub = (Subscription) newobj; + + if (!oldsub.getSubscriber().equals(newsub.getSubscriber())) { + logger.info("PROV5013 - Previous subscriber: " + + oldsub.getSubscriber() + ": New subscriber-" + newsub.getSubscriber()); + oldsub.setSubscriber(newsub.getSubscriber()); + oldsub.changeOwnerShip(); + } + } + + } + + /** + * Issue a GET on the peer POD's /internal/prov/ URL to get a copy of its provisioning data. 
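+     * <p>The response body is expected to be the same full provisioning document that Poker assembles; an
+     * illustrative sketch of its shape (values elided):</p>
+     * <pre>
+     *     {
+     *       "feeds": [ ... ],
+     *       "groups": [ ... ],
+     *       "subscriptions": [ ... ],
+     *       "parameters": { ... },
+     *       "ingress": [ ... ],
+     *       "egress": { ... },
+     *       "routing": [ ... ]
+     *     }
+     * </pre>
+     * <p>The "ingress", "egress" and "routing" entries may be absent from a version=1.0 provfeed, which is why
+     * syncProvisioningData() treats them as optional.</p>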
+ * + * @return the provisioning data (as a JONObject) + */ + private synchronized JSONObject readProvisioningJson() { + String url = URLUtilities.generatePeerProvURL(); + HttpGet get = new HttpGet(url); + try { + HttpResponse response = httpclient.execute(get); + int code = response.getStatusLine().getStatusCode(); + if (code != HttpServletResponse.SC_OK) { + logger.warn("PROV5010: readProvisioningJson failed, bad error code: " + code); + return null; + } + HttpEntity entity = response.getEntity(); + String ctype = entity.getContentType().getValue().trim(); + if (!ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE1) + && !ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE2)) { + logger.warn("PROV5011: readProvisioningJson failed, bad content type: " + ctype); + return null; + } + return new JSONObject(new JSONTokener(entity.getContent())); + } catch (Exception e) { + logger.warn("PROV5012: readProvisioningJson failed, exception: " + e); + return null; + } finally { + get.releaseConnection(); + } + } + + /** + * Issue a GET on the peer POD's /internal/drlogs/ URL to get an RELBitSet representing the log records available in + * the remote database. + * + * @return the bitset + */ + public RLEBitSet readRemoteLoglist() { + RLEBitSet bs = new RLEBitSet(); + String url = URLUtilities.generatePeerLogsURL(); + + //Fixing if only one Prov is configured, not to give exception to fill logs, return empty bitset. + if ("".equals(url)) { + return bs; + } + //End of fix. + + HttpGet get = new HttpGet(url); + try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) { + HttpResponse response = httpclient.execute(get); + int code = response.getStatusLine().getStatusCode(); + if (code != HttpServletResponse.SC_OK) { + logger.warn("PROV5010: readRemoteLoglist failed, bad error code: " + code); + return bs; + } + HttpEntity entity = response.getEntity(); + String ctype = entity.getContentType().getValue().trim(); + if (!TEXT_CT.equals(ctype)) { + logger.warn("PROV5011: readRemoteLoglist failed, bad content type: " + ctype); + return bs; + } + InputStream is = entity.getContent(); + int ch; + while ((ch = is.read()) >= 0) { + bos.write(ch); + } + bs.set(bos.toString()); + is.close(); + } catch (Exception e) { + logger.warn("PROV5012: readRemoteLoglist failed, exception: " + e); + return bs; + } finally { + get.releaseConnection(); + } + return bs; + } + + /** + * Issue a POST on the peer POD's /internal/drlogs/ URL to fetch log records available in the remote database that + * we wish to copy to the local database. 
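+     * <p>Typically driven from run() on a standby POD: the records to request are the remote bitset minus
+     * everything already held locally, e.g. (a sketch using the calls defined in this class):</p>
+     * <pre>
+     *     RLEBitSet local = LogfileLoader.getLoader().getBitSet();
+     *     RLEBitSet remote = readRemoteLoglist();
+     *     remote.andNot(local);              // keep only the records we do not yet have
+     *     if (!remote.isEmpty()) {
+     *         replicateDataRouterLogs(remote);
+     *     }
+     * </pre>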
+ * + * @param bs the bitset (an RELBitSet) of log records to fetch + */ + public void replicateDataRouterLogs(RLEBitSet bs) { + String url = URLUtilities.generatePeerLogsURL(); + HttpPost post = new HttpPost(url); + try { + String str = bs.toString(); + HttpEntity body = new ByteArrayEntity(str.getBytes(), ContentType.create(TEXT_CT)); + post.setEntity(body); + if (logger.isDebugEnabled()) { + logger.debug("Requesting records: " + str); + } + + HttpResponse response = httpclient.execute(post); + int code = response.getStatusLine().getStatusCode(); + if (code != HttpServletResponse.SC_OK) { + logger.warn("PROV5010: replicateDataRouterLogs failed, bad error code: " + code); + return; + } + HttpEntity entity = response.getEntity(); + String ctype = entity.getContentType().getValue().trim(); + if (!TEXT_CT.equals(ctype)) { + logger.warn("PROV5011: replicateDataRouterLogs failed, bad content type: " + ctype); + return; + } + + String spoolname = "" + System.currentTimeMillis(); + Path tmppath = Paths.get(spooldir, spoolname); + Path donepath = Paths.get(spooldir, "IN." + spoolname); + Files.copy(entity.getContent(), Paths.get(spooldir, spoolname), StandardCopyOption.REPLACE_EXISTING); + Files.move(tmppath, donepath, StandardCopyOption.REPLACE_EXISTING); + logger.info("Approximately " + bs.cardinality() + " records replicated."); + } catch (Exception e) { + logger.warn("PROV5012: replicateDataRouterLogs failed, exception: " + e); + } finally { + post.releaseConnection(); + } + } +} diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java index 2f08b176..3b3b72a9 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java @@ -37,7 +37,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeSet; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * Generate a daily per feed latency report. The report is a .csv file containing the following columns: @@ -65,10 +65,6 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB; */ public class DailyLatencyReport extends ReportBase { - private static final String SELECT_SQL = - "select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, CONTENT_LENGTH from LOG_RECORDS" + - " where EVENT_TIME >= ? 
and EVENT_TIME <= ?"; - private class Job { private long pubtime = 0; private long clen = 0; @@ -161,33 +157,29 @@ public class DailyLatencyReport extends ReportBase { Map map = new HashMap<>(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); long start = System.currentTimeMillis(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) { - ps.setLong(1, from); - ps.setLong(2, to); - try (ResultSet rs = ps.executeQuery()) { - while (rs.next()) { - String id = rs.getString("PUBLISH_ID"); - int feed = rs.getInt("FEEDID"); - long etime = rs.getLong("EVENT_TIME"); - String type = rs.getString("TYPE"); - String fid = rs.getString("FEED_FILEID"); - long clen = rs.getLong("CONTENT_LENGTH"); - String date = sdf.format(new Date(getPstart(id))); - String key = date + "," + feed; - Counters c = map.get(key); - if (c == null) { - c = new Counters(date, feed); - map.put(key, c); - } - c.addEvent(etime, type, id, fid, clen); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, " + + "CONTENT_LENGTH from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ?")) { + ps.setLong(1, from); + ps.setLong(2, to); + try (ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + String id = rs.getString("PUBLISH_ID"); + int feed = rs.getInt("FEEDID"); + long etime = rs.getLong("EVENT_TIME"); + String type = rs.getString("TYPE"); + String fid = rs.getString("FEED_FILEID"); + long clen = rs.getLong("CONTENT_LENGTH"); + String date = sdf.format(new Date(getPstart(id))); + String key = date + "," + feed; + Counters c = map.get(key); + if (c == null) { + c = new Counters(date, feed); + map.put(key, c); } + c.addEvent(etime, type, id, fid, clen); } - - db.release(conn); } } catch (SQLException e) { logger.error("SQLException: " + e.getMessage()); diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java index 65297886..3c63c28c 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java @@ -37,10 +37,9 @@ import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; - import org.json.JSONException; import org.json.JSONObject; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * Generate a feeds report. The report is a .CSV file. @@ -49,15 +48,6 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB; * @version $Id: FeedReport.java,v 1.2 2013/11/06 16:23:55 eby Exp $ */ public class FeedReport extends ReportBase { - private static final String SELECT_SQL = - // Note to use the time in the publish_id, use date(from_unixtime(substring(publish_id, 1, 10))) - // To just use month, substring(from_unixtime(event_time div 1000), 1, 7) - "select date(from_unixtime(event_time div 1000)) as date, type, feedid, delivery_subid, count(*) as count" + - " from LOG_RECORDS" + - " where type = 'pub' or type = 'del'" + - " group by date, type, feedid, delivery_subid"; - private static final String SELECT_SQL_OLD = - "select PUBLISH_ID, TYPE, FEEDID, DELIVERY_SUBID from LOG_RECORDS where EVENT_TIME >= ? 
and EVENT_TIME <= ?"; @Override public void run() { @@ -65,23 +55,22 @@ public class FeedReport extends ReportBase { JSONObject jo = new JSONObject(); long start = System.currentTimeMillis(); StringBuilder sb = new StringBuilder(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try( PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) { - try (ResultSet rs = ps.executeQuery()) { - while (rs.next()) { - String date = rs.getString("date"); - String type = rs.getString("type"); - int feedid = rs.getInt("feedid"); - int subid = type.equals("del") ? rs.getInt("delivery_subid") : 0; - int count = rs.getInt("count"); - sb.append(date + "," + type + "," + feedid + "," + subid + "," + count + "\n"); - } - } - } - db.release(conn); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + // Note to use the time in the publish_id, use date(from_unixtime(substring(publish_id, 1, 10))) + // To just use month, substring(from_unixtime(event_time div 1000), 1, 7) + "select date(from_unixtime(event_time div 1000)) as date, type, feedid, delivery_subid, count(*) " + + "as count from LOG_RECORDS where type = 'pub' or type = 'del' group by date, type, feedid, delivery_subid")) { + try (ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + String date = rs.getString("date"); + String type = rs.getString("type"); + int feedid = rs.getInt("feedid"); + int subid = type.equals("del") ? rs.getInt("delivery_subid") : 0; + int count = rs.getInt("count"); + sb.append(date + "," + type + "," + feedid + "," + subid + "," + count + "\n"); + } + } } catch (SQLException e) { logger.error(e.toString()); } @@ -99,53 +88,50 @@ public class FeedReport extends ReportBase { JSONObject jo = new JSONObject(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); long start = System.currentTimeMillis(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL_OLD)) { - ps.setLong(1, from); - ps.setLong(2, to); - ps.setFetchSize(100000); - try(ResultSet rs = ps.executeQuery()) { - while (rs.next()) { - String id = rs.getString("PUBLISH_ID"); - String date = sdf.format(new Date(getPstart(id))); - JSONObject datemap = jo.optJSONObject(date); - if (datemap == null) { - datemap = new JSONObject(); - jo.put(date, datemap); - } - int feed = rs.getInt("FEEDID"); - JSONObject feedmap = datemap.optJSONObject("" + feed); - if (feedmap == null) { - feedmap = new JSONObject(); - feedmap.put("pubcount", 0); - datemap.put("" + feed, feedmap); + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "select PUBLISH_ID, TYPE, FEEDID, DELIVERY_SUBID from LOG_RECORDS " + + "where EVENT_TIME >= ? 
and EVENT_TIME <= ?")) { + ps.setLong(1, from); + ps.setLong(2, to); + ps.setFetchSize(100000); + try(ResultSet rs = ps.executeQuery()) { + while (rs.next()) { + String id = rs.getString("PUBLISH_ID"); + String date = sdf.format(new Date(getPstart(id))); + JSONObject datemap = jo.optJSONObject(date); + if (datemap == null) { + datemap = new JSONObject(); + jo.put(date, datemap); + } + int feed = rs.getInt("FEEDID"); + JSONObject feedmap = datemap.optJSONObject("" + feed); + if (feedmap == null) { + feedmap = new JSONObject(); + feedmap.put("pubcount", 0); + datemap.put("" + feed, feedmap); + } + String type = rs.getString("TYPE"); + if (type.equals("pub")) { + try { + int n = feedmap.getInt("pubcount"); + feedmap.put("pubcount", n + 1); + } catch (JSONException e) { + feedmap.put("pubcount", 1); + logger.error(e.toString()); } - String type = rs.getString("TYPE"); - if (type.equals("pub")) { - try { - int n = feedmap.getInt("pubcount"); - feedmap.put("pubcount", n + 1); - } catch (JSONException e) { - feedmap.put("pubcount", 1); - logger.error(e.toString()); - } - } else if (type.equals("del")) { - String subid = "" + rs.getInt("DELIVERY_SUBID"); - try { - int n = feedmap.getInt(subid); - feedmap.put(subid, n + 1); - } catch (JSONException e) { - feedmap.put(subid, 1); - logger.error(e.toString()); - } + } else if (type.equals("del")) { + String subid = "" + rs.getInt("DELIVERY_SUBID"); + try { + int n = feedmap.getInt(subid); + feedmap.put(subid, n + 1); + } catch (JSONException e) { + feedmap.put(subid, 1); + logger.error(e.toString()); } } } } - db.release(conn); } catch (SQLException e) { logger.error(e.toString()); } @@ -225,11 +211,11 @@ public class FeedReport extends ReportBase { String[] feeds = JSONObject.getNames(j2); Arrays.sort(feeds); s.append("") - .append(date) - .append(""); + .append(date) + .append(""); s.append("") - .append(feeds.length) - .append(""); + .append(feeds.length) + .append(""); String px1 = ""; for (String feed : feeds) { JSONObject j3 = j2.getJSONObject(feed); @@ -239,15 +225,15 @@ public class FeedReport extends ReportBase { String[] subs = JSONObject.getNames(j3); Arrays.sort(subs); s.append(px1) - .append("") - .append(feed) - .append(""); + .append("") + .append(feed) + .append(""); s.append("") - .append(pubcount) - .append(""); + .append(pubcount) + .append(""); s.append("") - .append(subcnt) - .append(""); + .append(subcnt) + .append(""); String px2 = ""; for (String sub : subs) { if (!sub.equals("pubcount")) { diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java index f5001409..f98116d2 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java @@ -32,8 +32,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; - -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * Generate a per-file latency report. 
It reports on the details related to one file published @@ -54,9 +53,6 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB; * @version $Id: LatencyReport.java,v 1.1 2013/10/28 18:06:53 eby Exp $ */ public class LatencyReport extends ReportBase { - private static final String SELECT_SQL = - "select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, CONTENT_LENGTH from LOG_RECORDS" + - " where EVENT_TIME >= ? and EVENT_TIME <= ? order by PUBLISH_ID, EVENT_TIME"; private class Event { public final String type; @@ -80,7 +76,7 @@ public class LatencyReport extends ReportBase { feedid = fid; clen = c; fileid = s; - events = new ArrayList(); + events = new ArrayList<>(); } private long pubtime; @@ -141,15 +137,14 @@ public class LatencyReport extends ReportBase { @Override public void run() { long start = System.currentTimeMillis(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); - try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)){ + try (Connection conn = ProvDbUtils.getInstance().getConnection(); + PreparedStatement ps = conn.prepareStatement( + "select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, CONTENT_LENGTH from LOG_RECORDS where " + + "EVENT_TIME >= ? and EVENT_TIME <= ? order by PUBLISH_ID, EVENT_TIME")) { ps.setLong(1, from); ps.setLong(2, to); try(ResultSet rs = ps.executeQuery()) { - try(PrintWriter os = new PrintWriter(outfile)) { + try (PrintWriter os = new PrintWriter(outfile)) { os.println("recordid,feedid,uri,size,min,max,avg,fanout"); Counters c = null; while (rs.next()) { @@ -174,8 +169,6 @@ public class LatencyReport extends ReportBase { c.addEvent(type, etime); } } - db.release(conn); - } } } catch (FileNotFoundException e) { System.err.println("File cannot be written: " + outfile); diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java index b580af77..aac6dab1 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java @@ -33,8 +33,7 @@ import java.sql.SQLException; import java.util.HashMap; import java.util.Map; import java.util.TreeSet; - -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * Generate a subscribers report. The report is a .CSV file. It contains information per-day and per-subscriber, @@ -45,15 +44,15 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB; */ public class SubscriberReport extends ReportBase { private static final String SELECT_SQL = - "select date(from_unixtime(EVENT_TIME div 1000)) as DATE, DELIVERY_SUBID, RESULT, COUNT(RESULT) as COUNT" + - " from LOG_RECORDS" + - " where TYPE = 'del' and EVENT_TIME >= ? and EVENT_TIME <= ?" + - " group by DATE, DELIVERY_SUBID, RESULT"; + "select date(from_unixtime(EVENT_TIME div 1000)) as DATE, DELIVERY_SUBID, RESULT, COUNT(RESULT) as COUNT" + + " from LOG_RECORDS" + + " where TYPE = 'del' and EVENT_TIME >= ? and EVENT_TIME <= ?" + + " group by DATE, DELIVERY_SUBID, RESULT"; private static final String SELECT_SQL2 = - "select date(from_unixtime(EVENT_TIME div 1000)) as DATE, DELIVERY_SUBID, COUNT(CONTENT_LENGTH_2) as COUNT" + - " from LOG_RECORDS" + - " where TYPE = 'dlx' and CONTENT_LENGTH_2 = -1 and EVENT_TIME >= ? and EVENT_TIME <= ?" 
+ - " group by DATE, DELIVERY_SUBID"; + "select date(from_unixtime(EVENT_TIME div 1000)) as DATE, DELIVERY_SUBID, COUNT(CONTENT_LENGTH_2) as COUNT" + + " from LOG_RECORDS" + + " where TYPE = 'dlx' and CONTENT_LENGTH_2 = -1 and EVENT_TIME >= ? and EVENT_TIME <= ?" + + " group by DATE, DELIVERY_SUBID"; private class Counters { private String date; @@ -89,20 +88,17 @@ public class SubscriberReport extends ReportBase { @Override public String toString() { return date + "," + sub + "," + - c100 + "," + c200 + "," + c300 + "," + c400 + "," + c500 + "," + - cm1 + "," + cdlx; + c100 + "," + c200 + "," + c300 + "," + c400 + "," + c500 + "," + + cm1 + "," + cdlx; } } @Override public void run() { - Map map = new HashMap(); + Map map = new HashMap<>(); long start = System.currentTimeMillis(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) { ps.setLong(1, from); ps.setLong(2, to); @@ -123,33 +119,31 @@ public class SubscriberReport extends ReportBase { } } - try( PreparedStatement ps2 = conn.prepareStatement(SELECT_SQL2)) { - ps2.setLong(1, from); - ps2.setLong(2, to); - try(ResultSet rs2 = ps2.executeQuery()) { - while (rs2.next()) { - String date = rs2.getString("DATE"); - int sub = rs2.getInt("DELIVERY_SUBID"); - int count = rs2.getInt("COUNT"); - String key = date + "," + sub; - Counters c = map.get(key); - if (c == null) { - c = new Counters(date, sub); - map.put(key, c); - } - c.addDlxCount(count); - } - } - } - - db.release(conn); + try( PreparedStatement ps2 = conn.prepareStatement(SELECT_SQL2)) { + ps2.setLong(1, from); + ps2.setLong(2, to); + try (ResultSet rs2 = ps2.executeQuery()) { + while (rs2.next()) { + String date = rs2.getString("DATE"); + int sub = rs2.getInt("DELIVERY_SUBID"); + int count = rs2.getInt("COUNT"); + String key = date + "," + sub; + Counters c = map.get(key); + if (c == null) { + c = new Counters(date, sub); + map.put(key, c); + } + c.addDlxCount(count); + } + } + } } catch (SQLException e) { logger.error("SQLException: " + e.getMessage()); } logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms"); - try (PrintWriter os = new PrintWriter(outfile)){ + try (PrintWriter os = new PrintWriter(outfile)) { os.println("date,subid,count100,count200,count300,count400,count500,countminus1,countdlx"); - for (String key : new TreeSet(map.keySet())) { + for (String key : new TreeSet<>(map.keySet())) { Counters c = map.get(key); os.println(c.toString()); } diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java index 7d5268b1..732e7c59 100644 --- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java +++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java @@ -24,6 +24,8 @@ package org.onap.dmaap.datarouter.reports; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.sql.Connection; @@ -35,10 +37,7 @@ import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.TreeSet; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import 
org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; /** * Generate a traffic volume report. The report is a .csv file containing the following columns: @@ -57,9 +56,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB; * @version $Id: VolumeReport.java,v 1.3 2014/02/28 15:11:13 eby Exp $ */ public class VolumeReport extends ReportBase { - private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" + - " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?"; - private EELFLogger loggerVolumeReport= EELFManager.getInstance().getLogger("ReportLog");; + private EELFLogger loggerVolumeReport= EELFManager.getInstance().getLogger("ReportLog"); private class Counters { int filespublished, filesdelivered, filesexpired; long bytespublished, bytesdelivered, bytesexpired; @@ -67,33 +64,32 @@ public class VolumeReport extends ReportBase { @Override public String toString() { return String.format("%d,%d,%d,%d,%d,%d", - filespublished, bytespublished, filesdelivered, - bytesdelivered, filesexpired, bytesexpired); + filespublished, bytespublished, filesdelivered, + bytesdelivered, filesexpired, bytesexpired); } } @Override public void run() { - Map map = new HashMap(); + Map map = new HashMap<>(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); long start = System.currentTimeMillis(); - try { - DB db = new DB(); - @SuppressWarnings("resource") - Connection conn = db.getConnection(); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { // We need to run this SELECT in stages, because otherwise we run out of memory! final long stepsize = 6000000L; - boolean go_again = true; - for (long i = 0; go_again; i += stepsize) { - try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) { + boolean goAgain = true; + for (long i = 0; goAgain; i += stepsize) { + try (PreparedStatement ps = conn.prepareStatement( + "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT from LOG_RECORDS " + + "where EVENT_TIME >= ? and EVENT_TIME <= ? 
LIMIT ?, ?")) { ps.setLong(1, from); ps.setLong(2, to); ps.setLong(3, i); ps.setLong(4, stepsize); - try(ResultSet rs = ps.executeQuery()) { - go_again = false; + try (ResultSet rs = ps.executeQuery()) { + goAgain = false; while (rs.next()) { - go_again = true; + goAgain = true; long etime = rs.getLong("EVENT_TIME"); String type = rs.getString("TYPE"); int feed = rs.getInt("FEEDID"); @@ -120,14 +116,10 @@ public class VolumeReport extends ReportBase { } } } - } - catch (SQLException sqlException) - { - loggerVolumeReport.error("SqlException",sqlException); + } catch (SQLException sqlException) { + loggerVolumeReport.error("SqlException", sqlException); } } - - db.release(conn); } catch (SQLException e) { loggerVolumeReport.error("SQLException: " + e.getMessage()); } diff --git a/datarouter-prov/src/main/resources/docker/startup.sh b/datarouter-prov/src/main/resources/docker/startup.sh index aa2f69b6..19e381d6 100644 --- a/datarouter-prov/src/main/resources/docker/startup.sh +++ b/datarouter-prov/src/main/resources/docker/startup.sh @@ -9,7 +9,7 @@ CLASSPATH=$ETC for FILE in `find $LIB -name *.jar`; do CLASSPATH=$CLASSPATH:$FILE done -java -classpath $CLASSPATH org.onap.dmaap.datarouter.provisioning.Main +java -classpath $CLASSPATH org.onap.dmaap.datarouter.provisioning.ProvRunner runner_file="$LIB/datarouter-prov-jar-with-dependencies.jar" echo "Starting using" $runner_file diff --git a/datarouter-prov/src/main/resources/misc/drtrprov b/datarouter-prov/src/main/resources/misc/drtrprov index e764c145..195756bf 100644 --- a/datarouter-prov/src/main/resources/misc/drtrprov +++ b/datarouter-prov/src/main/resources/misc/drtrprov @@ -66,7 +66,7 @@ start() { exit 0 fi echo '0 1 * * * /opt/app/datartr/bin/runreports' | crontab - nohup java $JAVA_OPTS org.onap.dmaap.datarouter.provisioning.Main listAppender; @@ -93,7 +97,6 @@ public class FeedServletTest extends DrServletTestBase { public void setUp() throws Exception { listAppender = setTestLogger(FeedServlet.class); feedServlet = new FeedServlet(); - db = new DB(); setAuthoriserToReturnRequestIsAuthorized(); setUpValidAuthorisedRequest(); setUpValidSecurityOnHttpRequest(); @@ -277,7 +280,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.getInputStream()).thenReturn(inStream); when(request.getPathInfo()).thenReturn("/2"); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { return null; } }; @@ -289,7 +292,7 @@ public class FeedServletTest extends DrServletTestBase { public void Given_Request_Is_HTTP_PUT_And_Request_Contains_Invalid_JSON_Then_Bad_Request_Response_Is_Generated() throws Exception { when(request.getPathInfo()).thenReturn("/2"); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { return new JSONObject(); } }; @@ -303,7 +306,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "1.0"); @@ -320,7 +323,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject 
JSObject = buildRequestJsonObject(); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "not_stub_name"); jo.put("version", "1.0"); @@ -337,7 +340,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "AafFeed"); jo.put("version", "v0.2"); @@ -355,7 +358,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "AafFeed"); jo.put("version", "v0.1"); @@ -372,7 +375,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "AafFeed"); jo.put("version", "v0.1"); @@ -393,7 +396,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.isUserInRole("org.onap.dmaap-dr.feed|*|edit")).thenReturn(true); JSONObject JSObject = buildRequestJsonObject(); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "AafFeed"); jo.put("version", "v0.1"); @@ -419,7 +422,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "AafFeed"); jo.put("version", "v0.1"); @@ -443,7 +446,7 @@ public class FeedServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); FeedServlet feedServlet = new FeedServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "AafFeed"); jo.put("version", "v0.1"); @@ -534,6 +537,8 @@ public class FeedServletTest extends DrServletTestBase { feed.setFeedid(1); feed.setGroupid(1); feed.setDeleted(false); - feed.doUpdate(db.getConnection()); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + feed.doUpdate(conn); + } } } \ No newline at end of file diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/GroupServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/GroupServletTest.java index a0781264..67687d08 100755 --- 
a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/GroupServletTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/GroupServletTest.java @@ -34,6 +34,7 @@ import org.onap.dmaap.datarouter.authz.AuthorizationResponse; import org.onap.dmaap.datarouter.authz.Authorizer; import org.onap.dmaap.datarouter.provisioning.beans.Insertable; import org.onap.dmaap.datarouter.provisioning.beans.Updateable; +import org.onap.dmaap.datarouter.provisioning.utils.Poker; import org.powermock.modules.junit4.PowerMockRunner; import javax.persistence.EntityManager; @@ -294,7 +295,7 @@ public class GroupServletTest { private GroupServlet overideGetJSONFromInputToReturnAnInvalidGroup(Boolean invalidName) { GroupServlet groupServlet = new GroupServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject invalidGroup = new JSONObject(); String invalidEntry = "groupNameThatIsTooLongTooBeValidgroupNameThatIsTooLongTooBeValid"; invalidEntry = invalidEntry + invalidEntry + invalidEntry + invalidEntry + invalidEntry; @@ -317,7 +318,7 @@ public class GroupServletTest { private GroupServlet overideGetJSONFromInputToReturnAValidGroupWithFail() { GroupServlet groupServlet = new GroupServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject validGroup = new JSONObject(); validGroup.put("name", "groupName"); validGroup.put("groupid", 2); @@ -341,7 +342,7 @@ public class GroupServletTest { private GroupServlet overideGetJSONFromInputToReturnGroupInDb() { GroupServlet groupServlet = new GroupServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject validGroup = new JSONObject(); validGroup.put("name", "Group1"); validGroup.put("groupid", 2); @@ -357,7 +358,7 @@ public class GroupServletTest { private GroupServlet overideGetJSONFromInputToReturnNewGroupToInsert() { GroupServlet groupServlet = new GroupServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject validGroup = new JSONObject(); validGroup.put("name", "Group2"); validGroup.put("groupid", 2); diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java index 5239b800..51531669 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java @@ -57,6 +57,7 @@ import org.onap.dmaap.datarouter.provisioning.beans.Insertable; import org.onap.dmaap.datarouter.provisioning.beans.LogRecord; import org.onap.dmaap.datarouter.provisioning.beans.Parameters; import org.onap.dmaap.datarouter.provisioning.beans.Updateable; +import org.onap.dmaap.datarouter.provisioning.utils.Poker; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/LogServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/LogServletTest.java index c2bccb71..a52f1a0b 100755 --- 
a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/LogServletTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/LogServletTest.java @@ -60,9 +60,9 @@ public class LogServletTest extends DrServletTestBase { private HttpServletResponse response; @Mock - private ServletOutputStream s; + private ServletOutputStream servletOutputStream; - ListAppender listAppender; + private ListAppender listAppender; @BeforeClass public static void init() { @@ -153,8 +153,7 @@ public class LogServletTest extends DrServletTestBase { } @Test - public void Given_Request_Is_HTTP_GET_And_Is_FeedLog_A_STATUS_OK_Response_Is_Generated() - throws Exception { + public void Given_Request_Is_HTTP_GET_And_Is_FeedLog_A_STATUS_OK_Response_Is_Generated() { logServlet.doGet(request, response); verify(response).setStatus(eq(HttpServletResponse.SC_OK)); verifyEnteringExitCalled(listAppender); @@ -177,8 +176,7 @@ public class LogServletTest extends DrServletTestBase { } @Test - public void Given_Request_Is_GetPublishRecordsForFeed_And_Type_Is_Publish_A_STATUS_OK_Response_Is_Generated() - throws Exception { + public void Given_Request_Is_GetPublishRecordsForFeed_And_Type_Is_Publish_A_STATUS_OK_Response_Is_Generated() { when(request.getParameter("type")).thenReturn("pub"); when(request.getParameter("expiryReason")).thenReturn(null); logServlet.doGet(request, response); @@ -196,9 +194,9 @@ public class LogServletTest extends DrServletTestBase { when(request.getParameter("filename")).thenReturn("file123"); logServlet.doGet(request, response); verify(response).setStatus(eq(HttpServletResponse.SC_OK)); - verify(s, times(1)).print("["); - verify(s, times(1)).print(matches("\n\\{\"statusCode\":204,\"publishId\":\"ID\",\"requestURI\":\"URL/file123\",\"sourceIP\":\"172.0.0.8\",\"method\":\"PUT\",\"contentType\":\"application/vnd.dmaap-dr.log-list; version=1.0\",\"endpointId\":\"user\",\"type\":\"pub\",\"date\":\"2050-05-14T1[6-7]:46:04.422Z\",\"contentLength\":100,\"fileName\":\"file123\"}")); - verify(s, times(1)).print("["); + verify(servletOutputStream, times(1)).print("["); + verify(servletOutputStream, times(1)).print(matches("\n\\{\"statusCode\":204,\"publishId\":\"ID\",\"requestURI\":\"URL/file123\",\"sourceIP\":\"172.0.0.8\",\"method\":\"PUT\",\"contentType\":\"application/vnd.dmaap-dr.log-list; version=1.0\",\"endpointId\":\"user\",\"type\":\"pub\",\"date\":\"2050-05-14T1[6-7]:46:04.422Z\",\"contentLength\":100,\"fileName\":\"file123\"}")); + verify(servletOutputStream, times(1)).print("["); } @Test @@ -212,9 +210,9 @@ public class LogServletTest extends DrServletTestBase { when(request.getParameter("filename")).thenReturn("file456"); logServlet.doGet(request, response); verify(response).setStatus(eq(HttpServletResponse.SC_OK)); - verify(s, times(1)).print("["); - verify(s, times(0)).print(matches("\n\\{\"statusCode\":204,\"publishId\":\"ID\",\"requestURI\":\"URL/file123\",\"sourceIP\":\"172.0.0.8\",\"method\":\"PUT\",\"contentType\":\"application/vnd.dmaap-dr.log-list; version=1.0\",\"endpointId\":\"user\",\"type\":\"pub\",\"date\":\"2050-05-14T1[6-7]:46:04.422Z\",\"contentLength\":100,\"fileName\":\"file123\"}")); - verify(s, times(1)).print("["); + verify(servletOutputStream, times(1)).print("["); + verify(servletOutputStream, times(0)).print(matches("\n\\{\"statusCode\":204,\"publishId\":\"ID\",\"requestURI\":\"URL/file123\",\"sourceIP\":\"172.0.0.8\",\"method\":\"PUT\",\"contentType\":\"application/vnd.dmaap-dr.log-list; 
version=1.0\",\"endpointId\":\"user\",\"type\":\"pub\",\"date\":\"2050-05-14T1[6-7]:46:04.422Z\",\"contentLength\":100,\"fileName\":\"file123\"}")); + verify(servletOutputStream, times(1)).print("["); } @Test @@ -268,7 +266,7 @@ public class LogServletTest extends DrServletTestBase { when(request.getParameter("expiryReason")).thenReturn("other"); when(request.getParameter("start")).thenReturn("2536159564422"); when(request.getParameter("end")).thenReturn("2536159564422"); - s = mock(ServletOutputStream.class); - when(response.getOutputStream()).thenReturn(s); + servletOutputStream = mock(ServletOutputStream.class); + when(response.getOutputStream()).thenReturn(servletOutputStream); } } \ No newline at end of file diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/ProxyServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/ProxyServletTest.java index a0442c13..80fcb0e7 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/ProxyServletTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/ProxyServletTest.java @@ -53,6 +53,7 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; +import org.onap.dmaap.datarouter.provisioning.utils.SynchronizerTask; import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/PublishServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/PublishServletTest.java index 2202550b..e1a11f3e 100755 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/PublishServletTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/PublishServletTest.java @@ -29,7 +29,7 @@ import org.apache.commons.lang3.reflect.FieldUtils; import org.junit.*; import org.junit.runner.RunWith; import org.mockito.Mock; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @@ -67,9 +67,8 @@ public class PublishServletTest extends DrServletTestBase { private static EntityManagerFactory emf; private static EntityManager em; - private DB db; - ListAppender listAppender; + private ListAppender listAppender; @BeforeClass public static void init() { @@ -92,7 +91,7 @@ public class PublishServletTest extends DrServletTestBase { public void setUp() throws Exception { listAppender = setTestLogger(PublishServlet.class); publishServlet = new PublishServlet(); - db = new DB(); + ProvDbUtils.getInstance().initProvDB(); } @Test diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/StatisticsServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/StatisticsServletTest.java index 3438627d..f9fcd687 100755 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/StatisticsServletTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/StatisticsServletTest.java @@ -35,21 +35,14 @@ import javax.persistence.Persistence; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - import 
org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; -import org.onap.dmaap.datarouter.provisioning.utils.DB; import org.powermock.modules.junit4.PowerMockRunner; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.SQLException; - - @RunWith(PowerMockRunner.class) public class StatisticsServletTest { @@ -61,8 +54,6 @@ public class StatisticsServletTest { @Mock private HttpServletResponse response; - private DB db; - private static EntityManagerFactory emf; private static EntityManager em; @@ -85,7 +76,6 @@ public class StatisticsServletTest { @Before public void setUp() throws Exception { statisticsServlet = new StatisticsServlet(); - db = new DB(); buildRequestParameters(); } diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java index 57007489..979c2c4a 100755 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java @@ -53,7 +53,8 @@ import org.mockito.Mock; import org.onap.dmaap.datarouter.authz.AuthorizationResponse; import org.onap.dmaap.datarouter.authz.Authorizer; import org.onap.dmaap.datarouter.provisioning.beans.Insertable; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.Poker; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; import org.powermock.modules.junit4.PowerMockRunner; @@ -62,7 +63,6 @@ public class SubscribeServletTest extends DrServletTestBase { private static SubscribeServlet subscribeServlet; private static EntityManagerFactory emf; private static EntityManager em; - private DB db; @Mock private HttpServletRequest request; @@ -89,7 +89,7 @@ public class SubscribeServletTest extends DrServletTestBase { @Before public void setUp() throws Exception { - db = new DB(); + ProvDbUtils.getInstance().initProvDB(); listAppender = setTestLogger(SubscribeServlet.class); subscribeServlet = new SubscribeServlet(); setAuthoriserToReturnRequestIsAuthorized(); @@ -192,7 +192,7 @@ public class SubscribeServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/1"); JSONObject JSObject = buildRequestJsonObject(); SubscribeServlet subscribeServlet = new SubscribeServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -216,7 +216,7 @@ public class SubscribeServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/1"); JSONObject JSObject = buildRequestJsonObject(); SubscribeServlet subscribeServlet = new SubscribeServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -243,7 +243,7 @@ public class SubscribeServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); SubscribeServlet subscribeServlet = new SubscribeServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject 
getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -265,7 +265,7 @@ public class SubscribeServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); SubscribeServlet subscribeServlet = new SubscribeServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -290,7 +290,7 @@ public class SubscribeServletTest extends DrServletTestBase { when(request.isUserInRole("org.onap.dmaap-dr.feed|*|approveSub")).thenReturn(true); JSONObject JSObject = buildRequestJsonObject(); SubscribeServlet subscribeServlet = new SubscribeServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -334,7 +334,7 @@ public class SubscribeServletTest extends DrServletTestBase { FieldUtils.writeDeclaredStaticField(BaseServlet.class, "maxSubs", 0, true); when(request.getPathInfo()).thenReturn("/1"); SubscribeServlet subscribeServlet = new SubscribeServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { return new JSONObject(); } }; @@ -347,7 +347,7 @@ public class SubscribeServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/2"); JSONObject JSObject = buildRequestJsonObject(); SubscribeServlet subscribeServlet = new SubscribeServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java index 4a410ddd..cb0fa2bf 100755 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java @@ -24,6 +24,7 @@ package org.onap.dmaap.datarouter.provisioning; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.read.ListAppender; +import java.sql.Connection; import org.apache.commons.lang3.reflect.FieldUtils; import org.jetbrains.annotations.NotNull; import org.json.JSONObject; @@ -39,8 +40,9 @@ import org.onap.dmaap.datarouter.provisioning.beans.Deleteable; import org.onap.dmaap.datarouter.provisioning.beans.SubDelivery; import org.onap.dmaap.datarouter.provisioning.beans.Subscription; import org.onap.dmaap.datarouter.provisioning.beans.Updateable; -import org.onap.dmaap.datarouter.provisioning.utils.DB; import org.onap.dmaap.datarouter.provisioning.utils.PasswordProcessor; +import org.onap.dmaap.datarouter.provisioning.utils.Poker; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @@ -67,7 +69,6 @@ public class SubscriptionServletTest extends DrServletTestBase { private static EntityManagerFactory emf; private 
static EntityManager em; private SubscriptionServlet subscriptionServlet; - private DB db; private final String URL= "https://172.100.0.5"; private final String USER = "user1"; private final String PASSWORD="password1"; @@ -100,7 +101,6 @@ public class SubscriptionServletTest extends DrServletTestBase { public void setUp() throws Exception { listAppender = setTestLogger(SubscriptionServlet.class); subscriptionServlet = new SubscriptionServlet(); - db = new DB(); setAuthoriserToReturnRequestIsAuthorized(); setPokerToNotCreateTimersWhenDeleteSubscriptionIsCalled(); setupValidAuthorisedRequest(); @@ -253,7 +253,7 @@ public class SubscriptionServletTest extends DrServletTestBase { when(request.getHeader("Content-Type")).thenReturn("application/vnd.dmaap-dr.subscription; version=1.0"); JSONObject JSObject = buildRequestJsonObject(); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -278,7 +278,7 @@ public class SubscriptionServletTest extends DrServletTestBase { when(request.getPathInfo()).thenReturn("/3"); JSONObject JSObject = buildRequestJsonObject(); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -307,7 +307,7 @@ public class SubscriptionServletTest extends DrServletTestBase { PowerMockito.mockStatic(PasswordProcessor.class); JSONObject JSObject = buildRequestJsonObject(); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -347,7 +347,7 @@ public class SubscriptionServletTest extends DrServletTestBase { public void Given_Request_Is_HTTP_PUT_And_Subscription_Object_Is_Invalid_Bad_Request_Response_Is_Generated() throws Exception { when(request.getHeader("Content-Type")).thenReturn("application/vnd.dmaap-dr.subscription; version=1.0"); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); return jo; } @@ -362,7 +362,7 @@ public class SubscriptionServletTest extends DrServletTestBase { when(request.getHeader("Content-Type")).thenReturn("application/vnd.dmaap-dr.subscription; version=1.0"); JSONObject JSObject = buildRequestJsonObject(); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -388,7 +388,7 @@ public class SubscriptionServletTest extends DrServletTestBase { when(request.getHeader("Content-Type")).thenReturn("application/vnd.dmaap-dr.subscription; version=1.0"); JSONObject JSObject = buildRequestJsonObject(); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject 
getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -421,7 +421,7 @@ public class SubscriptionServletTest extends DrServletTestBase { PowerMockito.mockStatic(PasswordProcessor.class); JSONObject JSObject = buildRequestJsonObject(); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -502,7 +502,7 @@ public class SubscriptionServletTest extends DrServletTestBase { when(request.getHeader("Content-Type")).thenReturn("application/vnd.dmaap-dr.subscription-control; version=1.0"); JSONObject JSObject = buildRequestJsonObject(); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -524,7 +524,7 @@ public class SubscriptionServletTest extends DrServletTestBase { when(request.getHeader("Content-Type")).thenReturn("application/vnd.dmaap-dr.subscription-control; version=1.0"); JSONObject JSObject = buildRequestJsonObject(); SubscriptionServlet subscriptionServlet = new SubscriptionServlet() { - protected JSONObject getJSONfromInput(HttpServletRequest req) { + public JSONObject getJSONfromInput(HttpServletRequest req) { JSONObject jo = new JSONObject(); jo.put("name", "stub_name"); jo.put("version", "2.0"); @@ -610,7 +610,9 @@ public class SubscriptionServletTest extends DrServletTestBase { subscription.setPrivilegedSubscriber(false); subscription.setDecompress(false); subscription.changeOwnerShip(); - subscription.doUpdate(db.getConnection()); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + subscription.doUpdate(conn); + } } private void resetAafSubscriptionInDB() throws SQLException { @@ -626,7 +628,9 @@ public class SubscriptionServletTest extends DrServletTestBase { subscription.setAafInstance("https://aaf-onap-test.osaaf.org:8095"); subscription.setDecompress(false); subscription.setPrivilegedSubscriber(false); - subscription.doUpdate(db.getConnection()); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + subscription.doUpdate(conn); + } } private void addNewSubscriptionInDB() throws SQLException { @@ -640,6 +644,8 @@ public class SubscriptionServletTest extends DrServletTestBase { subscription.setMetadataOnly(false); subscription.setSuspended(false); subscription.setDecompress(false); - subscription.doInsert(db.getConnection()); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + subscription.doInsert(conn); + } } } \ No newline at end of file diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java index 8c48d705..7ebe45d9 100755 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java @@ -54,6 +54,7 @@ import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet; +import 
org.onap.dmaap.datarouter.provisioning.utils.SynchronizerTask; import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRouteTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRouteTest.java index 7b77a1ea..40d2d61d 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRouteTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRouteTest.java @@ -20,6 +20,7 @@ package org.onap.dmaap.datarouter.provisioning.beans; +import java.sql.Connection; import java.sql.SQLException; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; @@ -30,7 +31,7 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) @@ -40,7 +41,6 @@ public class EgressRouteTest { private static EntityManagerFactory emf; private static EntityManager em; - private DB db; @BeforeClass public static void init() { @@ -59,7 +59,6 @@ public class EgressRouteTest { } @Before public void setUp() throws Exception { - db = new DB(); egressRoute = new EgressRoute(2, 1); } @@ -67,12 +66,14 @@ public class EgressRouteTest { public void Verify_EgressRoute_Is_Removed_Successfully() throws SQLException { Assert.assertEquals(1, EgressRoute.getAllEgressRoutes().size()); EgressRoute egressRoute = new EgressRoute(1, 1); - egressRoute.doDelete(db.getConnection()); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + egressRoute.doDelete(conn); + } Assert.assertEquals(0, EgressRoute.getAllEgressRoutes().size()); } @Test - public void Verify_EgressRoute_Is_Updated_Successfully() throws SQLException { + public void Verify_EgressRoute_Is_Updated_Successfully() { EgressRoute egressRoute = new EgressRoute(1, 1); EgressRoute egressRoute1 = new EgressRoute(1, 1); Assert.assertEquals(egressRoute.hashCode(), egressRoute1.hashCode()); diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/FeedTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/FeedTest.java index 37e69c84..ec401075 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/FeedTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/FeedTest.java @@ -22,31 +22,33 @@ ******************************************************************************/ package org.onap.dmaap.datarouter.provisioning.beans; -import org.json.JSONObject; -import org.junit.*; -import org.junit.runner.RunWith; -import org.mockito.Mockito; -import org.onap.dmaap.datarouter.provisioning.utils.DB; -import org.powermock.modules.junit4.PowerMockRunner; +import static org.mockito.Matchers.anyString; -import javax.persistence.EntityManager; -import javax.persistence.EntityManagerFactory; -import javax.persistence.Persistence; -import java.io.InvalidObjectException; import java.sql.Connection; import java.sql.SQLException; import java.util.HashSet; import java.util.List; import java.util.Set; - -import static org.mockito.Matchers.anyString; +import 
javax.persistence.EntityManager; +import javax.persistence.EntityManagerFactory; +import javax.persistence.Persistence; +import org.json.JSONObject; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; +import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) public class FeedTest { private static EntityManagerFactory emf; private static EntityManager em; private Feed feed; - private DB db; + private ProvDbUtils provDbUtils; @BeforeClass public static void init() { @@ -66,7 +68,7 @@ public class FeedTest { @Before public void setUp() throws Exception { - db = new DB(); + provDbUtils = ProvDbUtils.getInstance(); feed = new Feed("Feed1","v0.1", "First Feed for testing", "First Feed for testing"); feed.setFeedid(1); feed.setGroupid(1); @@ -76,25 +78,25 @@ public class FeedTest { @Test public void Given_getFilteredFeedUrlList_With_Name_Then_Method_Returns_Self_Links() { - List list= feed.getFilteredFeedUrlList("name","Feed1"); + List list= Feed.getFilteredFeedUrlList("name","Feed1"); Assert.assertEquals("self_link",list.get(0)); } @Test public void Given_getFilteredFeedUrlList_With_Publ_Then_Method_Returns_Self_Links() { - List list= feed.getFilteredFeedUrlList("publ","pub"); + List list= Feed.getFilteredFeedUrlList("publ","pub"); Assert.assertEquals("self_link",list.get(0)); } @Test public void Given_getFilteredFeedUrlList_With_Subs_Then_Method_Returns_Self_Links() { - List list= feed.getFilteredFeedUrlList("subs","sub123"); + List list= Feed.getFilteredFeedUrlList("subs","sub123"); Assert.assertEquals("self_link",list.get(0)); } @Test - public void Given_doDelete_Succeeds_Then_doInsert_To_Put_Feed_Back_And_Bool_Is_True() throws SQLException, InvalidObjectException { - Boolean bool = feed.doDelete(db.getConnection()); + public void Given_doDelete_Succeeds_Then_doInsert_To_Put_Feed_Back_And_Bool_Is_True() throws SQLException { + Boolean bool = feed.doDelete(provDbUtils.getConnection()); Assert.assertEquals(true, bool); JSONObject jo = new JSONObject(); jo.put("self","self_link"); @@ -102,7 +104,7 @@ public class FeedTest { jo.put("subscribe","subscribe_link"); jo.put("log","log_link"); feed.setLinks(new FeedLinks(jo)); - bool = feed.doInsert(db.getConnection()); + bool = feed.doInsert(provDbUtils.getConnection()); Assert.assertEquals(true, bool); } @@ -134,7 +136,7 @@ public class FeedTest { @Test public void Given_doInsert_Throws_SQLException_Then_Returns_False() throws SQLException { - Connection connection = db.getConnection(); + Connection connection = provDbUtils.getConnection(); FeedAuthorization fa = new FeedAuthorization(); Set setA = new HashSet(); setA.add(new FeedEndpointID("1", "Name")); @@ -176,11 +178,11 @@ public class FeedTest { @Test public void Given_IsFeedValid_Called_And_Feed_Exists_Returns_True(){ - Assert.assertEquals(feed.isFeedValid(1), true); + Assert.assertTrue(Feed.isFeedValid(1)); } private Connection CreateSpyForDbConnection() throws SQLException { - Connection conn = db.getConnection(); + Connection conn = provDbUtils.getConnection(); return Mockito.spy(conn); } } \ No newline at end of file diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java index b785fdc2..82ee0837 100644 --- 
a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java @@ -22,24 +22,28 @@ ******************************************************************************/ package org.onap.dmaap.datarouter.provisioning.beans; -import org.junit.*; -import org.junit.runner.RunWith; -import org.onap.dmaap.datarouter.provisioning.utils.DB; -import org.powermock.modules.junit4.PowerMockRunner; - +import java.sql.Connection; +import java.util.Collection; +import java.util.List; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; -import java.util.Collection; -import java.util.Date; -import java.util.List; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; +import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) public class GroupTest { private static EntityManagerFactory emf; private static EntityManager em; private Group group; - private DB db; + private ProvDbUtils provDbUtils = ProvDbUtils.getInstance(); @BeforeClass public static void init() { @@ -59,9 +63,17 @@ public class GroupTest { @Before public void setUp() throws Exception { - db = new DB(); - group = new Group("GroupTest", "", ""); - group.doInsert(db.getConnection()); + group = new Group("GroupTest", "This group has a description", ""); + try (Connection conn = provDbUtils.getConnection()) { + group.doInsert(conn); + } + } + + @After + public void tearDown() throws Exception { + try (Connection conn = provDbUtils.getConnection()) { + group.doDelete(conn); + } } @Test @@ -79,26 +91,21 @@ public class GroupTest { public void Given_Group_Inserted_With_Same_Name_GetGroupMatching_With_Id_Returns_Correct_Group() throws Exception { Group sameGroupName = new Group("GroupTest", "This group has a description", ""); - sameGroupName.doInsert(db.getConnection()); + sameGroupName.doInsert(provDbUtils.getConnection()); Assert.assertEquals( "This group has a description", Group.getGroupMatching(group, 2).getDescription()); - sameGroupName.doDelete(db.getConnection()); - } - - @Test - public void Given_Group_Inserted_GetGroupById_Returns_Correct_Group() { - Assert.assertEquals(group, Group.getGroupById(group.getGroupid())); + sameGroupName.doDelete(provDbUtils.getConnection()); } @Test public void Given_Group_AuthId_Updated_GetGroupByAuthId_Returns_Correct_Group() throws Exception { group.setAuthid("Basic TmFtZTp6Z04wMFkyS3gybFppbXltNy94ZDhuMkdEYjA9"); - group.doUpdate(db.getConnection()); + group.doUpdate(provDbUtils.getConnection()); Assert.assertEquals(group, Group.getGroupByAuthId("Basic TmFtZTp6Z04wMFkyS3gybFppbXltNy94ZDhuMkdEYjA9")); } - @After - public void tearDown() throws Exception { - group.doDelete(db.getConnection()); + @Test + public void Given_Group_Inserted_GetGroupById_Returns_Correct_Group() { + Assert.assertEquals(group, Group.getGroupById(group.getGroupid())); } } diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRouteTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRouteTest.java index ad259a59..b26b0000 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRouteTest.java +++ 
b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRouteTest.java @@ -52,7 +52,7 @@ public class IngressRouteTest { Feed feed = mock(Feed.class); PowerMockito.when(Feed.getFeedById(1)).thenReturn(feed); Map map = new HashMap<>(); - FieldUtils.writeDeclaredStaticField(NodeClass.class, "map", map, true); + FieldUtils.writeDeclaredStaticField(NodeClass.class, "nodesMap", map, true); } @Test diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecordTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecordTest.java index bd0ebdcb..24400f63 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecordTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecordTest.java @@ -20,13 +20,6 @@ package org.onap.dmaap.datarouter.provisioning.beans; -import org.junit.*; -import org.onap.dmaap.datarouter.provisioning.utils.DB; -import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet; - -import javax.persistence.EntityManager; -import javax.persistence.EntityManagerFactory; -import javax.persistence.Persistence; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintStream; @@ -34,15 +27,21 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; import java.text.ParseException; +import javax.persistence.EntityManager; +import javax.persistence.EntityManagerFactory; +import javax.persistence.Persistence; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; +import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet; public class LogRecordTest { - private LogRecord logRecord; private static EntityManagerFactory emf; private static EntityManager em; - private DB db; - private static final String INSERT_SQL = "insert into LOG_RECORDS values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; - private PreparedStatement ps; @BeforeClass public static void init() { @@ -62,9 +61,7 @@ public class LogRecordTest { @Before public void setUp() throws ParseException, SQLException { - db = new DB(); - Connection conn = db.getConnection(); - ps = conn.prepareStatement(INSERT_SQL); + } @Test @@ -103,9 +100,14 @@ public class LogRecordTest { private void setArgsLoadAndAssertEquals(String type, String s) throws ParseException, SQLException { String[] args = {"2018-08-29-10-10-10-543.", "LOG", "ID", "1", "URL/file123", "PUT", "application/vnd.dmaap-.log-list; version=1.0", "100", type, "1", "172.0.0.8", "user", "204", "1", "1", "204", "0", "other", "1", "100", "file123"}; - logRecord = new LogRecord(args); - logRecord.load(ps); - String compare_string = ps.toString().substring(ps.toString().indexOf("{1:"), ps.toString().indexOf("2:")) + ps.toString().substring(ps.toString().indexOf("3:")); - Assert.assertEquals(compare_string, s); + LogRecord logRecord = new LogRecord(args); + try (Connection conn = ProvDbUtils.getInstance().getConnection()) { + PreparedStatement ps = conn.prepareStatement( + "insert into LOG_RECORDS values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"); + logRecord.load(ps); + String compare_string = ps.toString().substring(ps.toString().indexOf("{1:"), ps.toString().indexOf("2:")) + ps + .toString().substring(ps.toString().indexOf("3:")); + 
Assert.assertEquals(compare_string, s); + } } } diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRouteTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRouteTest.java index d727bc77..d02f39d4 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRouteTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRouteTest.java @@ -20,6 +20,7 @@ package org.onap.dmaap.datarouter.provisioning.beans; +import java.sql.Connection; import java.sql.SQLException; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; @@ -30,17 +31,18 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; -import org.onap.dmaap.datarouter.provisioning.utils.DB; +import org.onap.dmaap.datarouter.provisioning.utils.ProvDbUtils; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) public class NetworkRouteTest { - private NetworkRoute networkRoute; + private NetworkRoute networkRoute = new NetworkRoute("node01.","node03.","node02."); + private ProvDbUtils provDbUtils = ProvDbUtils.getInstance(); + private static EntityManagerFactory emf; private static EntityManager em; - private DB db; @BeforeClass public static void init() { @@ -59,23 +61,22 @@ public class NetworkRouteTest { } @Before public void setUp() throws Exception { - db = new DB(); - networkRoute = new NetworkRoute("node01.","node03.","node02."); + try (Connection conn = provDbUtils.getConnection()) { + networkRoute.doInsert(conn); + } } @Test public void Verify_NetworkRoute_Is_Removed_Successfully() throws SQLException { + Assert.assertEquals(2, NetworkRoute.getAllNetworkRoutes().size()); + networkRoute.doDelete(provDbUtils.getConnection()); Assert.assertEquals(1, NetworkRoute.getAllNetworkRoutes().size()); - NetworkRoute networkRoute = new NetworkRoute("stub_from.", "stub_to."); - networkRoute.doDelete(db.getConnection()); - Assert.assertEquals(0, NetworkRoute.getAllNetworkRoutes().size()); } @Test public void Verify_NetworkRoute_Is_Updated_Successfully() throws SQLException { NetworkRoute networkRoute = new NetworkRoute("stub_from.", "stub_to.", "node02."); - networkRoute.doUpdate(db.getConnection()); - //Assert.assertTrue(NetworkRoute.getAllNetworkRoutes().contains(networkRoute)); + networkRoute.doUpdate(provDbUtils.getConnection()); for (NetworkRoute net : NetworkRoute.getAllNetworkRoutes()) { Assert.assertEquals(5, net.getVianode()); diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java index 214cc6e7..7f049e5d 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java @@ -32,7 +32,6 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; -import org.onap.dmaap.datarouter.provisioning.utils.DB; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) @@ -42,7 +41,6 @@ public class SubscriptionTest { private static EntityManagerFactory emf; private static EntityManager em; - private DB db; @BeforeClass public static void init() { @@ -61,7 +59,6 @@ public class SubscriptionTest { } 
@Before public void setUp() throws Exception { - db = new DB(); subscription = new Subscription(); } diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/DbTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/DbTest.java index 056469a8..d1bedabb 100644 --- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/DbTest.java +++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/DbTest.java @@ -35,8 +35,6 @@ public class DbTest { private static EntityManagerFactory emf; private static EntityManager em; - private DB db = new DB(); - @BeforeClass public static void init() { emf = Persistence.createEntityManagerFactory("db-unit-tests"); @@ -55,7 +53,7 @@ public class DbTest { @Test public void Verify_DB_Is_Initialised_Successfully() { - Assert.assertTrue(db.runRetroFits()); + Assert.assertTrue(ProvDbUtils.getInstance().initProvDB()); } } -- cgit 1.2.3-korg