author     Ram Koya <rk541m@att.com>              2018-10-02 14:07:31 +0000
committer  Gerrit Code Review <gerrit@onap.org>   2018-10-02 14:07:31 +0000
commit     3a2e2a602b9aa3677d941f3d5d65ea0dce80b7ab (patch)
tree       9db02aecf738697ab0c62b0f14510fbc31772283 /datarouter-node
parent     f499514699009bca5f7649725024a50a85204a04 (diff)
parent     158f65097526b497de5f30f0c954e465bee0d355 (diff)

Merge "Fix formatting"  (1.0.2)
Diffstat (limited to 'datarouter-node')
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java         91
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java  27
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java        89
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java          16
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java         13
5 files changed, 134 insertions(+), 102 deletions(-)
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
index 265aafd3..c40d29c3 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
@@ -24,22 +24,25 @@
package org.onap.dmaap.datarouter.node;
-import java.util.*;
-import java.io.*;
+import java.io.File;
+import java.util.HashSet;
+import java.util.Hashtable;
+import java.util.Vector;
/**
* Processed configuration for this node.
* <p>
- * The NodeConfig represents a processed configuration from the Data Router
- * provisioning server. Each time configuration data is received from the
- * provisioning server, a new NodeConfig is created and the previous one
+ * The NodeConfig represents a processed configuration from the Data Router provisioning server. Each time
+ * configuration data is received from the provisioning server, a new NodeConfig is created and the previous one
* discarded.
*/
public class NodeConfig {
+
/**
* Raw configuration entry for a data router node
*/
public static class ProvNode {
+
private String cname;
/**
@@ -63,14 +66,15 @@ public class NodeConfig {
* Raw configuration entry for a provisioning parameter
*/
public static class ProvParam {
+
private String name;
private String value;
/**
* Construct a provisioning parameter configuration entry.
*
- * @param name The name of the parameter.
- * @param value The value of the parameter.
+ * @param name The name of the parameter.
+ * @param value The value of the parameter.
*/
public ProvParam(String name, String value) {
this.name = name;
@@ -96,6 +100,7 @@ public class NodeConfig {
* Raw configuration entry for a data feed.
*/
public static class ProvFeed {
+
private String id;
private String logdata;
private String status;
@@ -103,9 +108,10 @@ public class NodeConfig {
/**
* Construct a feed configuration entry.
*
- * @param id The feed ID of the entry.
+ * @param id The feed ID of the entry.
* @param logdata String for log entries about the entry.
- * @param status The reason why this feed cannot be used (Feed has been deleted, Feed has been suspended) or null if it is valid.
+ * @param status The reason why this feed cannot be used (Feed has been deleted, Feed has been suspended) or
+ * null if it is valid.
*/
public ProvFeed(String id, String logdata, String status) {
this.id = id;
@@ -139,6 +145,7 @@ public class NodeConfig {
* Raw configuration entry for a feed user.
*/
public static class ProvFeedUser {
+
private String feedid;
private String user;
private String credentials;
@@ -146,8 +153,8 @@ public class NodeConfig {
/**
* Construct a feed user configuration entry
*
- * @param feedid The feed id.
- * @param user The user that will publish to the feed.
+ * @param feedid The feed id.
+ * @param user The user that will publish to the feed.
* @param credentials The Authorization header the user will use to publish.
*/
public ProvFeedUser(String feedid, String user, String credentials) {
@@ -182,6 +189,7 @@ public class NodeConfig {
* Raw configuration entry for a feed subnet
*/
public static class ProvFeedSubnet {
+
private String feedid;
private String cidr;
@@ -189,7 +197,7 @@ public class NodeConfig {
* Construct a feed subnet configuration entry
*
* @param feedid The feed ID
- * @param cidr The CIDR allowed to publish to the feed.
+ * @param cidr The CIDR allowed to publish to the feed.
*/
public ProvFeedSubnet(String feedid, String cidr) {
this.feedid = feedid;
@@ -215,6 +223,7 @@ public class NodeConfig {
* Raw configuration entry for a subscription
*/
public static class ProvSubscription {
+
private String subid;
private String feedid;
private String url;
@@ -226,15 +235,17 @@ public class NodeConfig {
/**
* Construct a subscription configuration entry
*
- * @param subid The subscription ID
- * @param feedid The feed ID
- * @param url The base delivery URL (not including the fileid)
- * @param authuser The user in the credentials used to deliver
- * @param credentials The credentials used to authenticate to the delivery URL exactly as they go in the Authorization header.
- * @param metaonly Is this a meta data only subscription?
- * @param use100 Should we send Expect: 100-continue?
- */
- public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials, boolean metaonly, boolean use100) {
+ * @param subid The subscription ID
+ * @param feedid The feed ID
+ * @param url The base delivery URL (not including the fileid)
+ * @param authuser The user in the credentials used to deliver
+ * @param credentials The credentials used to authenticate to the delivery URL exactly as they go in the
+ * Authorization header.
+ * @param metaonly Is this a meta data only subscription?
+ * @param use100 Should we send Expect: 100-continue?
+ */
+ public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials,
+ boolean metaonly, boolean use100) {
this.subid = subid;
this.feedid = feedid;
this.url = url;
@@ -298,6 +309,7 @@ public class NodeConfig {
* Raw configuration entry for controlled ingress to the data router node
*/
public static class ProvForceIngress {
+
private String feedid;
private String subnet;
private String user;
@@ -307,9 +319,10 @@ public class NodeConfig {
* Construct a forced ingress configuration entry
*
* @param feedid The feed ID that this entry applies to
- * @param subnet The CIDR for which publisher IP addresses this entry applies to or "" if it applies to all publisher IP addresses
- * @param user The publishing user this entry applies to or "" if it applies to all publishing users.
- * @param nodes The array of FQDNs of the data router nodes to redirect publication attempts to.
+ * @param subnet The CIDR for which publisher IP addresses this entry applies to or "" if it applies to all
+ * publisher IP addresses
+ * @param user The publishing user this entry applies to or "" if it applies to all publishing users.
+ * @param nodes The array of FQDNs of the data router nodes to redirect publication attempts to.
*/
public ProvForceIngress(String feedid, String subnet, String user, String[] nodes) {
this.feedid = feedid;
@@ -351,6 +364,7 @@ public class NodeConfig {
* Raw configuration entry for controlled egress from the data router
*/
public static class ProvForceEgress {
+
private String subid;
private String node;
@@ -358,7 +372,7 @@ public class NodeConfig {
* Construct a forced egress configuration entry
*
* @param subid The subscription ID the subscription with forced egress
- * @param node The node handling deliveries for this subscription
+ * @param node The node handling deliveries for this subscription
*/
public ProvForceEgress(String subid, String node) {
this.subid = subid;
@@ -384,6 +398,7 @@ public class NodeConfig {
* Raw configuration entry for routing within the data router network
*/
public static class ProvHop {
+
private String from;
private String to;
private String via;
@@ -399,8 +414,8 @@ public class NodeConfig {
* Construct a hop entry
*
* @param from The FQDN of the node with the data to be delivered
- * @param to The FQDN of the node that will deliver to the subscriber
- * @param via The FQDN of the node where the from node should send the data
+ * @param to The FQDN of the node that will deliver to the subscriber
+ * @param via The FQDN of the node where the from node should send the data
*/
public ProvHop(String from, String to, String via) {
this.from = from;
@@ -431,12 +446,14 @@ public class NodeConfig {
}
private static class Redirection {
+
SubnetMatcher snm;
String user;
String[] nodes;
}
private static class Feed {
+
String loginfo;
String status;
SubnetMatcher[] subnets;
@@ -458,10 +475,10 @@ public class NodeConfig {
/**
* Process the raw provisioning data to configure this node
*
- * @param pd The parsed provisioning data
- * @param myname My name as seen by external systems
- * @param spooldir The directory where temporary files live
- * @param port The port number for URLs
+ * @param pd The parsed provisioning data
+ * @param myname My name as seen by external systems
+ * @param spooldir The directory where temporary files live
+ * @param port The port number for URLs
* @param nodeauthkey The keying string used to generate node authentication credentials
*/
public NodeConfig(ProvData pd, String myname, String spooldir, int port, String nodeauthkey) {
@@ -477,7 +494,8 @@ public class NodeConfig {
continue;
}
String auth = NodeUtils.getNodeAuthHdr(cn, nodeauthkey);
- DestInfo di = new DestInfo("n:" + cn, spooldir + "/n/" + cn, null, "n2n-" + cn, "https://" + cn + ":" + port + "/internal/publish", cn, myauth, false, true);
+ DestInfo di = new DestInfo("n:" + cn, spooldir + "/n/" + cn, null, "n2n-" + cn,
+ "https://" + cn + ":" + port + "/internal/publish", cn, myauth, false, true);
(new File(di.getSpool())).mkdirs();
div.add(di);
nodeinfo.put(cn, di);
@@ -547,7 +565,8 @@ public class NodeConfig {
} catch (Exception e) {
}
String siddir = sididx + "/" + sid;
- DestInfo di = new DestInfo("s:" + sid, spooldir + "/s/" + siddir, sid, fid, ps.getURL(), ps.getAuthUser(), ps.getCredentials(), ps.isMetaDataOnly(), ps.isUsing100());
+ DestInfo di = new DestInfo("s:" + sid, spooldir + "/s/" + siddir, sid, fid, ps.getURL(), ps.getAuthUser(),
+ ps.getCredentials(), ps.isMetaDataOnly(), ps.isUsing100());
(new File(di.getSpool())).mkdirs();
div.add(di);
subinfo.put(sid, di);
@@ -653,7 +672,7 @@ public class NodeConfig {
* Check whether this is a valid node-to-node transfer
*
* @param credentials Credentials offered by the supposed node
- * @param ip IP address the request came from
+ * @param ip IP address the request came from
*/
public boolean isAnotherNode(String credentials, String ip) {
IsFrom n = nodes.get(credentials);
@@ -663,9 +682,9 @@ public class NodeConfig {
/**
* Check whether publication is allowed.
*
- * @param feedid The ID of the feed being requested.
+ * @param feedid The ID of the feed being requested.
* @param credentials The offered credentials
- * @param ip The requesting IP address
+ * @param ip The requesting IP address
*/
public String isPublishPermitted(String feedid, String credentials, String ip) {
Feed f = feeds.get(feedid);
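As an aside for readers of the Javadoc reflowed above: the following minimal usage sketch is not part of the commit; it only exercises signatures that appear verbatim in the hunks of this file (the NodeConfig and ProvSubscription constructors and isPublishPermitted). The node name, spool directory, port, key and credential values are hypothetical placeholders, and the ProvData instance is assumed to be parsed elsewhere.

package org.onap.dmaap.datarouter.node;

// Hypothetical usage sketch, not part of this change.
final class NodeConfigUsageSketch {

    // Build a NodeConfig from already-parsed provisioning data and run a publish check.
    // isPublishPermitted returns null when publishing is allowed, otherwise the refusal reason.
    static String checkPublish(ProvData pd, String feedid, String credentials, String ip) {
        NodeConfig cfg = new NodeConfig(pd, "node1.example.org", "/opt/app/datartr/spool",
                8443, "example-node-auth-key");
        return cfg.isPublishPermitted(feedid, credentials, ip);
    }

    // A subscription entry as the provisioning server would describe it, using the
    // seven-argument constructor whose parameter documentation is reflowed above.
    static NodeConfig.ProvSubscription sampleSubscription() {
        return new NodeConfig.ProvSubscription("sub-1", "feed-1",
                "https://subscriber.example.org/delivery", "deliveryuser",
                "Basic ZGVsaXZlcnl1c2VyOnB3", false, true);
    }
}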
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
index 7ecbaafd..17f902e5 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
@@ -24,15 +24,17 @@
package org.onap.dmaap.datarouter.node;
-import java.net.*;
-import java.util.*;
-import java.io.*;
-
-import org.apache.log4j.Logger;
-import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
-
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.URL;
+import java.util.Properties;
+import java.util.Timer;
+import org.apache.log4j.Logger;
+import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
/**
@@ -48,7 +50,7 @@ import com.att.eelf.configuration.EELFManager;
public class NodeConfigManager implements DeliveryQueueHelper {
private static EELFLogger eelflogger = EELFManager.getInstance()
- .getLogger("org.onap.dmaap.datarouter.node.NodeConfigManager");
+ .getLogger("org.onap.dmaap.datarouter.node.NodeConfigManager");
private static Logger logger = Logger.getLogger("org.onap.dmaap.datarouter.node.NodeConfigManager");
private static NodeConfigManager base = new NodeConfigManager();
@@ -108,13 +110,14 @@ public class NodeConfigManager implements DeliveryQueueHelper {
Properties p = new Properties();
try {
p.load(new FileInputStream(System
- .getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties")));
+ .getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties")));
} catch (Exception e) {
NodeUtils.setIpAndFqdnForEelf("NodeConfigManager");
eelflogger.error(EelfMsgs.MESSAGE_PROPERTIES_LOAD_ERROR);
logger.error("NODE0301 Unable to load local configuration file " + System
- .getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties"), e);
+ .getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties"),
+ e);
}
provurl = p.getProperty("ProvisioningURL", "https://feeds-drtr.web.att.com/internal/prov");
try {
@@ -679,10 +682,10 @@ public class NodeConfigManager implements DeliveryQueueHelper {
String sdir = config.getSpoolDir(subid);
if (sdir != null) {
logger.info("NODE0310 Received subscription reset request for subscription " + subid
- + " from provisioning server " + remoteaddr);
+ + " from provisioning server " + remoteaddr);
} else {
logger.info("NODE0311 Received subscription reset request for unknown subscription " + subid
- + " from provisioning server " + remoteaddr);
+ + " from provisioning server " + remoteaddr);
}
return (sdir);
} else {
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
index 51e59925..26513358 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
@@ -24,6 +24,8 @@
package org.onap.dmaap.datarouter.node;
+import static org.onap.dmaap.datarouter.node.NodeUtils.sendResponseError;
+
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import java.io.File;
@@ -42,12 +44,9 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-
import org.apache.log4j.Logger;
import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
-import static org.onap.dmaap.datarouter.node.NodeUtils.sendResponseError;
-
/**
* Servlet for handling all http and https requests to the data router node
* <p>
@@ -60,12 +59,13 @@ import static org.onap.dmaap.datarouter.node.NodeUtils.sendResponseError;
* PUT/DELETE https://<i>node</i>/publish/<i>feedid</i>/<i>fileid</i> - publish request
*/
public class NodeServlet extends HttpServlet {
+
private static Logger logger = Logger.getLogger("org.onap.dmaap.datarouter.node.NodeServlet");
private static NodeConfigManager config;
private static Pattern MetaDataPattern;
//Adding EELF Logger Rally:US664892
private static EELFLogger eelflogger = EELFManager.getInstance()
- .getLogger("org.onap.dmaap.datarouter.node.NodeServlet");
+ .getLogger("org.onap.dmaap.datarouter.node.NodeServlet");
static {
final String ws = "\\s*";
@@ -99,11 +99,11 @@ public class NodeServlet extends HttpServlet {
/**
* Handle a GET for /internal/fetchProv
*/
- protected void doGet(HttpServletRequest req, HttpServletResponse resp){
+ protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
NodeUtils.setIpAndFqdnForEelf("doGet");
eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader("X-ATT-DR-ON-BEHALF-OF"),
- getIdFromPath(req) + "");
- try{
+ getIdFromPath(req) + "");
+ try {
if (down(resp)) {
return;
}
@@ -140,12 +140,12 @@ public class NodeServlet extends HttpServlet {
protected void doPut(HttpServletRequest req, HttpServletResponse resp) {
NodeUtils.setIpAndFqdnForEelf("doPut");
eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader("X-ATT-DR-ON-BEHALF-OF"),
- getIdFromPath(req) + "");
+ getIdFromPath(req) + "");
try {
common(req, resp, true);
- } catch(IOException ioe){
+ } catch (IOException ioe) {
logger.error("IOException" + ioe.getMessage());
- } catch(ServletException se){
+ } catch (ServletException se) {
logger.error("ServletException" + se.getMessage());
}
}
@@ -156,33 +156,34 @@ public class NodeServlet extends HttpServlet {
protected void doDelete(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
NodeUtils.setIpAndFqdnForEelf("doDelete");
eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader("X-ATT-DR-ON-BEHALF-OF"),
- getIdFromPath(req) + "");
+ getIdFromPath(req) + "");
try {
common(req, resp, false);
- } catch(IOException ioe){
+ } catch (IOException ioe) {
logger.error("IOException" + ioe.getMessage());
- } catch(ServletException se){
+ } catch (ServletException se) {
logger.error("ServletException" + se.getMessage());
}
}
private void common(HttpServletRequest req, HttpServletResponse resp, boolean isput)
- throws ServletException, IOException {
+ throws ServletException, IOException {
if (down(resp)) {
return;
}
if (!req.isSecure()) {
logger.info(
- "NODE0104 Rejecting insecure PUT or DELETE of " + req.getPathInfo() + " from " + req.getRemoteAddr());
+ "NODE0104 Rejecting insecure PUT or DELETE of " + req.getPathInfo() + " from " + req
+ .getRemoteAddr());
resp.sendError(HttpServletResponse.SC_FORBIDDEN, "https required on publish requests");
return;
}
String fileid = req.getPathInfo();
if (fileid == null) {
logger.info("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
- .getRemoteAddr());
+ .getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
- "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
+ "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
return;
}
String feedid = null;
@@ -190,7 +191,7 @@ public class NodeServlet extends HttpServlet {
String credentials = req.getHeader("Authorization");
if (credentials == null) {
logger.info("NODE0106 Rejecting unauthenticated PUT or DELETE of " + req.getPathInfo() + " from " + req
- .getRemoteAddr());
+ .getRemoteAddr());
resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Authorization header required");
return;
}
@@ -205,9 +206,9 @@ public class NodeServlet extends HttpServlet {
int i = fileid.indexOf('/');
if (i == -1 || i == fileid.length() - 1) {
logger.info("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
- .getRemoteAddr());
+ .getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
- "Invalid request URI. Expecting <feed-publishing-url>/<fileid>. Possible missing fileid.");
+ "Invalid request URI. Expecting <feed-publishing-url>/<fileid>. Possible missing fileid.");
return;
}
feedid = fileid.substring(0, i);
@@ -226,16 +227,16 @@ public class NodeServlet extends HttpServlet {
targets = config.parseRouting(req.getHeader("X-ATT-DR-ROUTING"));
} else {
logger.info("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
- .getRemoteAddr());
+ .getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
- "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
+ "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
return;
}
if (fileid.indexOf('/') != -1) {
logger.info("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
- .getRemoteAddr());
+ .getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
- "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
+ "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
return;
}
String qs = req.getQueryString();
@@ -253,8 +254,9 @@ public class NodeServlet extends HttpServlet {
String reason = config.isPublishPermitted(feedid, credentials, ip);
if (reason != null) {
logger.info(
- "NODE0111 Rejecting unauthorized publish attempt to feed " + feedid + " fileid " + fileid + " from "
- + ip + " reason " + reason);
+ "NODE0111 Rejecting unauthorized publish attempt to feed " + feedid + " fileid " + fileid
+ + " from "
+ + ip + " reason " + reason);
resp.sendError(HttpServletResponse.SC_FORBIDDEN, reason);
return;
}
@@ -268,8 +270,9 @@ public class NodeServlet extends HttpServlet {
}
String redirto = "https://" + newnode + port + "/publish/" + feedid + "/" + fileid;
logger.info(
- "NODE0108 Redirecting publish attempt for feed " + feedid + " user " + user + " ip " + ip + " to "
- + redirto);
+ "NODE0108 Redirecting publish attempt for feed " + feedid + " user " + user + " ip " + ip
+ + " to "
+ + redirto);
resp.sendRedirect(redirto);
return;
}
@@ -280,7 +283,7 @@ public class NodeServlet extends HttpServlet {
File meta = new File(fbase + ".M");
OutputStream dos = null;
InputStream is = null;
- try (Writer mw = new FileWriter(meta)){
+ try (Writer mw = new FileWriter(meta)) {
StringBuffer mx = new StringBuffer();
mx.append(req.getMethod()).append('\t').append(fileid).append('\n');
Enumeration hnames = req.getHeaderNames();
@@ -289,12 +292,12 @@ public class NodeServlet extends HttpServlet {
String hn = (String) hnames.nextElement();
String hnlc = hn.toLowerCase();
if ((isput && ("content-type".equals(hnlc) ||
- "content-language".equals(hnlc) ||
- "content-md5".equals(hnlc) ||
- "content-range".equals(hnlc))) ||
- "x-att-dr-meta".equals(hnlc) ||
- (feedid == null && "x-att-dr-received".equals(hnlc)) ||
- (hnlc.startsWith("x-") && !hnlc.startsWith("x-att-dr-"))) {
+ "content-language".equals(hnlc) ||
+ "content-md5".equals(hnlc) ||
+ "content-range".equals(hnlc))) ||
+ "x-att-dr-meta".equals(hnlc) ||
+ (feedid == null && "x-att-dr-received".equals(hnlc)) ||
+ (hnlc.startsWith("x-") && !hnlc.startsWith("x-att-dr-"))) {
Enumeration hvals = req.getHeaders(hn);
while (hvals.hasMoreElements()) {
String hv = (String) hvals.nextElement();
@@ -304,15 +307,15 @@ public class NodeServlet extends HttpServlet {
if ("x-att-dr-meta".equals(hnlc)) {
if (hv.length() > 4096) {
logger.info(
- "NODE0109 Rejecting publish attempt with metadata too long for feed " + feedid
- + " user " + user + " ip " + ip);
+ "NODE0109 Rejecting publish attempt with metadata too long for feed " + feedid
+ + " user " + user + " ip " + ip);
resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Metadata too long");
return;
}
if (!MetaDataPattern.matcher(hv.replaceAll("\\\\.", "X")).matches()) {
logger.info(
- "NODE0109 Rejecting publish attempt with malformed metadata for feed " + feedid
- + " user " + user + " ip " + ip);
+ "NODE0109 Rejecting publish attempt with malformed metadata for feed " + feedid
+ + " user " + user + " ip " + ip);
resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Malformed metadata");
return;
}
@@ -342,7 +345,7 @@ public class NodeServlet extends HttpServlet {
} catch (Exception e) {
}
StatusLog.logPubFail(pubid, feedid, logurl, req.getMethod(), ctype, exlen, data.length(), ip, user,
- ioe.getMessage());
+ ioe.getMessage());
throw ioe;
}
Path dpath = Paths.get(fbase);
@@ -363,11 +366,11 @@ public class NodeServlet extends HttpServlet {
resp.setStatus(HttpServletResponse.SC_NO_CONTENT);
resp.getOutputStream().close();
StatusLog.logPub(pubid, feedid, logurl, req.getMethod(), ctype, data.length(), ip, user,
- HttpServletResponse.SC_NO_CONTENT);
+ HttpServletResponse.SC_NO_CONTENT);
} catch (IOException ioe) {
logger.info(
- "NODE0110 IO Exception receiving publish attempt for feed " + feedid + " user " + user + " ip " + ip
- + " " + ioe.toString(), ioe);
+ "NODE0110 IO Exception receiving publish attempt for feed " + feedid + " user " + user + " ip " + ip
+ + " " + ioe.toString(), ioe);
throw ioe;
} finally {
if (is != null) {
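The long boolean expression re-indented in the hunk above decides which request headers get copied into the publish metadata file. Purely as a reading aid, here it is factored into a hypothetical helper; the header names and the isput/feedid conditions are exactly those shown in the diff, only the class and method names are invented.

// Hypothetical reading aid, not part of this change.
final class PublishHeaderFilter {

    // True when a request header should be recorded in the publish metadata (.M) file.
    static boolean shouldRecord(boolean isput, String feedid, String headerName) {
        String hnlc = headerName.toLowerCase();
        return (isput && ("content-type".equals(hnlc)
                || "content-language".equals(hnlc)
                || "content-md5".equals(hnlc)
                || "content-range".equals(hnlc)))
                || "x-att-dr-meta".equals(hnlc)
                || (feedid == null && "x-att-dr-received".equals(hnlc))
                || (hnlc.startsWith("x-") && !hnlc.startsWith("x-att-dr-"));
    }
}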
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java
index cc21992c..63691957 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java
@@ -40,20 +40,19 @@ import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Enumeration;
import java.util.TimeZone;
+import javax.servlet.http.HttpServletResponse;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
import org.slf4j.MDC;
-import javax.servlet.http.HttpServletResponse;
-
/**
* Utility functions for the data router node
*/
public class NodeUtils {
private static EELFLogger eelfLogger = EELFManager.getInstance()
- .getLogger("org.onap.dmaap.datarouter.node.NodeUtils");
+ .getLogger("org.onap.dmaap.datarouter.node.NodeUtils");
private static Logger nodeUtilsLogger = Logger.getLogger("org.onap.dmaap.datarouter.node.NodeUtils");
private NodeUtils() {
@@ -97,7 +96,8 @@ public class NodeUtils {
return (getAuthHdr(node, base64Encode(md.digest())));
} catch (Exception exception) {
nodeUtilsLogger
- .error("Exception in generating Credentials for given node name:= " + exception.toString(), exception);
+ .error("Exception in generating Credentials for given node name:= " + exception.toString(),
+ exception);
return (null);
}
}
@@ -119,7 +119,7 @@ public class NodeUtils {
ks.load(fileInputStream, kspass.toCharArray());
} catch (IOException ioException) {
nodeUtilsLogger.error("IOException occurred while opening FileInputStream: " + ioException.getMessage(),
- ioException);
+ ioException);
return (null);
}
} catch (Exception e) {
@@ -176,7 +176,8 @@ public class NodeUtils {
return (InetAddress.getByName(ip).getAddress());
} catch (Exception exception) {
nodeUtilsLogger
- .error("Exception in generating byte array for given IP address := " + exception.toString(), exception);
+ .error("Exception in generating byte array for given IP address := " + exception.toString(),
+ exception);
}
return (null);
}
@@ -254,7 +255,8 @@ public class NodeUtils {
MDC.put(MDC_SERVER_IP_ADDRESS, InetAddress.getLocalHost().getHostAddress());
} catch (Exception exception) {
nodeUtilsLogger
- .error("Exception in generating byte array for given IP address := " + exception.toString(), exception);
+ .error("Exception in generating byte array for given IP address := " + exception.toString(),
+ exception);
}
}
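For reference, the address-to-bytes call wrapped in the hunk above can be sketched on its own as follows; the class and method names are ours, while the InetAddress call and the log-and-return-null behaviour mirror what the diff shows.

import java.net.InetAddress;

// Hypothetical sketch of the NodeUtils address-conversion pattern shown above.
final class AddressBytesSketch {

    static byte[] ipToBytes(String ip) {
        try {
            return InetAddress.getByName(ip).getAddress();
        } catch (Exception e) {
            // NodeUtils logs the exception and returns null; the sketch does the same.
            return null;
        }
    }
}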
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java
index f9173e9b..fec2ca39 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java
@@ -24,14 +24,19 @@
package org.onap.dmaap.datarouter.node;
-import java.util.*;
+import java.util.HashSet;
+import java.util.Hashtable;
+import java.util.Vector;
/**
- * Given a set of node names and next hops, identify and ignore any cycles and figure out the sequence of next hops to get from this node to any other node
+ * Given a set of node names and next hops, identify and ignore any cycles and figure out the sequence of next hops to
+ * get from this node to any other node
*/
public class PathFinder {
+
private static class Hop {
+
boolean mark;
boolean bad;
NodeConfig.ProvHop basis;
@@ -90,8 +95,8 @@ public class PathFinder {
* Find routes from a specified origin to all of the nodes given a set of specified next hops.
*
* @param origin where we start
- * @param nodes where we can go
- * @param hops detours along the way
+ * @param nodes where we can go
+ * @param hops detours along the way
*/
public PathFinder(String origin, String[] nodes, NodeConfig.ProvHop[] hops) {
HashSet<String> known = new HashSet<String>();
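To close, a short hypothetical example of the constructors documented in this last hunk: PathFinder(origin, nodes, hops) together with NodeConfig.ProvHop(from, to, via). All host names below are placeholders, not values from the source.

package org.onap.dmaap.datarouter.node;

// Hypothetical illustration only; not part of this change.
final class PathFinderSketch {

    static PathFinder buildRoutes() {
        String origin = "node1.example.org";
        String[] nodes = {"node1.example.org", "node2.example.org", "node3.example.org"};
        // Data held on node1 and destined for node3 should be relayed via node2.
        NodeConfig.ProvHop[] hops = {
                new NodeConfig.ProvHop("node1.example.org", "node3.example.org", "node2.example.org")
        };
        return new PathFinder(origin, nodes, hops);
    }
}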