author     Conor Ward <conor.ward@est.tech>  2019-04-01 15:50:38 +0000
committer  Gerrit Code Review <gerrit@onap.org>  2019-04-01 15:50:38 +0000
commit     875daad0a737115702458d1850ddee87ac4cea30 (patch)
tree       25ca00bf5f13b34a8e73daddf41b5573017961af /datarouter-node/src
parent     a26d139f31caa06ec932498574b18c69780dab28 (diff)
parent     5775de7b0fc84a29511dc4a1a480c3ab32da2ade (diff)
Merge "DR AAF CADI integration"
Diffstat (limited to 'datarouter-node/src')
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java |  99
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java |  15
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java |  17
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java |  30
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java |   3
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java | 143
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java | 202
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java | 106
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java | 160
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java |  88
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java |  15
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java |   2
-rw-r--r--  datarouter-node/src/main/resources/drNodeCadi.properties |  23
-rw-r--r--  datarouter-node/src/main/resources/node.properties |  80
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java | 121
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java |  23
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java |   2
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java |   2
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java |  16
19 files changed, 932 insertions, 215 deletions
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java
new file mode 100644
index 00000000..b0122596
--- /dev/null
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java
@@ -0,0 +1,99 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.dmaap.datarouter.node;
+
+import org.apache.log4j.Logger;
+import org.onap.aaf.cadi.PropAccess;
+import org.onap.aaf.cadi.filter.CadiFilter;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+
+public class DRNodeCadiFilter extends CadiFilter {
+ private static Logger logger = Logger.getLogger("org.onap.dmaap.datarouter.node.NodeServlet");
+
+ DRNodeCadiFilter(boolean init, PropAccess access) throws ServletException {
+ super(init, access);
+ }
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+ HttpServletRequest httpRequest = (HttpServletRequest) request;
+ String path = httpRequest.getPathInfo();
+ if (!(path.startsWith("/internal"))) {
+ if (!(httpRequest.getMethod().equalsIgnoreCase("POST"))) {
+ if (httpRequest.getMethod().equalsIgnoreCase("DELETE") && path.startsWith("/delete")) {
+ chain.doFilter(request, response);
+ } else {
+ String feedId = getFeedId(request, response);
+ String aafDbInstance = NodeConfigManager.getInstance().getAafInstance(feedId);
+ if (aafDbInstance != null && !aafDbInstance.equals("") && !aafDbInstance.equalsIgnoreCase("legacy")) {
+ logger.info("DRNodeCadiFilter - doFilter: FeedId - " + feedId + ":" + "AAF Instance -" + aafDbInstance);
+ super.doFilter(request, response, chain);
+ } else {
+ logger.info("DRNodeCadiFilter - doFilter: FeedId - " + feedId + ":" + "Legacy Feed");
+ chain.doFilter(request, response);
+ }
+ }
+ }
+ } else {
+ chain.doFilter(request, response);
+ }
+ }
+
+ private String getFeedId(ServletRequest request, ServletResponse response) {
+ HttpServletRequest req = (HttpServletRequest) request;
+ HttpServletResponse resp = (HttpServletResponse) response;
+ String fileid = req.getPathInfo();
+ if (fileid == null) {
+ logger.info("NODE0105 Rejecting bad URI for PUT " + req.getPathInfo() + " from " + req.getRemoteAddr());
+ try {
+ resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
+ } catch (IOException e) {
+ logger.error("NODE0541 DRNodeCadiFilter.getFeedId: ", e);
+ }
+ return null;
+ }
+ String feedid = "";
+
+ if (fileid.startsWith("/publish/")) {
+ fileid = fileid.substring(9);
+ int i = fileid.indexOf('/');
+ if (i == -1 || i == fileid.length() - 1) {
+ logger.info("NODE0105 Rejecting bad URI for PUT (publish) of " + req.getPathInfo() + " from " + req.getRemoteAddr());
+ try {
+ resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid request URI. Expecting <feed-publishing-url>/<fileid>. Possible missing fileid.");
+ } catch (IOException e) {
+ logger.error("NODE0542 DRNodeCadiFilter.getFeedId: ", e);
+ }
+ return null;
+ }
+ feedid = fileid.substring(0, i);
+ }
+ return feedid;
+ }
+
+}
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java
index c085ebe7..46c46675 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java
@@ -64,6 +64,7 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
private String feedid;
private String subid;
private int attempts;
+ private boolean followRedirects;
private String[][] hdrs;
private String newInvocationId;
private long resumeTime;
@@ -82,6 +83,7 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
this.pubid = pubid;
destInfo = deliveryTaskHelper.getDestinationInfo();
subid = destInfo.getSubId();
+ this.followRedirects = destInfo.isFollowRedirects();
feedid = destInfo.getLogData();
spool = destInfo.getSpool();
String dfn = spool + "/" + pubid;
@@ -127,7 +129,7 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
hdrv.add(new String[]{h, v});
}
} catch (Exception e) {
- loggerDeliveryTask.error("Exception "+e.getStackTrace(),e);
+ loggerDeliveryTask.error("Exception "+ Arrays.toString(e.getStackTrace()), e);
}
hdrs = hdrv.toArray(new String[hdrv.size()][]);
url = deliveryTaskHelper.getDestURL(fileid);
@@ -247,7 +249,7 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
}
deliveryTaskHelper.reportStatus(this, rc, xpubid, rmsg);
} catch (Exception e) {
- loggerDeliveryTask.error("Exception " + e.getStackTrace(), e);
+ loggerDeliveryTask.error("Exception " + Arrays.toString(e.getStackTrace()), e);
deliveryTaskHelper.reportException(this, e);
}
}
@@ -326,7 +328,7 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
} catch (ProtocolException pe) {
deliveryTaskHelper.reportDeliveryExtra(this, -1L);
// Rcvd error instead of 100-continue
- loggerDeliveryTask.error("Exception " + pe.getStackTrace(), pe);
+ loggerDeliveryTask.error("Exception " + Arrays.toString(pe.getStackTrace()), pe);
}
return outputStream;
}
@@ -425,4 +427,11 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
String getFeedId() {
return (feedid);
}
+
+ /**
+ * Get the followRedirects for this delivery task
+ */
+ public boolean getFollowRedirects() {
+ return(followRedirects);
+ }
}
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java
index 73753527..8aa339f5 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java
@@ -39,7 +39,8 @@ public class DestInfo {
private boolean use100;
private boolean privilegedSubscriber;
private boolean decompress;
-
+ private boolean followRedirects;
+ private String aafInstance;
/**
* Create a destination information object.
*
@@ -53,9 +54,10 @@ public class DestInfo {
* @param metaonly Is this a metadata only delivery?
* @param use100 Should I use expect 100-continue?
* @param privilegedSubscriber Can we wait to receive a file processed acknowledgement before deleting file
+ * @param followRedirects Is follow redirect of destination enabled?
* @param decompress To see if the they want there information compressed or decompressed
*/
- public DestInfo(String name, String spool, String subid, String logdata, String url, String authuser, String authentication, boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean decompress) {
+ public DestInfo(String name, String spool, String subid, String logdata, String url, String authuser, String authentication, boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean followRedirects, boolean decompress) {
this.name = name;
this.spool = spool;
this.subid = subid;
@@ -66,6 +68,7 @@ public class DestInfo {
this.metaonly = metaonly;
this.use100 = use100;
this.privilegedSubscriber = privilegedSubscriber;
+ this.followRedirects = followRedirects;
this.decompress = decompress;
}
@@ -87,6 +90,7 @@ public class DestInfo {
this.metaonly = subscription.isMetaDataOnly();
this.use100 = subscription.isUsing100();
this.privilegedSubscriber = subscription.isPrivilegedSubscriber();
+ this.followRedirects = subscription.getFollowRedirect();
this.decompress = subscription.isDecompress();
}
@@ -186,6 +190,15 @@ public class DestInfo {
}
/**
+ * Should I follow redirects?
+ *
+ * @return True if I should.
+ */
+ public boolean isFollowRedirects() {
+ return (followRedirects);
+ }
+
+ /**
* Should i decompress the file before sending it on
*
* @return True if I should.
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java
index 35ba0951..b8db0309 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java
@@ -26,6 +26,7 @@ package org.onap.dmaap.datarouter.node;
import org.apache.log4j.Logger;
+import java.io.IOException;
import java.util.*;
import java.net.*;
@@ -62,24 +63,37 @@ public class IsFrom {
long now = System.currentTimeMillis();
if (now > nextcheck) {
nextcheck = now + 10000;
- Vector<String> v = new Vector<>();
+ ArrayList<String> hostAddrArray = new ArrayList<>();
try {
InetAddress[] addrs = InetAddress.getAllByName(fqdn);
- for (InetAddress a : addrs) {
- v.add(a.getHostAddress());
+ for (InetAddress addr : addrs) {
+ hostAddrArray.add(addr.getHostAddress());
}
} catch (UnknownHostException e) {
logger.debug("IsFrom: UnknownHostEx: " + e.toString(), e);
}
- ips = v.toArray(new String[v.size()]);
+ ips = hostAddrArray.toArray(new String[0]);
logger.info("IsFrom: DNS ENTRIES FOR FQDN " + fqdn + " : " + Arrays.toString(ips));
}
- for (String s : ips) {
- if (s.equals(ip) || s.equals(System.getenv("DMAAP_DR_PROV_SERVICE_HOST"))) {
- return (true);
+ for (String ipAddr : ips) {
+ if (ipAddr.equals(ip)) {
+ return true;
}
}
- return (false);
+ return false;
+ }
+
+ synchronized boolean isReachable(String ip) {
+ try {
+ if (InetAddress.getByName(ip).isReachable(1000)) {
+ return true;
+ }
+ } catch (UnknownHostException e) {
+ logger.debug("IsFrom: UnknownHostEx: " + e.toString(), e);
+ } catch (IOException e) {
+ logger.debug("IsFrom: Failed to parse IP : " + ip + " : " + e.toString(), e);
+ }
+ return false;
}
/**
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java
index 032c6ced..3fa5dc29 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java
@@ -104,8 +104,7 @@ public class LogManager extends TimerTask {
public Uploader() {
dq = new DeliveryQueue(this,
- new DestInfo("LogUpload", uploaddir, null, null, null, config.getMyName(), config.getMyAuth(), false,
- false, false, false));
+ new DestInfo("LogUpload", uploaddir, null, null, null, config.getMyName(), config.getMyAuth(), false, false, false, false, false));
setDaemon(true);
setName("Log Uploader");
start();
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
index 5577e52e..791eee12 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
@@ -25,9 +25,11 @@
package org.onap.dmaap.datarouter.node;
import java.io.File;
+import java.util.Arrays;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Vector;
+import org.apache.log4j.Logger;
/**
* Processed configuration for this node.
@@ -37,7 +39,7 @@ import java.util.Vector;
* discarded.
*/
public class NodeConfig {
-
+ private static Logger logger = Logger.getLogger("org.onap.dmaap.datarouter.node.NodeConfig");
/**
* Raw configuration entry for a data router node
*/
@@ -104,6 +106,12 @@ public class NodeConfig {
private String id;
private String logdata;
private String status;
+ private String createdDate;
+ /*
+ * AAF changes: TDP EPIC US# 307413
+ * Passing aafInstance from to identify legacy/AAF feeds
+ */
+ private String aafInstance;
/**
* Construct a feed configuration entry.
@@ -113,10 +121,27 @@ public class NodeConfig {
* @param status The reason why this feed cannot be used (Feed has been deleted, Feed has been suspended) or
* null if it is valid.
*/
- public ProvFeed(String id, String logdata, String status) {
+ public ProvFeed(String id, String logdata, String status, String createdDate, String aafInstance) {
this.id = id;
this.logdata = logdata;
this.status = status;
+ this.createdDate = createdDate;
+ this.aafInstance = aafInstance;
+ }
+
+ /**
+ * Get the created date of the data feed.
+ */
+ public String getCreatedDate()
+ {
+ return(createdDate);
+ }
+
+ /**
+ * Get the aafInstance of the data feed.
+ */
+ public String getAafInstance() {
+ return aafInstance;
}
/**
@@ -232,6 +257,7 @@ public class NodeConfig {
private boolean metaonly;
private boolean use100;
private boolean privilegedSubscriber;
+ private boolean followRedirect;
private boolean decompress;
/**
@@ -246,10 +272,10 @@ public class NodeConfig {
* @param metaonly Is this a meta data only subscription?
* @param use100 Should we send Expect: 100-continue?
* @param privilegedSubscriber Can we wait to receive a delete file call before deleting file
+ * @param followRedirect Is follow redirect of destination enabled?
* @param decompress To see if they want their information compressed or decompressed
*/
- public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials,
- boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean decompress) {
+ public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials, boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean followRedirect, boolean decompress) {
this.subid = subid;
this.feedid = feedid;
this.url = url;
@@ -258,6 +284,7 @@ public class NodeConfig {
this.metaonly = metaonly;
this.use100 = use100;
this.privilegedSubscriber = privilegedSubscriber;
+ this.followRedirect = followRedirect;
this.decompress = decompress;
}
@@ -319,10 +346,18 @@ public class NodeConfig {
/**
* Should i decompress the file before sending it on
- */
+ */
public boolean isDecompress() {
return (decompress);
}
+
+ /**
+ * New field is added - FOLLOW_REDIRECTS feature iTrack:DATARTR-17 - 1706
+ * Get the followRedirect of this destination
+ */
+ boolean getFollowRedirect() {
+ return(followRedirect);
+ }
}
/**
@@ -348,7 +383,12 @@ public class NodeConfig {
this.feedid = feedid;
this.subnet = subnet;
this.user = user;
- this.nodes = nodes;
+ //Sonar fix
+ if(nodes == null) {
+ this.nodes = new String[0];
+ } else {
+ this.nodes = Arrays.copyOf(nodes, nodes.length);
+ }
}
/**
@@ -480,6 +520,8 @@ public class NodeConfig {
Hashtable<String, String> authusers = new Hashtable<String, String>();
Redirection[] redirections;
Target[] targets;
+ String createdDate;
+ String aafInstance;
}
private Hashtable<String, String> params = new Hashtable<>();
@@ -510,24 +552,24 @@ public class NodeConfig {
Vector<DestInfo> destInfos = new Vector<>();
myauth = NodeUtils.getNodeAuthHdr(myname, nodeauthkey);
for (ProvNode pn : pd.getNodes()) {
- String cn = pn.getCName();
- if (nodeinfo.get(cn) != null) {
+ String cName = pn.getCName();
+ if (nodeinfo.get(cName) != null) {
continue;
}
- String auth = NodeUtils.getNodeAuthHdr(cn, nodeauthkey);
- DestInfo di = new DestInfo("n:" + cn, spooldir + "/n/" + cn, null, "n2n-" + cn,
- "https://" + cn + ":" + port + "/internal/publish", cn, myauth, false, true, false, false);
+ String auth = NodeUtils.getNodeAuthHdr(cName, nodeauthkey);
+ DestInfo di = new DestInfo("n:" + cName, spooldir + "/n/" + cName, null, "n2n-" + cName,
+ "https://" + cName + ":" + port + "/internal/publish", cName, myauth, false, true, false, false, false);
(new File(di.getSpool())).mkdirs();
destInfos.add(di);
- nodeinfo.put(cn, di);
- nodes.put(auth, new IsFrom(cn));
+ nodeinfo.put(cName, di);
+ nodes.put(auth, new IsFrom(cName));
}
- PathFinder pf = new PathFinder(myname, nodeinfo.keySet().toArray(new String[nodeinfo.size()]), pd.getHops());
- Hashtable<String, Vector<Redirection>> rdtab = new Hashtable<String, Vector<Redirection>>();
+ PathFinder pf = new PathFinder(myname, nodeinfo.keySet().toArray(new String[0]), pd.getHops());
+ Hashtable<String, Vector<Redirection>> rdtab = new Hashtable<>();
for (ProvForceIngress pfi : pd.getForceIngress()) {
Vector<Redirection> v = rdtab.get(pfi.getFeedId());
if (v == null) {
- v = new Vector<Redirection>();
+ v = new Vector<>();
rdtab.put(pfi.getFeedId(), v);
}
Redirection r = new Redirection();
@@ -538,16 +580,16 @@ public class NodeConfig {
r.nodes = pfi.getNodes();
v.add(r);
}
- Hashtable<String, Hashtable<String, String>> pfutab = new Hashtable<String, Hashtable<String, String>>();
+ Hashtable<String, Hashtable<String, String>> pfutab = new Hashtable<>();
for (ProvFeedUser pfu : pd.getFeedUsers()) {
Hashtable<String, String> t = pfutab.get(pfu.getFeedId());
if (t == null) {
- t = new Hashtable<String, String>();
+ t = new Hashtable<>();
pfutab.put(pfu.getFeedId(), t);
}
t.put(pfu.getCredentials(), pfu.getUser());
}
- Hashtable<String, String> egrtab = new Hashtable<String, String>();
+ Hashtable<String, String> egrtab = new Hashtable<>();
for (ProvForceEgress pfe : pd.getForceEgress()) {
if (pfe.getNode().equals(myname) || nodeinfo.get(pfe.getNode()) == null) {
continue;
@@ -558,7 +600,7 @@ public class NodeConfig {
for (ProvFeedSubnet pfs : pd.getFeedSubnets()) {
Vector<SubnetMatcher> v = pfstab.get(pfs.getFeedId());
if (v == null) {
- v = new Vector<SubnetMatcher>();
+ v = new Vector<>();
pfstab.put(pfs.getFeedId(), v);
}
v.add(new SubnetMatcher(pfs.getCidr()));
@@ -584,6 +626,7 @@ public class NodeConfig {
sididx = Integer.parseInt(subId);
sididx -= sididx % 100;
} catch (Exception e) {
+ logger.error("NODE0517 Exception NodeConfig: "+e);
}
String subscriptionDirectory = sididx + "/" + subId;
DestInfo destinationInfo = new DestInfo("s:" + subId,
@@ -603,7 +646,7 @@ public class NodeConfig {
}
sb.append(' ').append(subId);
}
- alldests = destInfos.toArray(new DestInfo[destInfos.size()]);
+ alldests = destInfos.toArray(new DestInfo[0]);
for (ProvFeed pfx : pd.getFeeds()) {
String fid = pfx.getId();
Feed f = feeds.get(fid);
@@ -612,13 +655,19 @@ public class NodeConfig {
}
f = new Feed();
feeds.put(fid, f);
+ f.createdDate = pfx.getCreatedDate();
f.loginfo = pfx.getLogData();
f.status = pfx.getStatus();
+ /*
+ * AAF changes: TDP EPIC US# 307413
+ * Passing aafInstance from ProvFeed to identify legacy/AAF feeds
+ */
+ f.aafInstance = pfx.getAafInstance();
Vector<SubnetMatcher> v1 = pfstab.get(fid);
if (v1 == null) {
f.subnets = new SubnetMatcher[0];
} else {
- f.subnets = v1.toArray(new SubnetMatcher[v1.size()]);
+ f.subnets = v1.toArray(new SubnetMatcher[0]);
}
Hashtable<String, String> h1 = pfutab.get(fid);
if (h1 == null) {
@@ -629,7 +678,7 @@ public class NodeConfig {
if (v2 == null) {
f.redirections = new Redirection[0];
} else {
- f.redirections = v2.toArray(new Redirection[v2.size()]);
+ f.redirections = v2.toArray(new Redirection[0]);
}
StringBuffer sb = feedTargets.get(fid);
if (sb == null) {
@@ -687,7 +736,7 @@ public class NodeConfig {
}
}
}
- return (tv.toArray(new Target[tv.size()]));
+ return (tv.toArray(new Target[0]));
}
/**
@@ -744,6 +793,32 @@ public class NodeConfig {
}
/**
+ * Check whether publication is allowed for AAF Feed.
+ * @param feedid The ID of the feed being requested.
+ * @param ip The requesting IP address
+ */
+ public String isPublishPermitted(String feedid, String ip) {
+ Feed f = feeds.get(feedid);
+ String nf = "Feed does not exist";
+ if (f != null) {
+ nf = f.status;
+ }
+ if (nf != null) {
+ return(nf);
+ }
+ if (f.subnets.length == 0) {
+ return(null);
+ }
+ byte[] addr = NodeUtils.getInetAddress(ip);
+ for (SubnetMatcher snm: f.subnets) {
+ if (snm.matches(addr)) {
+ return(null);
+ }
+ }
+ return("Publisher not permitted for this feed");
+ }
+
+ /**
* Get authenticated user
*/
public String getAuthUser(String feedid, String credentials) {
@@ -751,6 +826,16 @@ public class NodeConfig {
}
/**
+ * AAF changes: TDP EPIC US# 307413
+ * Check AAF_instance for feed ID
+ * @param feedid The ID of the feed specified
+ */
+ public String getAafInstance(String feedid) {
+ Feed f = feeds.get(feedid);
+ return f.aafInstance;
+ }
+
+ /**
* Check if the request should be redirected to a different ingress node
*/
public String getIngressNode(String feedid, String user, String ip) {
@@ -811,6 +896,16 @@ public class NodeConfig {
}
/**
+ * Get the creation date for a feed
+ * @param feedid The feed ID
+ * @return the timestamp of creation date of feed id passed
+ */
+ public String getCreatedDate(String feedid) {
+ Feed f = feeds.get(feedid);
+ return(f.createdDate);
+ }
+
+ /**
* Get the feed ID for a subscription
*
* @param subid The subscription ID
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
index d98c47ae..8011c632 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
@@ -26,6 +26,10 @@ package org.onap.dmaap.datarouter.node;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import org.apache.log4j.Logger;
+import org.onap.aaf.cadi.PropAccess;
+import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
+
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
@@ -33,8 +37,6 @@ import java.io.Reader;
import java.net.URL;
import java.util.Properties;
import java.util.Timer;
-import org.apache.log4j.Logger;
-import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
/**
@@ -95,32 +97,54 @@ public class NodeConfigManager implements DeliveryQueueHelper {
private String eventlogsuffix;
private String eventloginterval;
private boolean followredirects;
+ private String [] enabledprotocols;
+ private String aafType;
+ private String aafInstance;
+ private String aafAction;
+ private String aafURL;
+ private boolean cadiEnabled;
/**
* Get the default node configuration manager
*/
public static NodeConfigManager getInstance() {
- return (base);
+ return base;
}
/**
* Initialize the configuration of a Data Router node
*/
private NodeConfigManager() {
- Properties p = new Properties();
+
+ Properties drNodeProperties = new Properties();
try {
- p.load(new FileInputStream(System
+ logger.info("NODE0301 Loading local config file node.properties");
+ drNodeProperties.load(new FileInputStream(System
.getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties")));
} catch (Exception e) {
-
NodeUtils.setIpAndFqdnForEelf("NodeConfigManager");
eelflogger.error(EelfMsgs.MESSAGE_PROPERTIES_LOAD_ERROR);
logger.error("NODE0301 Unable to load local configuration file " + System
- .getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties"),
- e);
+ .getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties"), e);
}
- provurl = p.getProperty("ProvisioningURL", "https://feeds-drtr.web.att.com/internal/prov");
+ provurl = drNodeProperties.getProperty("ProvisioningURL", "https://dmaap-dr-prov:8443/internal/prov");
+ /*
+ * START - AAF changes: TDP EPIC US# 307413
+ * Pull AAF settings from node.properties
+ */
+ aafType = drNodeProperties.getProperty("AAFType", "org.onap.dmaap-dr.feed");
+ aafInstance = drNodeProperties.getProperty("AAFInstance", "legacy");
+ aafAction = drNodeProperties.getProperty("AAFAction", "publish");
+ aafURL = drNodeProperties.getProperty("AafUrl", "https://aaf-onap-test.osaaf.org:8095");
+ cadiEnabled = Boolean.parseBoolean(drNodeProperties.getProperty("CadiEnabled", "false"));
+ /*
+ * END - AAF changes: TDP EPIC US# 307413
+ * Pull AAF settings from node.properties
+ */
+ //Disable and enable protocols*/
+ enabledprotocols = ((drNodeProperties.getProperty("NodeHttpsProtocols")).trim()).split("\\|");
+
try {
provhost = (new URL(provurl)).getHost();
} catch (Exception e) {
@@ -130,14 +154,14 @@ public class NodeConfigManager implements DeliveryQueueHelper {
System.exit(1);
}
logger.info("NODE0303 Provisioning server is " + provhost);
- eventlogurl = p.getProperty("LogUploadURL", "https://feeds-drtr.web.att.com/internal/logs");
+ eventlogurl = drNodeProperties.getProperty("LogUploadURL", "https://feeds-drtr.web.att.com/internal/logs");
provcheck = new IsFrom(provhost);
- gfport = Integer.parseInt(p.getProperty("IntHttpPort", "8080"));
- svcport = Integer.parseInt(p.getProperty("IntHttpsPort", "8443"));
- port = Integer.parseInt(p.getProperty("ExtHttpsPort", "443"));
- long minpfinterval = Long.parseLong(p.getProperty("MinProvFetchInterval", "10000"));
- long minrsinterval = Long.parseLong(p.getProperty("MinRedirSaveInterval", "10000"));
- spooldir = p.getProperty("SpoolDir", "spool");
+ gfport = Integer.parseInt(drNodeProperties.getProperty("IntHttpPort", "8080"));
+ svcport = Integer.parseInt(drNodeProperties.getProperty("IntHttpsPort", "8443"));
+ port = Integer.parseInt(drNodeProperties.getProperty("ExtHttpsPort", "443"));
+ long minpfinterval = Long.parseLong(drNodeProperties.getProperty("MinProvFetchInterval", "10000"));
+ long minrsinterval = Long.parseLong(drNodeProperties.getProperty("MinRedirSaveInterval", "10000"));
+ spooldir = drNodeProperties.getProperty("SpoolDir", "spool");
File fdir = new File(spooldir + "/f");
fdir.mkdirs();
for (File junk : fdir.listFiles()) {
@@ -145,26 +169,26 @@ public class NodeConfigManager implements DeliveryQueueHelper {
junk.delete();
}
}
- logdir = p.getProperty("LogDir", "logs");
+ logdir = drNodeProperties.getProperty("LogDir", "logs");
(new File(logdir)).mkdirs();
- logretention = Long.parseLong(p.getProperty("LogRetention", "30")) * 86400000L;
+ logretention = Long.parseLong(drNodeProperties.getProperty("LogRetention", "30")) * 86400000L;
eventlogprefix = logdir + "/events";
eventlogsuffix = ".log";
- String redirfile = p.getProperty("RedirectionFile", "etc/redirections.dat");
- kstype = p.getProperty("KeyStoreType", "jks");
- ksfile = p.getProperty("KeyStoreFile", "etc/keystore");
- kspass = p.getProperty("KeyStorePassword", "changeme");
- kpass = p.getProperty("KeyPassword", "changeme");
- tstype = p.getProperty("TrustStoreType", "jks");
- tsfile = p.getProperty("TrustStoreFile");
- tspass = p.getProperty("TrustStorePassword", "changeme");
+ String redirfile = drNodeProperties.getProperty("RedirectionFile", "etc/redirections.dat");
+ kstype = drNodeProperties.getProperty("KeyStoreType", "jks");
+ ksfile = drNodeProperties.getProperty("KeyStoreFile", "etc/keystore");
+ kspass = drNodeProperties.getProperty("KeyStorePassword", "changeme");
+ kpass = drNodeProperties.getProperty("KeyPassword", "changeme");
+ tstype = drNodeProperties.getProperty("TrustStoreType", "jks");
+ tsfile = drNodeProperties.getProperty("TrustStoreFile");
+ tspass = drNodeProperties.getProperty("TrustStorePassword", "changeme");
if (tsfile != null && tsfile.length() > 0) {
System.setProperty("javax.net.ssl.trustStoreType", tstype);
System.setProperty("javax.net.ssl.trustStore", tsfile);
System.setProperty("javax.net.ssl.trustStorePassword", tspass);
}
- nak = p.getProperty("NodeAuthKey", "Node123!");
- quiesce = new File(p.getProperty("QuiesceFile", "etc/SHUTDOWN"));
+ nak = drNodeProperties.getProperty("NodeAuthKey", "Node123!");
+ quiesce = new File(drNodeProperties.getProperty("QuiesceFile", "etc/SHUTDOWN"));
myname = NodeUtils.getCanonicalName(kstype, ksfile, kspass);
if (myname == null) {
NodeUtils.setIpAndFqdnForEelf("NodeConfigManager");
@@ -253,7 +277,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
private void fetchconfig() {
try {
- System.out.println("provurl:: " + provurl);
+ logger.info("NodeConfigMan.fetchConfig: provurl:: " + provurl);
Reader r = new InputStreamReader((new URL(provurl)).openStream());
config = new NodeConfig(new ProvData(r), myname, spooldir, port, nak);
localconfig();
@@ -263,6 +287,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
try {
rr.run();
} catch (Exception e) {
+ logger.error("NODE0518 Exception fetchconfig: " + e);
}
}
} catch (Exception e) {
@@ -278,12 +303,12 @@ public class NodeConfigManager implements DeliveryQueueHelper {
* fetch the provisioning data, ignore the request. If the data has been fetched very recently (default 10
* seconds), wait a while before fetching again.
*/
- public synchronized void gofetch(String remoteaddr) {
- if (provcheck.isFrom(remoteaddr)) {
- logger.info("NODE0307 Received configuration fetch request from provisioning server " + remoteaddr);
+ public synchronized void gofetch(String remoteAddr) {
+ if (provcheck.isReachable(remoteAddr)) {
+ logger.info("NODE0307 Received configuration fetch request from provisioning server " + remoteAddr);
pfetcher.request();
} else {
- logger.info("NODE0308 Received configuration fetch request from unexpected server " + remoteaddr);
+ logger.info("NODE0308 Received configuration fetch request from unexpected server " + remoteAddr);
}
}
@@ -345,6 +370,17 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
+ * Check whether publication is allowed for AAF Feed.
+ *
+ * @param feedid The ID of the feed being requested
+ * @param ip The requesting IP address
+ * @return True if the IP and credentials are valid for the specified feed.
+ */
+ public String isPublishPermitted(String feedid, String ip) {
+ return(config.isPublishPermitted(feedid, ip));
+ }
+
+ /**
* Check who the user is given the feed ID and the offered credentials.
*
* @param feedid The ID of the feed specified
@@ -356,6 +392,15 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
+ * AAF changes: TDP EPIC US# 307413
+ * Check AAF_instance for feed ID in NodeConfig
+ * @param feedid The ID of the feed specified
+ */
+ public String getAafInstance(String feedid) {
+ return(config.getAafInstance(feedid));
+ }
+
+ /**
* Check if the publish request should be sent to another node based on the feedid, user, and source IP address.
*
* @param feedid The ID of the feed specified
@@ -461,6 +506,23 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
+ * Set up redirection on receipt of a 3XX from a target URL
+ */
+ public boolean handleRedirectionSubLevel(DeliveryTask task, DestInfo destinfo, String redirto, String fileid) {
+ fileid = "/" + fileid;
+ String subid = destinfo.getSubId();
+ String purl = destinfo.getURL();
+ if (task.getFollowRedirects() && subid != null && redirto.endsWith(fileid)) {
+ redirto = redirto.substring(0, redirto.length() - fileid.length());
+ if (!redirto.equals(purl)) {
+ rdmgr.redirect(subid, purl, redirto);
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
* Handle unreachable target URL
*/
public void handleUnreachable(DestInfo destinationInfo) {
@@ -530,6 +592,15 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
+ * Get the creation date for a feed
+ * @param feedid The feed ID
+ * @return the timestamp of creation date of feed id passed
+ */
+ public String getCreatedDate(String feedid) {
+ return(config.getCreatedDate(feedid));
+ }
+
+ /**
* Get the spool directory for temporary files
*/
public String getSpoolDir() {
@@ -698,6 +769,16 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
+ * Disable and enable protocols
+ * */
+ public String[] getEnabledprotocols() {
+ return enabledprotocols;
+ }
+ public void setEnabledprotocols(String[] enabledprotocols) {
+ this.enabledprotocols = enabledprotocols.clone();
+ }
+
+ /**
* Get the spool directory for a subscription
*/
public String getSpoolDir(String subid, String remoteaddr) {
@@ -716,4 +797,59 @@ public class NodeConfigManager implements DeliveryQueueHelper {
return (null);
}
}
+
+ public String getAafType() {
+ return aafType;
+ }
+ public void setAafType(String aafType) {
+ this.aafType = aafType;
+ }
+ public String getAafInstance() {
+ return aafInstance;
+ }
+ public void setAafInstance(String aafInstance) {
+ this.aafInstance = aafInstance;
+ }
+ public String getAafAction() {
+ return aafAction;
+ }
+ public void setAafAction(String aafAction) {
+ this.aafAction = aafAction;
+ }
+ /*
+ * Get aafURL from SWM variable
+ * */
+ public String getAafURL() {
+ return aafURL;
+ }
+ public void setAafURL(String aafURL) {
+ this.aafURL = aafURL;
+ }
+
+ public boolean getCadiEnabeld() {
+ return cadiEnabled;
+ }
+ public void setCadiEnabled(boolean cadiEnabled) {
+ this.cadiEnabled = cadiEnabled;
+ }
+
+ /**
+ * Builds the permissions string to be verified
+ *
+ * @param aafInstance The aaf instance
+ * @return The permissions
+ */
+ protected String getPermission(String aafInstance) {
+ try {
+ String type = getAafType();
+ String action = getAafAction();
+ if (aafInstance == null || aafInstance.equals("")) {
+ aafInstance = getAafInstance();
+ }
+ return type + "|" + aafInstance + "|" + action;
+ } catch (Exception e) {
+ logger.error("NODE0543 NodeConfigManager.getPermission: ", e);
+ }
+ return null;
+ }
}
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java
index d25531a7..7ff33ff9 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java
@@ -23,19 +23,20 @@
package org.onap.dmaap.datarouter.node;
-import java.util.Arrays;
import org.apache.log4j.Logger;
import org.eclipse.jetty.http.HttpVersion;
-import org.eclipse.jetty.server.Connector;
-import org.eclipse.jetty.server.HttpConfiguration;
-import org.eclipse.jetty.server.HttpConnectionFactory;
-import org.eclipse.jetty.server.SecureRequestCustomizer;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.server.SslConnectionFactory;
+import org.eclipse.jetty.server.*;
+import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.onap.aaf.cadi.PropAccess;
+
+import javax.servlet.DispatcherType;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.EnumSet;
+import java.util.Properties;
/**
* The main starting point for the Data Router node
@@ -47,6 +48,18 @@ public class NodeMain {
private static Logger nodeMainLogger = Logger.getLogger("org.onap.dmaap.datarouter.node.NodeMain");
+ class Inner {
+ InputStream getCadiProps() {
+ InputStream in = null;
+ try {
+ in = getClass().getClassLoader().getResourceAsStream("drNodeCadi.properties");
+ } catch (Exception e) {
+ nodeMainLogger.error("Exception in Inner.getCadiProps() method " + e.getMessage());
+ }
+ return in;
+ }
+ }
+
private static class WaitForConfig implements Runnable {
private NodeConfigManager localNodeConfigManager;
@@ -67,8 +80,8 @@ public class NodeMain {
wait();
} catch (Exception exception) {
nodeMainLogger
- .debug("NodeMain: waitForConfig exception. Exception Message:- " + exception.toString(),
- exception);
+ .debug("NodeMain: waitForConfig exception. Exception Message:- " + exception.toString(),
+ exception);
}
}
localNodeConfigManager.deregisterConfigTask(this);
@@ -89,8 +102,8 @@ public class NodeMain {
/**
* Start the data router.
* <p>
- * The location of the node configuration file can be set using the org.onap.dmaap.datarouter.node.ConfigFile system
- * property. By default, it is "etc/node.properties".
+ * The location of the node configuration file can be set using the org.onap.dmaap.datarouter.node.properties system
+ * property. By default, it is "/opt/app/datartr/etc/node.properties".
*/
public static void main(String[] args) throws Exception {
nodeMainLogger.info("NODE0001 Data Router Node Starting");
@@ -100,15 +113,15 @@ public class NodeMain {
(new WaitForConfig(nodeConfigManager)).waitForConfig();
delivery = new Delivery(nodeConfigManager);
new LogManager(nodeConfigManager);
+
Server server = new Server();
+
// HTTP configuration
HttpConfiguration httpConfiguration = new HttpConfiguration();
httpConfiguration.setRequestHeaderSize(2048);
// HTTP connector
- ServletContextHandler ctxt;
- try (ServerConnector httpServerConnector = new ServerConnector(server,
- new HttpConnectionFactory(httpConfiguration))) {
+ try (ServerConnector httpServerConnector = new ServerConnector(server, new HttpConnectionFactory(httpConfiguration))) {
httpServerConnector.setPort(nodeConfigManager.getHttpPort());
httpServerConnector.setIdleTimeout(2000);
@@ -118,10 +131,23 @@ public class NodeMain {
sslContextFactory.setKeyStorePath(nodeConfigManager.getKSFile());
sslContextFactory.setKeyStorePassword(nodeConfigManager.getKSPass());
sslContextFactory.setKeyManagerPassword(nodeConfigManager.getKPass());
- /* Skip SSLv3 Fixes */
+
+ //SP-6 : Fixes for SDV scan to exclude/remove DES/3DES ciphers are taken care by upgrading jdk in descriptor.xml
+ sslContextFactory.setExcludeCipherSuites(
+ "SSL_RSA_WITH_DES_CBC_SHA",
+ "SSL_DHE_RSA_WITH_DES_CBC_SHA",
+ "SSL_DHE_DSS_WITH_DES_CBC_SHA",
+ "SSL_RSA_EXPORT_WITH_RC4_40_MD5",
+ "SSL_RSA_EXPORT_WITH_DES40_CBC_SHA",
+ "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA",
+ "SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA"
+ );
+
sslContextFactory.addExcludeProtocols("SSLv3");
- nodeMainLogger.info("Excluded protocols node-" + Arrays.toString(sslContextFactory.getExcludeProtocols()));
- /* End of SSLv3 Fixes */
+ sslContextFactory.setIncludeProtocols(nodeConfigManager.getEnabledprotocols());
+ nodeMainLogger.info("NODE00004 Unsupported protocols node server:-" + String.join(",", sslContextFactory.getExcludeProtocols()));
+ nodeMainLogger.info("NODE00004 Supported protocols node server:-" + String.join(",", sslContextFactory.getIncludeProtocols()));
+ nodeMainLogger.info("NODE00004 Unsupported ciphers node server:-" + String.join(",", sslContextFactory.getExcludeCipherSuites()));
HttpConfiguration httpsConfiguration = new HttpConfiguration(httpConfiguration);
httpsConfiguration.setRequestHeaderSize(8192);
@@ -133,21 +159,47 @@ public class NodeMain {
// HTTPS connector
try (ServerConnector httpsServerConnector = new ServerConnector(server,
- new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString()),
- new HttpConnectionFactory(httpsConfiguration))) {
+ new SslConnectionFactory(sslContextFactory, HttpVersion.HTTP_1_1.asString()),
+ new HttpConnectionFactory(httpsConfiguration))) {
+
httpsServerConnector.setPort(nodeConfigManager.getHttpsPort());
- httpsServerConnector.setIdleTimeout(500000);
+ httpsServerConnector.setIdleTimeout(3600000);
httpsServerConnector.setAcceptQueueSize(2);
+ //Context Handler
+ ServletContextHandler servletContextHandler = new ServletContextHandler(0);
+ servletContextHandler.setContextPath("/");
+ servletContextHandler.addServlet(new ServletHolder(new NodeServlet(delivery)), "/*");
+
+ //CADI Filter activation check
+ if (nodeConfigManager.getCadiEnabeld()) {
+ Properties cadiProperties = new Properties();
+ try {
+ Inner obj = new NodeMain().new Inner();
+ InputStream in = obj.getCadiProps();
+ cadiProperties.load(in);
+ } catch (IOException e1) {
+ nodeMainLogger.error("NODE00005 Exception in NodeMain.Main() loading CADI properties " + e1.getMessage());
+ }
+ cadiProperties.setProperty("aaf_locate_url", nodeConfigManager.getAafURL());
+ nodeMainLogger.info("NODE00005 aaf_url set to - " + cadiProperties.getProperty("aaf_url"));
+
+ PropAccess access = new PropAccess(cadiProperties);
+ servletContextHandler.addFilter(new FilterHolder(new DRNodeCadiFilter(true, access)), "/*", EnumSet.of(DispatcherType.REQUEST));
+ }
+
+ server.setHandler(servletContextHandler);
server.setConnectors(new Connector[]{httpServerConnector, httpsServerConnector});
}
}
- ctxt = new ServletContextHandler(0);
- ctxt.setContextPath("/");
- server.setHandler(ctxt);
- ctxt.addServlet(new ServletHolder(new NodeServlet(delivery)), "/*");
- nodeMainLogger.info("NODE0005 Data Router Node Activating Service");
- server.start();
+
+ try {
+ server.start();
+ nodeMainLogger.info("NODE00006 Node Server started-" + server.getState());
+ } catch (Exception e) {
+ nodeMainLogger.info("NODE00006 Jetty failed to start. Reporting will we unavailable", e);
+ }
server.join();
+ nodeMainLogger.info("NODE00007 Node Server joined - " + server.getState());
}
}
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
index 79888795..93e901f9 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
@@ -26,25 +26,24 @@ package org.onap.dmaap.datarouter.node;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Writer;
+import org.apache.log4j.Logger;
+import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
+import org.slf4j.MDC;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Enumeration;
import java.util.regex.Pattern;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import org.apache.log4j.Logger;
+
+import static org.onap.dmaap.datarouter.node.NodeUtils.sendResponseError;
+
import org.jetbrains.annotations.Nullable;
-import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
-import org.slf4j.MDC;
import static org.onap.dmaap.datarouter.node.NodeUtils.*;
@@ -64,9 +63,8 @@ public class NodeServlet extends HttpServlet {
private static Logger logger = Logger.getLogger("org.onap.dmaap.datarouter.node.NodeServlet");
private static NodeConfigManager config;
private static Pattern MetaDataPattern;
- //Adding EELF Logger Rally:US664892
- private static EELFLogger eelflogger = EELFManager.getInstance()
- .getLogger(NodeServlet.class);
+ private static EELFLogger eelflogger = EELFManager.getInstance().getLogger(NodeServlet.class);
+ private boolean isAAFFeed = false;
private final Delivery delivery;
static {
@@ -88,6 +86,7 @@ public class NodeServlet extends HttpServlet {
/**
* Get the NodeConfigurationManager
*/
+ @Override
public void init() {
config = NodeConfigManager.getInstance();
logger.info("NODE0101 Node Servlet Configured");
@@ -97,14 +96,15 @@ public class NodeServlet extends HttpServlet {
if (config.isShutdown() || !config.isConfigured()) {
sendResponseError(resp, HttpServletResponse.SC_SERVICE_UNAVAILABLE, logger);
logger.info("NODE0102 Rejecting request: Service is being quiesced");
- return (true);
+ return true;
}
- return (false);
+ return false;
}
/**
* Handle a GET for /internal/fetchProv
*/
+ @Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
NodeUtils.setIpAndFqdnForEelf("doGet");
NodeUtils.setRequestIdAndInvocationId(req);
@@ -149,12 +149,13 @@ public class NodeServlet extends HttpServlet {
/**
* Handle all PUT requests
*/
+ @Override
protected void doPut(HttpServletRequest req, HttpServletResponse resp) {
NodeUtils.setIpAndFqdnForEelf("doPut");
NodeUtils.setRequestIdAndInvocationId(req);
eelflogger.info(EelfMsgs.ENTRY);
eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader("X-DMAAP-DR-ON-BEHALF-OF"),
- getIdFromPath(req) + "");
+ getIdFromPath(req) + "");
try {
common(req, resp, true);
} catch (IOException ioe) {
@@ -166,6 +167,7 @@ public class NodeServlet extends HttpServlet {
/**
* Handle all DELETE requests
*/
+ @Override
protected void doDelete(HttpServletRequest req, HttpServletResponse resp) {
NodeUtils.setIpAndFqdnForEelf("doDelete");
NodeUtils.setRequestIdAndInvocationId(req);
@@ -215,6 +217,27 @@ public class NodeServlet extends HttpServlet {
return;
}
feedid = fileid.substring(0, i);
+
+ if (config.getCadiEnabeld()) {
+ String path = req.getPathInfo();
+ if (!path.startsWith("/internal") && feedid != null) {
+ String aafInstance = config.getAafInstance(feedid);
+ if (!(aafInstance.equalsIgnoreCase("legacy"))) {
+ isAAFFeed = true;
+ String permission = config.getPermission(aafInstance);
+ logger.info("NodeServlet.common() permission string - " + permission);
+ //Check in CADI Framework API if user has AAF permission or not
+ if (!req.isUserInRole(permission)) {
+ String message = "AAF disallows access to permission string - " + permission;
+ logger.info("NODE0106 Rejecting unauthenticated PUT or DELETE of " + req.getPathInfo() + " from " + req.getRemoteAddr());
+ resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
+ eelflogger.info(EelfMsgs.EXIT);
+ return;
+ }
+ }
+ }
+ }
+
fileid = fileid.substring(i + 1);
pubid = config.getPublishId();
xpubid = req.getHeader("X-DMAAP-DR-PUBLISH-ID");
@@ -228,6 +251,7 @@ public class NodeServlet extends HttpServlet {
}
fileid = fileid.substring(18);
pubid = req.getHeader("X-DMAAP-DR-PUBLISH-ID");
+ user = "datartr"; // SP6 : Added usr as datartr to avoid null entries for internal routing
targets = config.parseRouting(req.getHeader("X-DMAAP-DR-ROUTING"));
} else {
logger.info("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
@@ -257,17 +281,34 @@ public class NodeServlet extends HttpServlet {
String logurl = "https://" + hp + "/internal/publish/" + fileid;
if (feedid != null) {
logurl = "https://" + hp + "/publish/" + feedid + "/" + fileid;
- String reason = config.isPublishPermitted(feedid, credentials, ip);
- if (reason != null) {
- logger.info(
- "NODE0111 Rejecting unauthorized publish attempt to feed " + feedid + " fileid " + fileid
- + " from "
- + ip + " reason " + reason);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, reason);
- eelflogger.info(EelfMsgs.EXIT);
- return;
+ //Cadi code starts
+ if (!isAAFFeed) {
+ String reason = config.isPublishPermitted(feedid, credentials, ip);
+ if (reason != null) {
+ logger.info("NODE0111 Rejecting unauthorized publish attempt to feed " + PathUtil.cleanString(feedid) + " fileid " + PathUtil.cleanString(fileid) + " from " + PathUtil.cleanString(ip) + " reason " + PathUtil.cleanString(reason));
+ resp.sendError(HttpServletResponse.SC_FORBIDDEN, reason);
+ eelflogger.info(EelfMsgs.EXIT);
+ return;
+ }
+ user = config.getAuthUser(feedid, credentials);
+ } else {
+ String reason = config.isPublishPermitted(feedid, ip);
+ if (reason != null) {
+ logger.info("NODE0111 Rejecting unauthorized publish attempt to feed " + PathUtil.cleanString(feedid) + " fileid " + PathUtil.cleanString(fileid) + " from " + PathUtil.cleanString(ip) + " reason Invalid AAF user- " + PathUtil.cleanString(reason));
+ String message = "Invalid AAF user- " + PathUtil.cleanString(reason);
+ logger.info("NODE0106 Rejecting unauthenticated PUT or DELETE of " + PathUtil.cleanString(req.getPathInfo()) + " from " + PathUtil.cleanString(req.getRemoteAddr()));
+ resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
+ return;
+ }
+ if ((req.getUserPrincipal() != null) && (req.getUserPrincipal().getName() != null)) {
+ String userName = req.getUserPrincipal().getName();
+ String[] attid = userName.split("@");
+ user = attid[0];
+ } else {
+ user = "AAFUser";
+ }
}
- user = config.getAuthUser(feedid, credentials);
+ //Cadi code Ends
String newnode = config.getIngressNode(feedid, user, ip);
if (newnode != null) {
String port = "";
@@ -276,17 +317,17 @@ public class NodeServlet extends HttpServlet {
port = ":" + iport;
}
String redirto = "https://" + newnode + port + "/publish/" + feedid + "/" + fileid;
- logger.info(
- "NODE0108 Redirecting publish attempt for feed " + feedid + " user " + user + " ip " + ip
- + " to "
- + redirto);
- resp.sendRedirect(redirto);
+ logger.info("NODE0108 Redirecting publish attempt for feed " + PathUtil.cleanString(feedid) + " user " + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip) + " to " + PathUtil.cleanString(redirto)); //Fortify scan fixes - log forging
+ resp.sendRedirect(PathUtil.cleanString(redirto)); //Fortify scan fixes-open redirect - 2 issues
eelflogger.info(EelfMsgs.EXIT);
return;
}
resp.setHeader("X-DMAAP-DR-PUBLISH-ID", pubid);
}
- String fbase = config.getSpoolDir() + "/" + pubid;
+ if (req.getPathInfo().startsWith("/internal/publish/")) {
+ feedid = req.getHeader("X-DMAAP-DR-FEED-ID");
+ }
+ String fbase = PathUtil.cleanString(config.getSpoolDir() + "/" + pubid); //Fortify scan fixes-Path manipulation
File data = new File(fbase);
File meta = new File(fbase + ".M");
OutputStream dos = null;
@@ -323,17 +364,13 @@ public class NodeServlet extends HttpServlet {
}
if ("x-dmaap-dr-meta".equals(hnlc)) {
if (hv.length() > 4096) {
- logger.info(
- "NODE0109 Rejecting publish attempt with metadata too long for feed " + feedid
- + " user " + user + " ip " + ip);
+ logger.info("NODE0109 Rejecting publish attempt with metadata too long for feed " + PathUtil.cleanString(feedid) + " user " + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip)); //Fortify scan fixes - log forging
resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Metadata too long");
eelflogger.info(EelfMsgs.EXIT);
return;
}
if (!MetaDataPattern.matcher(hv.replaceAll("\\\\.", "X")).matches()) {
- logger.info(
- "NODE0109 Rejecting publish attempt with malformed metadata for feed " + feedid
- + " user " + user + " ip " + ip);
+ logger.info("NODE0109 Rejecting publish attempt with malformed metadata for feed " + PathUtil.cleanString(feedid) + " user " + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip)); //Fortify scan fixes - log forging
resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Malformed metadata");
eelflogger.info(EelfMsgs.EXIT);
return;
@@ -343,10 +380,10 @@ public class NodeServlet extends HttpServlet {
}
}
}
- if(!hasRequestIdHeader){
+ if (!hasRequestIdHeader) {
mx.append("X-ONAP-RequestID\t").append(MDC.get("RequestId")).append('\n');
}
- if(!hasInvocationIdHeader){
+ if (!hasInvocationIdHeader) {
mx.append("X-InvocationID\t").append(MDC.get("InvocationId")).append('\n');
}
mx.append("X-DMAAP-DR-RECEIVED\t").append(rcvd).append('\n');
@@ -368,9 +405,9 @@ public class NodeServlet extends HttpServlet {
try {
exlen = Long.parseLong(req.getHeader("Content-Length"));
} catch (Exception e) {
+ logger.error("NODE0529 Exception common: " + e);
}
- StatusLog.logPubFail(pubid, feedid, logurl, req.getMethod(), ctype, exlen, data.length(), ip, user,
- ioe.getMessage());
+ StatusLog.logPubFail(pubid, feedid, logurl, req.getMethod(), ctype, exlen, data.length(), ip, user, ioe.getMessage());
eelflogger.info(EelfMsgs.EXIT);
throw ioe;
}
@@ -381,7 +418,7 @@ public class NodeServlet extends HttpServlet {
// TODO: unknown destination
continue;
}
- String dbase = di.getSpool() + "/" + pubid;
+ String dbase = PathUtil.cleanString(di.getSpool() + "/" + pubid); //Fortify scan fixes-Path Manipulation
Files.createLink(Paths.get(dbase), dpath);
mw = new FileWriter(meta);
mw.write(metadata);
@@ -393,13 +430,25 @@ public class NodeServlet extends HttpServlet {
}
resp.setStatus(HttpServletResponse.SC_NO_CONTENT);
- resp.getOutputStream().close();
- StatusLog.logPub(pubid, feedid, logurl, req.getMethod(), ctype, data.length(), ip, user,
- HttpServletResponse.SC_NO_CONTENT);
+ try {
+ resp.getOutputStream().close();
+ } catch (IOException ioe) {
+ long exlen = -1;
+ try {
+ exlen = Long.parseLong(req.getHeader("Content-Length"));
+ } catch (Exception e) {
+ logger.debug("NODE00000 Exception common: " + e);
+ }
+ StatusLog.logPubFail(pubid, feedid, logurl, req.getMethod(), ctype, exlen, data.length(), ip, user, ioe.getMessage());
+ //Fortify scan fixes - log forging
+ logger.info("NODE0110 IO Exception while closing IO stream " + PathUtil.cleanString(feedid) + " user " + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip) + " " + ioe.toString(), ioe);
+
+ throw ioe;
+ }
+
+ StatusLog.logPub(pubid, feedid, logurl, req.getMethod(), ctype, data.length(), ip, user, HttpServletResponse.SC_NO_CONTENT);
} catch (IOException ioe) {
- logger.info(
- "NODE0110 IO Exception receiving publish attempt for feed " + feedid + " user " + user + " ip " + ip
- + " " + ioe.toString(), ioe);
+ logger.info("NODE0110 IO Exception receiving publish attempt for feed " + feedid + " user " + user + " ip " + ip + " " + ioe.toString(), ioe);
eelflogger.info(EelfMsgs.EXIT);
throw ioe;
} finally {
@@ -407,27 +456,32 @@ public class NodeServlet extends HttpServlet {
try {
is.close();
} catch (Exception e) {
+ logger.error("NODE0530 Exception common: " + e);
}
}
if (dos != null) {
try {
dos.close();
} catch (Exception e) {
+ logger.error("NODE0531 Exception common: " + e);
}
}
if (mw != null) {
try {
mw.close();
} catch (Exception e) {
+ logger.error("NODE0532 Exception common: " + e);
}
}
try {
data.delete();
} catch (Exception e) {
+ logger.error("NODE0533 Exception common: " + e);
}
try {
meta.delete();
} catch (Exception e) {
+ logger.error("NODE0534 Exception common: " + e);
}
}
}
@@ -448,7 +502,7 @@ public class NodeServlet extends HttpServlet {
int subId = Integer.parseInt(subscriptionId);
pubid = fileid.substring(i + 1);
String errorMessage = "Unable to delete files (" + pubid + ", " + pubid + ".M) from DR Node: "
- + config.getMyName() + ".";
+ + config.getMyName() + ".";
int subIdDir = subId - (subId % 100);
if (!isAuthorizedToDelete(resp, subscriptionId, errorMessage)) {
return;
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java
new file mode 100644
index 00000000..a4034410
--- /dev/null
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java
@@ -0,0 +1,88 @@
+/**
+ * -
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * <p>
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.dmaap.datarouter.node;
+
+/**
+ * FORTIFY SCAN FIXES
+ * <p>This utility is used for Fortify fixes. It validates the path URL formed from
+ * the string passed in the request parameters.</p>
+ *
+ */
+class PathUtil {
+
+ /**
+ * This method takes a String as the parameter and returns the filtered path string.
+ * @param aString String to clean
+ * @return A cleaned String
+ */
+ static String cleanString(String aString) {
+ if (aString == null) return null;
+ String cleanString = "";
+ for (int i = 0; i < aString.length(); ++i) {
+ cleanString += cleanChar(aString.charAt(i));
+ }
+ return cleanString;
+ }
+
+ /**
+ * This method passes through characters that are valid in a path string and replaces any other character with '%'.
+ * @param aChar The char to be cleaned
+ * @return The cleaned char
+ */
+ private static char cleanChar(char aChar) {
+ // 0 - 9
+ for (int i = 48; i < 58; ++i) {
+ if (aChar == i) return (char) i;
+ }
+ // 'A' - 'Z'
+ for (int i = 65; i < 91; ++i) {
+ if (aChar == i) return (char) i;
+ }
+ // 'a' - 'z'
+ for (int i = 97; i < 123; ++i) {
+ if (aChar == i) return (char) i;
+ }
+ // other valid characters
+ switch (aChar) {
+ case '/':
+ return '/';
+ case '.':
+ return '.';
+ case '-':
+ return '-';
+ case ':':
+ return ':';
+ case '?':
+ return '?';
+ case '&':
+ return '&';
+ case '=':
+ return '=';
+ case '#':
+ return '#';
+ case '_':
+ return '_';
+ case ' ':
+ return ' ';
+ }
+ return '%';
+ }
+}
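For reference, a minimal sketch (not part of the diff) of how the sanitizer above behaves when NodeServlet builds a spool path; the input value is illustrative:

    // Illustrative only: digits, letters, '/', '.', '-', ':', '?', '&', '=', '#', '_' and
    // spaces pass through cleanString unchanged; every other character becomes '%'.
    String raw = "/opt/app/datartr/spool/f/1/12345\r\n";
    String safe = PathUtil.cleanString(raw);
    // safe == "/opt/app/datartr/spool/f/1/12345%%"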
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java
index 77c5e996..a9c5c6fe 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java
@@ -135,7 +135,17 @@ public class ProvData {
String fid = gvas(jfeed, "feedid");
String fname = gvas(jfeed, "name");
String fver = gvas(jfeed, "version");
- pfv.add(new NodeConfig.ProvFeed(fid, fname + "//" + fver, stat));
+ String createdDate = gvas(jfeed, "created_date");
+ /*
+ * START - AAF changes
+ * TDP EPIC US# 307413
+ * Passing aafInstance to ProvFeed from feeds json passed by prov to identify legacy/AAF feeds
+ */
+ String aafInstance = gvas(jfeed, "aaf_instance");
+ pfv.add(new NodeConfig.ProvFeed(fid, fname + "//" + fver, stat, createdDate, aafInstance));
+ /*
+ * END - AAF changes
+ */
JSONObject jauth = jfeed.optJSONObject("authorization");
if (jauth == null) {
continue;
@@ -175,7 +185,8 @@ public class ProvData {
boolean use100 = jdel.getBoolean("use100");
boolean privilegedSubscriber = jsub.getBoolean("privilegedSubscriber");
boolean decompress = jsub.getBoolean("decompress");
- psv.add(new NodeConfig.ProvSubscription(sid, fid, delurl, id, NodeUtils.getAuthHdr(id, password), monly, use100, privilegedSubscriber, decompress));
+ boolean followRedirect = jsub.getBoolean("follow_redirect");
+ psv.add(new NodeConfig.ProvSubscription(sid, fid, delurl, id, NodeUtils.getAuthHdr(id, password), monly, use100, privilegedSubscriber, followRedirect, decompress));
}
}
JSONObject jparams = jcfg.optJSONObject("parameters");
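The two new fields read above match the keys added to the NodeConfigTest fixture later in this change; a hedged sketch of how an equivalent provisioning fragment could be built, with all values other than the two new keys being illustrative:

    // Sketch only, mirroring the NodeConfigTest setup; values besides the two new keys are made up.
    JSONObject feed = new JSONObject();
    feed.put("feedid", "1");
    feed.put("aaf_instance", "legacy");         // new: marks the feed as legacy or AAF-protected
    JSONObject subscription = new JSONObject();
    subscription.put("subid", "1");
    subscription.put("follow_redirect", false); // new: per-subscription redirect handling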
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java
index 2e83e222..6f74df48 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java
@@ -24,8 +24,6 @@
package org.onap.dmaap.datarouter.node;
-import java.net.*;
-
/**
* Compare IP addresses as byte arrays to a subnet specified as a CIDR
*/
diff --git a/datarouter-node/src/main/resources/drNodeCadi.properties b/datarouter-node/src/main/resources/drNodeCadi.properties
new file mode 100644
index 00000000..8dfcab1c
--- /dev/null
+++ b/datarouter-node/src/main/resources/drNodeCadi.properties
@@ -0,0 +1,23 @@
+cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
+cadi_keyfile=/opt/app/datartr/aaf_certs/org.onap.dmaap-dr.keyfile
+cadi_keystore=/opt/app/datartr/aaf_certs/org.onap.dmaap-dr.jks
+cadi_keystore_password=]3V)($O&.Mv]W{f8^]6SxGNL
+cadi_key_password=]3V)($O&.Mv]W{f8^]6SxGNL
+cadi_alias=dmaap-dr-node@dmaap-dr.onap.org
+cadi_truststore=/opt/app/datartr/aaf_certs/org.onap.dmaap-dr.trust.jks
+cadi_truststore_password=(Rd,&{]%ePdp}4JZjqoJ2G+g
+
+aaf_env=DEV
+aaf_locate_url=https://aaf-onap-test.osaaf.org:8095
+aaf_oauth2_introspect_url=https://AAF_LOCATE_URL/AAF_NS.introspect:2.1/introspect
+aaf_oauth2_token_url=https://AAF_LOCATE_URL/AAF_NS.token:2.1/token
+aaf_url=https://AAF_LOCATE_URL/AAF_NS.service:2.1
+cadi_protocols=TLSv1.1,TLSv1.2
+cm_url=https://AAF_LOCATE_URL/AAF_NS.cm:2.1
+fs_url=https://AAF_LOCATE_URL/AAF_NS.fs.2.1
+gui_url=https://AAF_LOCATE_URL/AAF_NS.gui.2.1
+
+cadi_latitude=53.423
+cadi_longitude=7.940
+
+cadi_loglevel=DEBUG
\ No newline at end of file
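DRNodeCadiFilter is constructed from a CADI PropAccess, as the new unit test further down shows; a hedged sketch of wiring the filter to this file, where the runtime path of the properties file is an assumption:

    // Sketch only: cadi_prop_files is the standard CADI property for locating a properties file;
    // the path used here is assumed, not taken from the diff.
    PropAccess access = new PropAccess("cadi_prop_files=/opt/app/datartr/etc/drNodeCadi.properties");
    DRNodeCadiFilter cadiFilter = new DRNodeCadiFilter(false, access);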
diff --git a/datarouter-node/src/main/resources/node.properties b/datarouter-node/src/main/resources/node.properties
index 8b5568bc..27e91c90 100644
--- a/datarouter-node/src/main/resources/node.properties
+++ b/datarouter-node/src/main/resources/node.properties
@@ -21,91 +21,83 @@
# *
#-------------------------------------------------------------------------------
#
-# Configuration parameters fixed at startup for the DataRouter node
+# Configuration parameters set at startup for the DataRouter node
#
# URL to retrieve dynamic configuration
-#
-#ProvisioningURL: ${DRTR_PROV_INTURL}
-ProvisioningURL=https://dmaap-dr-prov:8443/internal/prov
-
+ProvisioningURL = https://dmaap-dr-prov:8443/internal/prov
#
# URL to upload PUB/DEL/EXP logs
-#
-#LogUploadURL: ${DRTR_LOG_URL}
-LogUploadURL=https://dmaap-dr-prov:8443/internal/logs
-
+LogUploadURL = https://dmaap-dr-prov:8443/internal/logs
#
# The port number for http as seen within the server
-#
-#IntHttpPort: ${DRTR_NODE_INTHTTPPORT:-8080}
-IntHttpPort=8080
+IntHttpPort = 8080
#
# The port number for https as seen within the server
-#
-IntHttpsPort=8443
+IntHttpsPort = 8443
#
# The external port number for https taking port mapping into account
+ExtHttpsPort = 443
#
-ExtHttpsPort=443
-#
-# The minimum interval between fetches of the dynamic configuration
-# from the provisioning server
-#
-MinProvFetchInterval=10000
+# The minimum interval between fetches of the dynamic configuration from the provisioning server
+MinProvFetchInterval = 10000
#
# The minimum interval between saves of the redirection data file
-#
-MinRedirSaveInterval=10000
+MinRedirSaveInterval = 10000
#
# The path to the directory where log files are stored
-#
-LogDir=/opt/app/datartr/logs
+LogDir = /opt/app/datartr/logs
#
# The retention interval (in days) for log files
-#
-LogRetention=30
+LogRetention = 30
#
# The path to the directories where data and meta data files are stored
-#
-SpoolDir=/opt/app/datartr/spool
+SpoolDir = /opt/app/datartr/spool
#
# The path to the redirection data file
-#
-#RedirectionFile: etc/redirections.dat
+RedirectionFile = etc/redirections.dat
#
# The type of keystore for https
-KeyStoreType: jks
+KeyStoreType = jks
#
# The path to the keystore for https
-#
-KeyStoreFile:/opt/app/datartr/aaf_certs/org.onap.dmaap-dr.jks
+KeyStoreFile = /opt/app/datartr/aaf_certs/org.onap.dmaap-dr.jks
#
# The password for the https keystore
-#
KeyStorePassword=]3V)($O&.Mv]W{f8^]6SxGNL
#
# The password for the private key in the https keystore
-#
KeyPassword=]3V)($O&.Mv]W{f8^]6SxGNL
#
# The type of truststore for https
-#
-TrustStoreType=jks
+TrustStoreType = jks
#
# The path to the truststore for https
-#
-#TrustStoreFile=/usr/lib/jvm/java-8-oracle/jre/lib/security/cacerts
-TrustStoreFile=/opt/app/datartr/aaf_certs/org.onap.dmaap-dr.trust.jks
+TrustStoreFile = /opt/app/datartr/aaf_certs/org.onap.dmaap-dr.trust.jks
#
# The password for the https truststore
-#
TrustStorePassword=(Rd,&{]%ePdp}4JZjqoJ2G+g
#
# The path to the file used to trigger an orderly shutdown
-#
-QuiesceFile=etc/SHUTDOWN
+QuiesceFile = etc/SHUTDOWN
#
# The key used to generate passwords for node to node transfers
+NodeAuthKey = Node123!
+#
+# DR_NODE DEFAULT ENABLED TLS PROTOCOLS
+NodeHttpsProtocols = TLSv1.1|TLSv1.2
+#
+# AAF type to generate permission string
+AAFType = org.onap.dmaap-dr.feed
+#
+# AAF default instance to generate permission string - default should be legacy
+AAFInstance = legacy
+#
+# AAF action to generate permission string - default should be publish
+AAFAction = publish
+#
+# AAF URL to connect to AAF server
+AafUrl = https://aaf-onap-test.osaaf.org:8095
#
-NodeAuthKey=Node123!
+# AAF CADI enabled flag
+CadiEnabled = false
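The three AAF keys above are each described as parts of the generated permission string; assuming the usual type|instance|action layout, the defaults would compose as in this sketch (the composing code is not part of this diff):

    // Hedged sketch: defaults from node.properties combined into an AAF permission string.
    String aafPermission = "org.onap.dmaap-dr.feed" + "|" + "legacy" + "|" + "publish";
    // -> "org.onap.dmaap-dr.feed|legacy|publish"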
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java
new file mode 100644
index 00000000..f6737b1e
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java
@@ -0,0 +1,121 @@
+/**-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.onap.aaf.cadi.PropAccess;
+import org.onap.aaf.cadi.filter.CadiFilter;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.api.support.membermodification.MemberMatcher;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+import static org.mockito.Mockito.*;
+
+@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeConfigManager")
+@PrepareForTest({CadiFilter.class})
+@RunWith(PowerMockRunner.class)
+public class DRNodeCadiFilterTest
+{
+
+ @Mock
+ private PropAccess access;
+
+ @Mock
+ private HttpServletRequest request;
+
+ @Mock
+ private HttpServletResponse response;
+
+ @Mock
+ private FilterChain chain;
+
+ private DRNodeCadiFilter cadiFilter;
+
+
+ @Before
+ public void setUp() throws ServletException {
+ cadiFilter = new DRNodeCadiFilter(false, access);
+ }
+
+ @Test
+ public void Given_doFilter_Called_And_Method_Is_GET_And_AAF_DB_Instance_Is_NULL_Then_Chain_doFilter_Called() throws Exception {
+ PowerMockito.mockStatic(NodeConfigManager.class);
+ NodeConfigManager config = mock(NodeConfigManager.class);
+
+ PowerMockito.when(NodeConfigManager.getInstance()).thenReturn(config);
+ PowerMockito.when(config.getAafInstance("/other/5")).thenReturn("legacy");
+ when(request.getPathInfo()).thenReturn("/publish/5");
+ when(request.getMethod()).thenReturn("GET");
+ cadiFilter.doFilter(request,response,chain);
+ verify(chain, times(1)).doFilter(request, response);
+ }
+
+ @Test
+ public void Given_doFilter_Called_And_Method_Is_GET_And_Path_Includes_Internal_Then_Chain_doFilter_Called() throws Exception {
+ PowerMockito.mockStatic(NodeConfigManager.class);
+ NodeConfigManager config = mock(NodeConfigManager.class);
+
+ PowerMockito.when(NodeConfigManager.getInstance()).thenReturn(config);
+ PowerMockito.when(config.getAafInstance("/other/5")).thenReturn("legacy");
+ when(request.getPathInfo()).thenReturn("/internal/5");
+ when(request.getMethod()).thenReturn("GET");
+ cadiFilter.doFilter(request,response,chain);
+ verify(chain, times(1)).doFilter(request, response);
+ }
+
+ @Test
+ public void Given_doFilter_Called_And_Method_Is_GET_And_AAF_DB_Is_Not_Null_Then_Super_doFilter_Called() throws Exception {
+ PowerMockito.mockStatic(NodeConfigManager.class);
+ NodeConfigManager config = mock(NodeConfigManager.class);
+
+ PowerMockito.when(NodeConfigManager.getInstance()).thenReturn(config);
+ PowerMockito.when(config.getAafInstance("5")).thenReturn("EXISTS");
+ when(request.getPathInfo()).thenReturn("/publish/5/fileId");
+ when(request.getMethod()).thenReturn("GET");
+ PowerMockito.suppress(MemberMatcher.methodsDeclaredIn(CadiFilter.class));
+ cadiFilter.doFilter(request,response,chain);
+ verify(chain, times(0)).doFilter(request, response);
+ }
+
+ @Test
+ public void Given_getFileid_Called_And_SendError_Fails_Then_Throw_IOException_And_Call_chain_doFilter() throws Exception {
+ PowerMockito.mockStatic(NodeConfigManager.class);
+ NodeConfigManager config = mock(NodeConfigManager.class);
+
+ PowerMockito.when(NodeConfigManager.getInstance()).thenReturn(config);
+ when(request.getPathInfo()).thenReturn("/publish/5");
+ when(request.getMethod()).thenReturn("DELETE");
+ doThrow(new IOException()).when(response).sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid request URI. Expecting <feed-publishing-url>/<fileid>. Possible missing fileid.");
+ cadiFilter.doFilter(request,response,chain);
+ verify(chain, times(1)).doFilter(request, response);
+ }
+}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java
index 97904a5e..9a3d82e5 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java
@@ -22,6 +22,7 @@
******************************************************************************/
package org.onap.dmaap.datarouter.node;
+
import org.apache.commons.lang3.reflect.FieldUtils;
import org.junit.Before;
import org.junit.Test;
@@ -29,12 +30,10 @@ import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.powermock.modules.junit4.PowerMockRunner;
-import static org.junit.Assert.*;
import java.io.File;
-
-
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.when;
@RunWith(PowerMockRunner.class)
public class DeliveryQueueTest {
@@ -55,7 +54,7 @@ public class DeliveryQueueTest {
}
@Test
- public void Given_New_DeliveryQueue_Directory_Is_Created_As_Defined_By_DestInfo() throws Exception {
+ public void Given_New_DeliveryQueue_Directory_Is_Created_As_Defined_By_DestInfo() {
when(destInfo.getSpool()).thenReturn("tmp");
File file = new File("tmp");
assertTrue(file.exists());
@@ -63,14 +62,14 @@ public class DeliveryQueueTest {
}
@Test
- public void Given_Delivery_Task_Failed_And_Resume_Time_Not_Reached_Return_Null() throws Exception{
+ public void Given_Delivery_Task_Failed_And_Resume_Time_Not_Reached_Return_Null() throws Exception {
FieldUtils.writeField(deliveryQueue,"failed",true,true);
FieldUtils.writeField(deliveryQueue,"resumetime",System.currentTimeMillis()*2,true);
assertNull(deliveryQueue.peekNext());
}
@Test
- public void Given_Delivery_Task_Return_Next_Delivery_Task_Id() throws Exception{
+ public void Given_Delivery_Task_Return_Next_Delivery_Task_Id() throws Exception {
prepareFiles();
when(destInfo.getSpool()).thenReturn(dirPath);
deliveryQueue = new DeliveryQueue(deliveryQueueHelper, destInfo);
@@ -81,19 +80,19 @@ public class DeliveryQueueTest {
}
@Test
- public void Given_Delivery_Task_Cancel_And_FileId_Is_Null_Return_Zero() throws Exception{
+ public void Given_Delivery_Task_Cancel_And_FileId_Is_Null_Return_Zero() {
long rc = deliveryQueue.cancelTask("123.node.datarouternew.com");
assertEquals(0, rc);
}
- private void prepareFiles() throws Exception{
+ private void prepareFiles() throws Exception {
createFolder(dirPath);
createFile(FileName1, dirPath);
String[] files = new String[2];
files[0] = dirPath + FileName1;
}
- private void createFolder(String dirName) throws Exception{
+ private void createFolder(String dirName) throws Exception {
String dirPath = dirName;
File newDirectory = new File(dirPath);
@@ -101,13 +100,13 @@ public class DeliveryQueueTest {
if (isCreated) {
System.out.println("1. Successfully created directories, path: " + newDirectory.getCanonicalPath());
} else if (newDirectory.exists()) {
- System.out.printf("1. Directory path already exist, path: " + newDirectory.getCanonicalPath());
+ System.out.print("1. Directory path already exist, path: " + newDirectory.getCanonicalPath());
} else {
System.out.println("1. Unable to create directory");
}
}
- private void createFile( String file, String dir) throws Exception{
+ private void createFile(String file, String dir) throws Exception {
String FileName = file;
String dirPath = dir;
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
index 4ca907f7..efa43e11 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
@@ -97,7 +97,7 @@ public class DeliveryTest {
private DestInfo[] createDestInfoObjects() {
DestInfo[] destInfos = new DestInfo[1];
- DestInfo destInfo = new DestInfo("node.datarouternew.com", "spool/s/0/1", "1", "logs/", "/subs/1", "user1", "Basic dXNlcjE6cGFzc3dvcmQx", false, true, false, false);
+ DestInfo destInfo = new DestInfo("node.datarouternew.com", "spool/s/0/1", "1", "logs/", "/subs/1", "user1", "Basic dXNlcjE6cGFzc3dvcmQx", false, true, false, false, false);
destInfos[0] = destInfo;
return destInfos;
}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java
index 4b614d56..7dddd67a 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java
@@ -193,6 +193,7 @@ public class NodeConfigTest {
endpointAddrs.put("172.0.0.1");
auth.put("endpoint_addrs", endpointAddrs);
feed.put("authorization", auth);
+ feed.put("aaf_instance", "legacy");
feeds.put(feed);
provData.put("feeds", feeds);
}
@@ -211,6 +212,7 @@ public class NodeConfigTest {
delivery.put("use100", true);
subscription.put("delivery", delivery);
subscription.put("privilegedSubscriber", false);
+ subscription.put("follow_redirect", false);
subscription.put("decompress", false);
subscriptions.put(subscription);
provData.put("subscriptions", subscriptions);
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java
index 065565d3..99e34c6f 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java
@@ -59,7 +59,9 @@ public class NodeServletTest {
@Mock
private HttpServletResponse response;
- ListAppender<ILoggingEvent> listAppender;
+ private ListAppender<ILoggingEvent> listAppender;
+
+ private NodeConfigManager config = mock(NodeConfigManager.class);
@Before
public void setUp() throws Exception {
@@ -216,6 +218,17 @@ public class NodeServletTest {
}
@Test
+ public void Given_Request_Is_HTTP_PUT_On_Publish_On_AAF_Feed_And_Cadi_Enabled_And_No_Permissions_Then_Forbidden_Response_Is_Generated() throws Exception {
+ when(config.getCadiEnabeld()).thenReturn(true);
+ when(config.getAafInstance("1")).thenReturn("*");
+ when(request.getPathInfo()).thenReturn("/publish/1/fileName");
+ setHeadersForValidRequest(true);
+ nodeServlet.doPut(request, response);
+ verify(response).sendError(eq(HttpServletResponse.SC_FORBIDDEN), argThat(notNullValue(String.class)));
+ verifyEnteringExitCalled(listAppender);
+ }
+
+ @Test
public void Given_Request_Is_HTTP_DELETE_On_Publish_With_Meta_Data_Malformed_Then_Bad_Request_Response_Is_Generated() throws Exception {
when(request.getPathInfo()).thenReturn("/publish/1/fileName");
setHeadersForValidRequest(false);
@@ -286,7 +299,6 @@ public class NodeServletTest {
}
private void setUpConfig() throws IllegalAccessException {
- NodeConfigManager config = mock(NodeConfigManager.class);
PowerMockito.mockStatic(NodeConfigManager.class);
when(config.isShutdown()).thenReturn(false);
when(config.isConfigured()).thenReturn(true);