From 63b13a0cddf45b4cfd1691dd5b95a205af355898 Mon Sep 17 00:00:00 2001 From: efiacor Date: Fri, 16 Dec 2022 12:12:30 +0000 Subject: [DMAAP-DR] Remove cadi/aaf from dr-node Signed-off-by: efiacor Change-Id: Iba1b7d8c087a4f68c3a8a517145abf70848ee030 Issue-ID: DMAAP-1642 --- .../org/onap/dmaap/datarouter/node/Delivery.java | 324 ------- .../onap/dmaap/datarouter/node/DeliveryQueue.java | 449 --------- .../dmaap/datarouter/node/DeliveryQueueHelper.java | 108 --- .../onap/dmaap/datarouter/node/DeliveryTask.java | 472 --------- .../dmaap/datarouter/node/DeliveryTaskHelper.java | 84 -- .../org/onap/dmaap/datarouter/node/DestInfo.java | 2 + .../dmaap/datarouter/node/DestInfoBuilder.java | 18 +- .../org/onap/dmaap/datarouter/node/LogManager.java | 240 ----- .../dmaap/datarouter/node/NodeAafPropsUtils.java | 58 -- .../org/onap/dmaap/datarouter/node/NodeConfig.java | 1010 -------------------- .../dmaap/datarouter/node/NodeConfigManager.java | 347 ++++--- .../org/onap/dmaap/datarouter/node/NodeRunner.java | 8 +- .../org/onap/dmaap/datarouter/node/NodeServer.java | 35 +- .../onap/dmaap/datarouter/node/NodeServlet.java | 154 +-- .../org/onap/dmaap/datarouter/node/NodeUtils.java | 334 ------- .../org/onap/dmaap/datarouter/node/PathFinder.java | 154 --- .../org/onap/dmaap/datarouter/node/PathUtil.java | 106 -- .../org/onap/dmaap/datarouter/node/ProvData.java | 405 -------- .../org/onap/dmaap/datarouter/node/PublishId.java | 56 -- .../org/onap/dmaap/datarouter/node/StatusLog.java | 288 ------ .../onap/dmaap/datarouter/node/SubnetMatcher.java | 74 -- .../org/onap/dmaap/datarouter/node/TaskList.java | 2 +- .../dmaap/datarouter/node/config/NodeConfig.java | 959 +++++++++++++++++++ .../dmaap/datarouter/node/config/PathFinder.java | 155 +++ .../dmaap/datarouter/node/config/ProvData.java | 397 ++++++++ .../datarouter/node/config/SubnetMatcher.java | 76 ++ .../dmaap/datarouter/node/delivery/Delivery.java | 326 +++++++ .../datarouter/node/delivery/DeliveryQueue.java | 451 +++++++++ 
.../node/delivery/DeliveryQueueHelper.java | 110 +++ .../datarouter/node/delivery/DeliveryTask.java | 474 +++++++++ .../node/delivery/DeliveryTaskHelper.java | 86 ++ .../dmaap/datarouter/node/eelf/MetricsFilter.java | 2 - .../onap/dmaap/datarouter/node/log/LogManager.java | 245 +++++ .../onap/dmaap/datarouter/node/log/StatusLog.java | 290 ++++++ .../datarouter/node/utils/NodeTlsManager.java | 169 ++++ .../dmaap/datarouter/node/utils/NodeUtils.java | 253 +++++ .../resources/aaf/org.onap.dmaap-dr.cred.props | 17 - .../main/resources/aaf/org.onap.dmaap-dr.keyfile | 27 - .../resources/aaf/org.onap.dmaap-dr.location.props | 8 - .../src/main/resources/aaf/org.onap.dmaap-dr.p12 | Bin 4217 -> 0 bytes .../src/main/resources/aaf/org.onap.dmaap-dr.props | 24 - .../main/resources/aaf/org.onap.dmaap-dr.trust.jks | Bin 1413 -> 0 bytes datarouter-node/src/main/resources/node.properties | 42 +- .../dmaap/datarouter/node/DeliveryQueueTest.java | 3 + .../dmaap/datarouter/node/DeliveryTaskTest.java | 2 + .../onap/dmaap/datarouter/node/DeliveryTest.java | 5 +- .../onap/dmaap/datarouter/node/LogManagerTest.java | 4 +- .../datarouter/node/NodeAafPropsUtilsTest.java | 41 - .../datarouter/node/NodeConfigManagerTest.java | 13 +- .../onap/dmaap/datarouter/node/NodeConfigTest.java | 5 +- .../onap/dmaap/datarouter/node/NodeServerTest.java | 20 +- .../dmaap/datarouter/node/NodeServletTest.java | 15 +- .../dmaap/datarouter/node/NodeTlsManagerTest.java | 51 + .../onap/dmaap/datarouter/node/NodeUtilsTest.java | 15 +- .../onap/dmaap/datarouter/node/PathFinderTest.java | 2 + .../onap/dmaap/datarouter/node/ProvDataTest.java | 1 + .../onap/dmaap/datarouter/node/StatusLogTest.java | 1 + .../resources/aaf/org.onap.dmaap-dr.cred.props | 17 - .../test/resources/aaf/org.onap.dmaap-dr.keyfile | 27 - .../resources/aaf/org.onap.dmaap-dr.location.props | 8 - .../src/test/resources/aaf/org.onap.dmaap-dr.p12 | Bin 4217 -> 0 bytes .../src/test/resources/aaf/org.onap.dmaap-dr.props | 24 - 
.../test/resources/aaf/org.onap.dmaap-dr.trust.jks | Bin 1413 -> 0 bytes .../resources/certs/org.onap.dmaap-dr-node.p12 | Bin 0 -> 4596 bytes .../src/test/resources/certs/truststore.jks | Bin 0 -> 3234 bytes .../src/test/resources/node_test.properties | 43 +- .../test/resources/org.onap.dmaap-dr-test-cert.jks | Bin 3647 -> 0 bytes datarouter-node/src/test/resources/prov_data.json | 2 - 68 files changed, 4416 insertions(+), 4722 deletions(-) delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueueHelper.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeAafPropsUtils.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java delete mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java create mode 100644 
datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/NodeConfig.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/PathFinder.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/ProvData.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/SubnetMatcher.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/Delivery.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryQueue.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryQueueHelper.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryTask.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryTaskHelper.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/log/LogManager.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/log/StatusLog.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/utils/NodeTlsManager.java create mode 100644 datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/utils/NodeUtils.java delete mode 100644 datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.cred.props delete mode 100644 datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.keyfile delete mode 100644 datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.location.props delete mode 100644 datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.p12 delete mode 100644 datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.props delete mode 100644 datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.trust.jks delete mode 100644 datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeAafPropsUtilsTest.java create mode 100644 
datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeTlsManagerTest.java delete mode 100644 datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.cred.props delete mode 100644 datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.keyfile delete mode 100644 datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.location.props delete mode 100644 datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.p12 delete mode 100644 datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.props delete mode 100644 datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.trust.jks create mode 100644 datarouter-node/src/test/resources/certs/org.onap.dmaap-dr-node.p12 create mode 100644 datarouter-node/src/test/resources/certs/truststore.jks delete mode 100644 datarouter-node/src/test/resources/org.onap.dmaap-dr-test-cert.jks (limited to 'datarouter-node/src') diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java deleted file mode 100644 index 0326fb08..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java +++ /dev/null @@ -1,324 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - -package org.onap.dmaap.datarouter.node; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Objects; - -/** - * Main control point for delivering files to destinations. - * - *

The Delivery class manages assignment of delivery threads to delivery queues and creation and destruction of - * delivery queues as configuration changes. DeliveryQueues are assigned threads based on a modified round-robin - * approach giving priority to queues with more work as measured by both bytes to deliver and files to deliver and lower - * priority to queues that already have delivery threads working. A delivery thread continues to work for a delivery - * queue as long as that queue has more files to deliver. - */ -public class Delivery { - - private static final String TOTAL = " total="; - private static final String YELLOW = " yellow="; - private static EELFLogger logger = EELFManager.getInstance().getLogger(Delivery.class); - private double fdstart; - private double fdstop; - private int threads; - private int curthreads; - private NodeConfigManager config; - private HashMap dqs = new HashMap<>(); - private DeliveryQueue[] queues = new DeliveryQueue[0]; - private int qpos = 0; - private long nextcheck; - - /** - * Constructs a new Delivery system using the specified configuration manager. - * - * @param config The configuration manager for this delivery system. - */ - public Delivery(NodeConfigManager config) { - this.config = config; - Runnable cmon = this::checkconfig; - config.registerConfigTask(cmon); - } - - /** - * Reset the retry timer for a delivery queue. - */ - public synchronized void resetQueue(String spool) { - if (spool != null) { - DeliveryQueue dq = dqs.get(spool); - if (dq != null) { - dq.resetQueue(); - } - } - } - - /** - * Mark the task in spool a success. 
- */ - public synchronized boolean markTaskSuccess(String spool, String pubId) { - boolean succeeded = false; - if (spool != null) { - DeliveryQueue dq = dqs.get(spool); - if (dq != null) { - succeeded = dq.markTaskSuccess(pubId); - } - } - return succeeded; - } - - private void cleardir(String dir) { - if (dqs.get(dir) != null) { - return; - } - File fdir = new File(dir); - try { - for (File junk : fdir.listFiles()) { - if (junk.isFile()) { - Files.delete(fdir.toPath()); - } - } - Files.delete(fdir.toPath()); - } catch (IOException e) { - logger.error("Failed to delete file: " + fdir.getPath(), e); - } - } - - private void freeDiskCheck() { - File spoolfile = new File(config.getSpoolBase()); - long tspace = spoolfile.getTotalSpace(); - long start = (long) (tspace * fdstart); - long cur = spoolfile.getUsableSpace(); - if (cur >= start) { - return; - } - ArrayList cv = new ArrayList<>(); - for (String sdir : dqs.keySet()) { - for (String meta : (new File(sdir)).list()) { - if (!meta.endsWith(".M") || meta.charAt(0) == '.') { - continue; - } - cv.add(new DelItem(meta.substring(0, meta.length() - 2), sdir)); - } - } - DelItem[] items = cv.toArray(new DelItem[cv.size()]); - Arrays.sort(items); - long stop = (long) (tspace * fdstop); - logger.warn( - "NODE0501 Free disk space below red threshold. current=" + cur + " red=" + start + TOTAL + tspace); - if (determineFreeDiskSpace(spoolfile, tspace, stop, cur, items)) { - return; - } - cur = spoolfile.getUsableSpace(); - if (cur >= stop) { - logger.warn("NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop - + TOTAL + tspace); - return; - } - logger.warn( - "NODE0504 Unable to recover sufficient disk space to reach green status. 
current=" + cur + YELLOW - + stop + TOTAL + tspace); - } - - private void cleardirs() { - String basedir = config.getSpoolBase(); - String nbase = basedir + "/n"; - for (String nodedir : (new File(nbase)).list()) { - if (!nodedir.startsWith(".")) { - cleardir(nbase + "/" + nodedir); - } - } - String sxbase = basedir + "/s"; - for (String sxdir : (new File(sxbase)).list()) { - if (sxdir.startsWith(".")) { - continue; - } - File sxf = new File(sxbase + File.separator + sxdir); - for (String sdir : sxf.list()) { - if (!sdir.startsWith(".")) { - cleardir(sxbase + "/" + sxdir + "/" + sdir); - } - } - try { - if (sxf.list().length == 0) { - Files.delete(sxf.toPath()); // won't if anything still in it - } - } catch (IOException e) { - logger.error("Failed to delete file: " + sxf.getPath(), e); - } - } - } - - private synchronized void checkconfig() { - if (!config.isConfigured()) { - return; - } - fdstart = config.getFreeDiskStart(); - fdstop = config.getFreeDiskStop(); - threads = config.getDeliveryThreads(); - if (threads < 1) { - threads = 1; - } - DestInfo[] alldis = config.getAllDests(); - DeliveryQueue[] nqs = new DeliveryQueue[alldis.length]; - qpos = 0; - HashMap ndqs = new HashMap<>(); - for (DestInfo di : alldis) { - String spl = di.getSpool(); - DeliveryQueue dq = dqs.get(spl); - if (dq == null) { - dq = new DeliveryQueue(config, di); - } else { - dq.config(di); - } - ndqs.put(spl, dq); - nqs[qpos++] = dq; - } - queues = nqs; - dqs = ndqs; - cleardirs(); - while (curthreads < threads) { - curthreads++; - (new Thread("del-thread-" + curthreads) { - @Override - public void run() { - dodelivery(); - } - }).start(); - } - nextcheck = 0; - notifyAll(); - } - - private void dodelivery() { - DeliveryQueue dq; - while ((dq = getNextQueue()) != null) { - dq.run(); - } - } - - private synchronized DeliveryQueue getNextQueue() { - while (true) { - if (curthreads > threads) { - curthreads--; - return (null); - } - if (qpos < queues.length) { - DeliveryQueue dq = 
queues[qpos++]; - if (dq.isSkipSet()) { - continue; - } - nextcheck = 0; - notifyAll(); - return (dq); - } - long now = System.currentTimeMillis(); - if (now < nextcheck) { - try { - wait(nextcheck + 500 - now); - } catch (Exception e) { - logger.error("InterruptedException", e); - } - now = System.currentTimeMillis(); - } - if (now >= nextcheck) { - nextcheck = now + 5000; - qpos = 0; - freeDiskCheck(); - } - } - } - - private boolean determineFreeDiskSpace(File spoolfile, long tspace, long stop, long cur, DelItem[] items) { - for (DelItem item : items) { - long amount = dqs.get(item.getSpool()).cancelTask(item.getPublishId()); - logger.debug("NODE0502 Attempting to discard " + item.getSpool() + "/" + item.getPublishId() - + " to free up disk"); - if (amount > 0) { - cur += amount; - if (cur >= stop) { - cur = spoolfile.getUsableSpace(); - } - if (cur >= stop) { - logger.warn( - "NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop - + TOTAL + tspace); - return true; - } - } - } - return false; - } - - static class DelItem implements Comparable { - - private String pubid; - private String spool; - - public DelItem(String pubid, String spool) { - this.pubid = pubid; - this.spool = spool; - } - - public int compareTo(DelItem other) { - int diff = pubid.compareTo(other.pubid); - if (diff == 0) { - diff = spool.compareTo(other.spool); - } - return (diff); - } - - public String getPublishId() { - return (pubid); - } - - public String getSpool() { - return (spool); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - DelItem delItem = (DelItem) object; - return Objects.equals(pubid, delItem.pubid) - && Objects.equals(getSpool(), delItem.getSpool()); - } - - @Override - public int hashCode() { - return Objects.hash(pubid, getSpool()); - } - } -} diff --git 
a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java deleted file mode 100644 index d447bcc1..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java +++ /dev/null @@ -1,449 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import org.jetbrains.annotations.Nullable; - -/** - * Mechanism for monitoring and controlling delivery of files to a destination. - * - *

The DeliveryQueue class maintains lists of DeliveryTasks for a single - * destination (a subscription or another data router node) and assigns - * delivery threads to try to deliver them. It also maintains a delivery - * status that causes it to back off on delivery attempts after a failure. - * - *

If the most recent delivery result was a failure, then no more attempts - * will be made for a period of time. Initially, and on the first failure - * following a success, this delay will be DeliveryQueueHelper.getInitFailureTimer() (milliseconds). - * If, after this delay, additional failures occur, each failure will - * multiply the delay by DeliveryQueueHelper.getFailureBackoff() up to a - * maximum delay specified by DeliveryQueueHelper.getMaxFailureTimer(). - * Note that this behavior applies to the delivery queue as a whole and not - * to individual files in the queue. If multiple files are being - * delivered and one fails, the delay will be started. If a second - * delivery fails while the delay was active, it will not change the delay - * or change the duration of any subsequent delay. - * If, however, it succeeds, it will cancel the delay. - * The queue maintains 3 collections of files to deliver: A todoList of - * files that will be attempted, a working set of files that are being - * attempted, and a retry set of files that were attempted and failed. - * Whenever the todoList is empty and needs to be refilled, a scan of the - * spool directory is made and the file names sorted. Any files in the working set are ignored. - * If a DeliveryTask for the file is in the retry set, then that delivery - * task is placed on the todoList. Otherwise, a new DeliveryTask for the - * file is created and placed on the todoList. - * If, when a DeliveryTask is about to be removed from the todoList, its - * age exceeds DeliveryQueueHelper.getExpirationTimer(), then it is instead - * marked as expired. - * - *

A delivery queue also maintains a skip flag. This flag is true if the - * failure timer is active or if no files are found in a directory scan. - */ -public class DeliveryQueue implements Runnable, DeliveryTaskHelper { - private static EELFLogger logger = EELFManager.getInstance().getLogger(DeliveryQueue.class); - private DeliveryQueueHelper deliveryQueueHelper; - - private DestInfo destinationInfo; - private HashMap working = new HashMap<>(); - private HashMap retry = new HashMap<>(); - private int todoindex; - private boolean failed; - private long failduration; - private long resumetime; - private File dir; - private List todoList = new ArrayList<>(); - - /** - * Create a delivery queue for a given destination info. - */ - DeliveryQueue(DeliveryQueueHelper deliveryQueueHelper, DestInfo destinationInfo) { - this.deliveryQueueHelper = deliveryQueueHelper; - this.destinationInfo = destinationInfo; - dir = new File(destinationInfo.getSpool()); - dir.mkdirs(); - } - - /** - * Try to cancel a delivery task. - * - * @return The length of the task in bytes or 0 if the task cannot be cancelled. - */ - synchronized long cancelTask(String pubid) { - if (working.get(pubid) != null) { - return (0); - } - DeliveryTask dt = retry.get(pubid); - if (dt == null) { - for (int i = todoindex; i < todoList.size(); i++) { - DeliveryTask xdt = todoList.get(i); - if (xdt.getPublishId().equals(pubid)) { - dt = xdt; - break; - } - } - } - if (dt == null) { - dt = new DeliveryTask(this, pubid); - if (dt.getFileId() == null) { - return (0); - } - } - if (dt.isCleaned()) { - return (0); - } - StatusLog.logExp(dt.getPublishId(), dt.getFeedId(), dt.getSubId(), dt.getURL(), - dt.getMethod(), dt.getCType(), dt.getLength(), "diskFull", dt.getAttempts()); - dt.clean(); - return (dt.getLength()); - } - - /** - * Mark that a delivery task has succeeded. 
- */ - private synchronized void markSuccess(DeliveryTask task) { - working.remove(task.getPublishId()); - logger.info(task.getPublishId() + " marked as success."); - task.clean(); - failed = false; - failduration = 0; - } - - /** - * Mark that a delivery task has expired. - */ - private synchronized void markExpired(DeliveryTask task) { - logger.info(task.getPublishId() + " marked as expired."); - task.clean(); - } - - /** - * Mark that a delivery task has failed permanently. - */ - private synchronized void markFailNoRetry(DeliveryTask task) { - working.remove(task.getPublishId()); - logger.info(task.getPublishId() + " marked as failed permanently"); - task.clean(); - failed = false; - failduration = 0; - } - - private void fdupdate() { - if (!failed) { - failed = true; - if (failduration == 0) { - if (destinationInfo.isPrivilegedSubscriber()) { - failduration = deliveryQueueHelper.getWaitForFileProcessFailureTimer(); - } else { - failduration = deliveryQueueHelper.getInitFailureTimer(); - } - } - resumetime = System.currentTimeMillis() + failduration; - long maxdur = deliveryQueueHelper.getMaxFailureTimer(); - failduration = (long) (failduration * deliveryQueueHelper.getFailureBackoff()); - if (failduration > maxdur) { - failduration = maxdur; - } - } - } - - /** - * Mark that a delivery task has been redirected. - */ - private synchronized void markRedirect(DeliveryTask task) { - working.remove(task.getPublishId()); - logger.info(task.getPublishId() + " marked as redirected."); - retry.put(task.getPublishId(), task); - } - - /** - * Mark that a delivery task has temporarily failed. - */ - private synchronized void markFailWithRetry(DeliveryTask task) { - working.remove(task.getPublishId()); - logger.info(task.getPublishId() + " marked as temporarily failed."); - retry.put(task.getPublishId(), task); - fdupdate(); - } - - /** - * Get the next task. 
- */ - synchronized DeliveryTask getNext() { - DeliveryTask ret = peekNext(); - if (ret != null) { - todoindex++; - working.put(ret.getPublishId(), ret); - } - return (ret); - } - - /** - * Peek at the next task. - */ - synchronized DeliveryTask peekNext() { - long now = System.currentTimeMillis(); - long mindate = now - deliveryQueueHelper.getExpirationTimer(); - if (failed) { - if (now > resumetime) { - failed = false; - } else { - return (null); - } - } - while (true) { - if (todoindex >= todoList.size()) { - todoindex = 0; - todoList = new ArrayList<>(); - String[] files = dir.list(); - if (files != null) { - Arrays.sort(files); - scanForNextTask(files); - } - retry = new HashMap<>(); - } - return getDeliveryTask(mindate); - } - } - - /** - * Update the destination info for this delivery queue. - */ - public void config(DestInfo destinationInfo) { - this.destinationInfo = destinationInfo; - } - - /** - * Get the dest info. - */ - public DestInfo getDestinationInfo() { - return (destinationInfo); - } - - /** - * Get the config manager. - */ - public DeliveryQueueHelper getConfig() { - return (deliveryQueueHelper); - } - - /** - * Exceptional condition occurred during delivery. - */ - public void reportDeliveryExtra(DeliveryTask task, long sent) { - StatusLog.logDelExtra(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getLength(), sent); - } - - /** - * Message too old to deliver. - */ - void reportExpiry(DeliveryTask task) { - StatusLog.logExp(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), - task.getCType(), task.getLength(), "retriesExhausted", task.getAttempts()); - markExpired(task); - } - - /** - * Completed a delivery attempt. 
- */ - public void reportStatus(DeliveryTask task, int status, String xpubid, String location) { - if (status < 300) { - StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), - task.getCType(), task.getLength(), destinationInfo.getAuthUser(), status, xpubid); - if (destinationInfo.isPrivilegedSubscriber()) { - task.setResumeTime(System.currentTimeMillis() - + deliveryQueueHelper.getWaitForFileProcessFailureTimer()); - markFailWithRetry(task); - } else { - markSuccess(task); - } - } else if (status < 400 && deliveryQueueHelper.isFollowRedirects()) { - StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), - task.getCType(), task.getLength(), destinationInfo.getAuthUser(), status, location); - if (deliveryQueueHelper.handleRedirection(destinationInfo, location, task.getFileId())) { - markRedirect(task); - } else { - StatusLog.logExp(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), - task.getMethod(), task.getCType(), task.getLength(), "notRetryable", task.getAttempts()); - markFailNoRetry(task); - } - } else if (status < 500 && status != 429) { - // Status 429 is the standard response for Too Many Requests and indicates - // that a file needs to be delivered again at a later time. 
- StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), - task.getCType(), task.getLength(), destinationInfo.getAuthUser(), status, location); - StatusLog.logExp(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), - task.getCType(), task.getLength(), "notRetryable", task.getAttempts()); - markFailNoRetry(task); - } else { - StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), - task.getCType(), task.getLength(), destinationInfo.getAuthUser(), status, location); - markFailWithRetry(task); - } - } - - /** - * Delivery failed by reason of an exception. - */ - public void reportException(DeliveryTask task, Exception exception) { - StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), - task.getCType(), task.getLength(), destinationInfo.getAuthUser(), -1, exception.toString()); - deliveryQueueHelper.handleUnreachable(destinationInfo); - markFailWithRetry(task); - } - - /** - * Get the feed ID for a subscription. - * - * @param subid The subscription ID - * @return The feed ID - */ - public String getFeedId(String subid) { - return (deliveryQueueHelper.getFeedId(subid)); - } - - /** - * Get the URL to deliver a message to given the file ID. - */ - public String getDestURL(String fileid) { - return (deliveryQueueHelper.getDestURL(destinationInfo, fileid)); - } - - /** - * Deliver files until there's a failure or there are no more. 
- * files to deliver - */ - public void run() { - DeliveryTask task; - long endtime = System.currentTimeMillis() + deliveryQueueHelper.getFairTimeLimit(); - int filestogo = deliveryQueueHelper.getFairFileLimit(); - while ((task = getNext()) != null) { - logger.info("Processing file: " + task.getPublishId()); - task.run(); - if (--filestogo <= 0 || System.currentTimeMillis() > endtime) { - break; - } - } - } - - /** - * Is there no work to do for this queue right now?. - */ - synchronized boolean isSkipSet() { - return (peekNext() == null); - } - - /** - * Reset the retry timer. - */ - void resetQueue() { - resumetime = System.currentTimeMillis(); - } - - /** - * Get task if in queue and mark as success. - */ - boolean markTaskSuccess(String pubId) { - DeliveryTask task = working.get(pubId); - if (task != null) { - markSuccess(task); - return true; - } - task = retry.get(pubId); - if (task != null) { - retry.remove(pubId); - task.clean(); - resetQueue(); - failduration = 0; - return true; - } - return false; - } - - private void scanForNextTask(String[] files) { - for (String fname : files) { - String pubId = getPubId(fname); - if (pubId == null) { - continue; - } - DeliveryTask dt = retry.get(pubId); - if (dt == null) { - dt = new DeliveryTask(this, pubId); - } - todoList.add(dt); - } - } - - @Nullable - private DeliveryTask getDeliveryTask(long mindate) { - if (todoindex < todoList.size()) { - DeliveryTask dt = todoList.get(todoindex); - if (dt.isCleaned()) { - todoindex++; - } - if (destinationInfo.isPrivilegedSubscriber() && dt.getResumeTime() > System.currentTimeMillis()) { - retry.put(dt.getPublishId(), dt); - todoindex++; - } - if (dt.getDate() >= mindate) { - return (dt); - } - todoindex++; - reportExpiry(dt); - } - return null; - } - - @Nullable - private String getPubId(String fname) { - if (!fname.endsWith(".M")) { - return null; - } - String fname2 = fname.substring(0, fname.length() - 2); - long pidtime = 0; - int dot = fname2.indexOf('.'); - if (dot < 
1) { - return null; - } - try { - pidtime = Long.parseLong(fname2.substring(0, dot)); - } catch (Exception e) { - logger.error("Exception", e); - } - if (pidtime < 1000000000000L) { - return null; - } - if (working.get(fname2) != null) { - return null; - } - return fname2; - } -} \ No newline at end of file diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueueHelper.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueueHelper.java deleted file mode 100644 index 5427fafd..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueueHelper.java +++ /dev/null @@ -1,108 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -/** - * Interface to allow independent testing of the DeliveryQueue code - * - *

This interface represents all of the configuration information and - * feedback mechanisms that a delivery queue needs. - */ -public interface DeliveryQueueHelper { - /** - * Get the timeout (milliseconds) before retrying after an initial delivery failure. - */ - long getInitFailureTimer(); - - /** - * Get the timeout before retrying after delivery and wait for file processing. - */ - long getWaitForFileProcessFailureTimer(); - - /** - * Get the ratio between timeouts on consecutive delivery attempts. - */ - double getFailureBackoff(); - - /** - * Get the maximum timeout (milliseconds) between delivery attempts. - */ - long getMaxFailureTimer(); - - /** - * Get the expiration timer (milliseconds) for deliveries. - */ - long getExpirationTimer(); - - /** - * Get the maximum number of file delivery attempts before checking - * if another queue has work to be performed. - */ - int getFairFileLimit(); - - /** - * Get the maximum amount of time spent delivering files before checking if another queue has work to be performed. - */ - long getFairTimeLimit(); - - /** - * Get the URL for delivering a file. - * - * @param destinationInfo The destination information for the file to be delivered. - * @param fileid The file id for the file to be delivered. - * @return The URL for delivering the file (typically, destinationInfo.getURL() + "/" + fileid). - */ - String getDestURL(DestInfo destinationInfo, String fileid); - - /** - * Forget redirections associated with a subscriber. - * - * @param destinationInfo Destination information to forget - */ - void handleUnreachable(DestInfo destinationInfo); - - /** - * Post redirection for a subscriber. - * - * @param destinationInfo Destination information to update - * @param location Location given by subscriber - * @param fileid File ID of request - * @return true if this 3xx response is retryable, otherwise, false. 
- */ - boolean handleRedirection(DestInfo destinationInfo, String location, String fileid); - - /** - * Should I handle 3xx responses differently than 4xx responses?. - */ - boolean isFollowRedirects(); - - /** - * Get the feed ID for a subscription. - * - * @param subid The subscription ID - * @return The feed ID - */ - String getFeedId(String subid); -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java deleted file mode 100644 index 55ad6aa8..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java +++ /dev/null @@ -1,472 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID; -import static org.onap.dmaap.datarouter.node.NodeUtils.isFiletypeGzip; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.net.HttpURLConnection; -import java.net.ProtocolException; -import java.net.URL; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.UUID; -import java.util.zip.GZIPInputStream; -import org.jetbrains.annotations.Nullable; -import org.onap.dmaap.datarouter.node.eelf.EelfMsgs; -import org.slf4j.MDC; - -/** - * A file to be delivered to a destination. - * - *

A Delivery task represents a work item for the data router - a file that needs to be delivered and provides - * mechanisms to get information about the file and its delivery data as well as to attempt delivery. - */ -public class DeliveryTask implements Runnable, Comparable { - - private static final String DECOMPRESSION_STATUS = "Decompression_Status"; - private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(DeliveryTask.class); - private DeliveryTaskHelper deliveryTaskHelper; - private String pubid; - private DestInfo destInfo; - private String spool; - private File datafile; - private File metafile; - private long length; - private long date; - private String method; - private String fileid; - private String ctype; - private String url; - private String feedid; - private String subid; - private int attempts; - private boolean followRedirects; - private String[][] hdrs; - private String newInvocationId; - private long resumeTime; - - - /** - * Create a delivery task for a given delivery queue and pub ID. - * - * @param deliveryTaskHelper The delivery task helper for the queue this task is in. - * @param pubid The publish ID for this file. 
This is used as the base for the file name in the spool directory and - * is of the form (milliseconds since 1970).(fqdn of initial data router node) - */ - DeliveryTask(DeliveryTaskHelper deliveryTaskHelper, String pubid) { - this.deliveryTaskHelper = deliveryTaskHelper; - this.pubid = pubid; - destInfo = deliveryTaskHelper.getDestinationInfo(); - subid = destInfo.getSubId(); - this.followRedirects = destInfo.isFollowRedirects(); - feedid = destInfo.getLogData(); - spool = destInfo.getSpool(); - String dfn = spool + File.separator + pubid; - String mfn = dfn + ".M"; - datafile = new File(spool + File.separator + pubid); - metafile = new File(mfn); - boolean monly = destInfo.isMetaDataOnly(); - date = Long.parseLong(pubid.substring(0, pubid.indexOf('.'))); - resumeTime = System.currentTimeMillis(); - ArrayList hdrv = new ArrayList<>(); - - try (BufferedReader br = new BufferedReader(new FileReader(metafile))) { - String line = br.readLine(); - int index = line.indexOf('\t'); - method = line.substring(0, index); - NodeUtils.setIpAndFqdnForEelf(method); - if (!"DELETE".equals(method) && !monly) { - length = datafile.length(); - } - fileid = line.substring(index + 1); - while ((line = br.readLine()) != null) { - index = line.indexOf('\t'); - String header = line.substring(0, index); - String headerValue = line.substring(index + 1); - if ("x-dmaap-dr-routing".equalsIgnoreCase(header)) { - subid = headerValue.replaceAll("[^ ]*/+", ""); - feedid = deliveryTaskHelper.getFeedId(subid.replaceAll(" .*", "")); - } - if (length == 0 && header.toLowerCase().startsWith("content-")) { - continue; - } - if ("content-type".equalsIgnoreCase(header)) { - ctype = headerValue; - } - if ("x-onap-requestid".equalsIgnoreCase(header)) { - MDC.put(MDC_KEY_REQUEST_ID, headerValue); - } - if ("x-invocationid".equalsIgnoreCase(header)) { - MDC.put("InvocationId", headerValue); - headerValue = UUID.randomUUID().toString(); - newInvocationId = headerValue; - } - hdrv.add(new String[]{header, 
headerValue}); - } - } catch (Exception e) { - eelfLogger.error("Exception", e); - } - hdrs = hdrv.toArray(new String[hdrv.size()][]); - url = deliveryTaskHelper.getDestURL(fileid); - } - - /** - * Is the object a DeliveryTask with the same publication ID. - */ - public boolean equals(Object object) { - if (!(object instanceof DeliveryTask)) { - return (false); - } - return (pubid.equals(((DeliveryTask) object).pubid)); - } - - /** - * Compare the publication IDs. - */ - public int compareTo(DeliveryTask other) { - return (pubid.compareTo(other.pubid)); - } - - /** - * Get the hash code of the publication ID. - */ - public int hashCode() { - return (pubid.hashCode()); - } - - /** - * Return the publication ID. - */ - public String toString() { - return (pubid); - } - - /** - * Get the publish ID. - */ - String getPublishId() { - return (pubid); - } - - /** - * Attempt delivery. - */ - public void run() { - attempts++; - try { - destInfo = deliveryTaskHelper.getDestinationInfo(); - boolean monly = destInfo.isMetaDataOnly(); - length = 0; - if (!"DELETE".equals(method) && !monly) { - length = datafile.length(); - } - stripSuffixIfIsDecompress(); - url = deliveryTaskHelper.getDestURL(fileid); - URL urlObj = new URL(url); - HttpURLConnection urlConnection = (HttpURLConnection) urlObj.openConnection(); - urlConnection.setConnectTimeout(60000); - urlConnection.setReadTimeout(60000); - urlConnection.setInstanceFollowRedirects(false); - urlConnection.setRequestMethod(method); - urlConnection.setRequestProperty("Content-Length", Long.toString(length)); - urlConnection.setRequestProperty("Authorization", destInfo.getAuth()); - urlConnection.setRequestProperty("X-DMAAP-DR-PUBLISH-ID", pubid); - boolean expect100 = destInfo.isUsing100(); - int rc = deliverFileToSubscriber(expect100, urlConnection); - String rmsg = urlConnection.getResponseMessage(); - rmsg = getResponseMessage(urlConnection, rmsg); - String xpubid = null; - InputStream is; - if (rc >= 200 && rc <= 299) { - is 
= urlConnection.getInputStream(); - xpubid = urlConnection.getHeaderField("X-DMAAP-DR-PUBLISH-ID"); - } else { - if (rc >= 300 && rc <= 399) { - rmsg = urlConnection.getHeaderField("Location"); - } - is = urlConnection.getErrorStream(); - } - byte[] buf = new byte[4096]; - if (is != null) { - while (is.read(buf) > 0) { - //flush the buffer - } - is.close(); - } - deliveryTaskHelper.reportStatus(this, rc, xpubid, rmsg); - } catch (Exception e) { - eelfLogger.error("Exception " + Arrays.toString(e.getStackTrace()), e); - deliveryTaskHelper.reportException(this, e); - } - } - - /** - * To send decompressed gzip to the subscribers. - * - * @param httpURLConnection connection used to make request - */ - private void sendDecompressedFile(HttpURLConnection httpURLConnection) throws IOException { - byte[] buffer = new byte[8164]; - httpURLConnection.setRequestProperty(DECOMPRESSION_STATUS, "SUCCESS"); - OutputStream outputStream = getOutputStream(httpURLConnection); - if (outputStream != null) { - int bytesRead; - try (InputStream gzipInputStream = new GZIPInputStream(new FileInputStream(datafile))) { - int bufferLength = buffer.length; - while ((bytesRead = gzipInputStream.read(buffer, 0, bufferLength)) > 0) { - outputStream.write(buffer, 0, bytesRead); - } - outputStream.close(); - } catch (IOException e) { - httpURLConnection.setRequestProperty(DECOMPRESSION_STATUS, "FAILURE"); - eelfLogger.info("Could not decompress file", e); - sendFile(httpURLConnection); - } - - } - } - - /** - * To send any file to the subscriber. 
- * - * @param httpURLConnection connection used to make request - */ - private void sendFile(HttpURLConnection httpURLConnection) throws IOException { - OutputStream os = getOutputStream(httpURLConnection); - if (os == null) { - return; - } - long sofar = 0; - try (InputStream is = new FileInputStream(datafile)) { - byte[] buf = new byte[1024 * 1024]; - while (sofar < length) { - int len = buf.length; - if (sofar + len > length) { - len = (int) (length - sofar); - } - len = is.read(buf, 0, len); - if (len <= 0) { - throw new IOException("Unexpected problem reading data file " + datafile); - } - sofar += len; - os.write(buf, 0, len); - } - os.close(); - } catch (IOException ioe) { - deliveryTaskHelper.reportDeliveryExtra(this, sofar); - throw ioe; - } - } - - /** - * Get the outputstream that will be used to send data. - * - * @param httpURLConnection connection used to make request - * @return AN Outpustream that can be used to send your data. - */ - OutputStream getOutputStream(HttpURLConnection httpURLConnection) throws IOException { - OutputStream outputStream = null; - try { - outputStream = httpURLConnection.getOutputStream(); - } catch (ProtocolException pe) { - deliveryTaskHelper.reportDeliveryExtra(this, -1L); - // Rcvd error instead of 100-continue - eelfLogger.error("Exception " + Arrays.toString(pe.getStackTrace()), pe); - } - return outputStream; - } - - private void stripSuffixIfIsDecompress() { - if (destInfo.isDecompress() && isFiletypeGzip(datafile) && fileid.endsWith(".gz")) { - fileid = fileid.replace(".gz", ""); - } - } - - private int deliverFileToSubscriber(boolean expect100, HttpURLConnection uc) throws IOException { - for (String[] nv : hdrs) { - uc.addRequestProperty(nv[0], nv[1]); - } - if (length > 0) { - if (expect100) { - uc.setRequestProperty("Expect", "100-continue"); - } - uc.setDoOutput(true); - if (destInfo.isDecompress()) { - if (isFiletypeGzip(datafile)) { - sendDecompressedFile(uc); - } else { - 
uc.setRequestProperty(DECOMPRESSION_STATUS, "UNSUPPORTED_FORMAT"); - sendFile(uc); - } - } else { - sendFile(uc); - } - } - return uc.getResponseCode(); - } - - @Nullable - private String getResponseMessage(HttpURLConnection uc, String rmsg) { - if (rmsg == null) { - String h0 = uc.getHeaderField(0); - if (h0 != null) { - int indexOfSpace1 = h0.indexOf(' '); - int indexOfSpace2 = h0.indexOf(' ', indexOfSpace1 + 1); - if (indexOfSpace1 != -1 && indexOfSpace2 != -1) { - rmsg = h0.substring(indexOfSpace2 + 1); - } - } - } - return rmsg; - } - - /** - * Remove meta and data files. - */ - void clean() { - deleteWithRetry(datafile); - deleteWithRetry(metafile); - eelfLogger.info(EelfMsgs.INVOKE, newInvocationId); - eelfLogger.info(EelfMsgs.EXIT); - hdrs = null; - } - - private void deleteWithRetry(File file) { - int maxTries = 3; - int tryCount = 1; - while (tryCount <= maxTries) { - try { - Files.deleteIfExists(file.toPath()); - break; - } catch (IOException e) { - eelfLogger.error("IOException : Failed to delete file :" - + file.getName() + " on attempt " + tryCount, e); - } - tryCount++; - } - } - - /** - * Get the resume time for a delivery task. - */ - long getResumeTime() { - return resumeTime; - } - - /** - * Set the resume time for a delivery task. - */ - void setResumeTime(long resumeTime) { - this.resumeTime = resumeTime; - } - - /** - * Has this delivery task been cleaned. - */ - boolean isCleaned() { - return (hdrs == null); - } - - /** - * Get length of body. - */ - public long getLength() { - return (length); - } - - /** - * Get creation date as encoded in the publish ID. - */ - long getDate() { - return (date); - } - - /** - * Get the most recent delivery attempt URL. - */ - public String getURL() { - return (url); - } - - /** - * Get the content type. - */ - String getCType() { - return (ctype); - } - - /** - * Get the method. - */ - String getMethod() { - return (method); - } - - /** - * Get the file ID. 
- */ - String getFileId() { - return (fileid); - } - - /** - * Get the number of delivery attempts. - */ - int getAttempts() { - return (attempts); - } - - /** - * Get the (space delimited list of) subscription ID for this delivery task. - */ - String getSubId() { - return (subid); - } - - /** - * Get the feed ID for this delivery task. - */ - String getFeedId() { - return (feedid); - } - - /** - * Get the followRedirects for this delivery task. - */ - boolean getFollowRedirects() { - return (followRedirects); - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java deleted file mode 100644 index b9068f2f..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java +++ /dev/null @@ -1,84 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. 
- * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -/** - * Interface to allow independent testing of the DeliveryTask code. - * - *

This interface represents all the configuraiton information and feedback mechanisms that a delivery task needs. - */ - -public interface DeliveryTaskHelper { - - /** - * Report that a delivery attempt failed due to an exception (like can't connect to remote host). - * - * @param task The task that failed - * @param exception The exception that occurred - */ - void reportException(DeliveryTask task, Exception exception); - - /** - * Report that a delivery attempt completed (successfully or unsuccessfully). - * - * @param task The task that failed - * @param status The HTTP status - * @param xpubid The publish ID from the far end (if any) - * @param location The redirection location for a 3XX response - */ - void reportStatus(DeliveryTask task, int status, String xpubid, String location); - - /** - * Report that a delivery attempt either failed while sending data or that an error was returned instead of a 100 - * Continue. - * - * @param task The task that failed - * @param sent The number of bytes sent or -1 if an error was returned instead of 100 Continue. - */ - void reportDeliveryExtra(DeliveryTask task, long sent); - - /** - * Get the destination information for the delivery queue. - * - * @return The destination information - */ - DestInfo getDestinationInfo(); - - /** - * Given a file ID, get the URL to deliver to. - * - * @param fileid The file id - * @return The URL to deliver to - */ - String getDestURL(String fileid); - - /** - * Get the feed ID for a subscription. 
- * - * @param subid The subscription ID - * @return The feed iD - */ - String getFeedId(String subid); -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java index f5fa6e98..ac8c3186 100644 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java @@ -24,6 +24,8 @@ package org.onap.dmaap.datarouter.node; +import org.onap.dmaap.datarouter.node.config.NodeConfig; + /** * Information for a delivery destination that doesn't change from message to message. */ diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfoBuilder.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfoBuilder.java index 00c5cd8b..2b9db95b 100644 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfoBuilder.java +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfoBuilder.java @@ -66,7 +66,7 @@ public class DestInfoBuilder { return destInfoLogData; } - DestInfoBuilder setLogdata(String logdata) { + public DestInfoBuilder setLogdata(String logdata) { this.destInfoLogData = logdata; return this; } @@ -84,7 +84,7 @@ public class DestInfoBuilder { return destInfoAuthUser; } - DestInfoBuilder setAuthuser(String authuser) { + public DestInfoBuilder setAuthuser(String authuser) { this.destInfoAuthUser = authuser; return this; } @@ -93,7 +93,7 @@ public class DestInfoBuilder { return destInfoAuthentication; } - DestInfoBuilder setAuthentication(String authentication) { + public DestInfoBuilder setAuthentication(String authentication) { this.destInfoAuthentication = authentication; return this; } @@ -102,7 +102,7 @@ public class DestInfoBuilder { return destInfoMetaOnly; } - DestInfoBuilder setMetaonly(boolean metaonly) { + public DestInfoBuilder setMetaonly(boolean metaonly) { this.destInfoMetaOnly = 
metaonly; return this; } @@ -111,7 +111,7 @@ public class DestInfoBuilder { return destInfoUse100; } - DestInfoBuilder setUse100(boolean use100) { + public DestInfoBuilder setUse100(boolean use100) { this.destInfoUse100 = use100; return this; } @@ -120,7 +120,7 @@ public class DestInfoBuilder { return destInfoPrivilegedSubscriber; } - DestInfoBuilder setPrivilegedSubscriber(boolean privilegedSubscriber) { + public DestInfoBuilder setPrivilegedSubscriber(boolean privilegedSubscriber) { this.destInfoPrivilegedSubscriber = privilegedSubscriber; return this; } @@ -129,7 +129,7 @@ public class DestInfoBuilder { return destInfoFollowRedirects; } - DestInfoBuilder setFollowRedirects(boolean followRedirects) { + public DestInfoBuilder setFollowRedirects(boolean followRedirects) { this.destInfoFollowRedirects = followRedirects; return this; } @@ -138,12 +138,12 @@ public class DestInfoBuilder { return destInfoDecompress; } - DestInfoBuilder setDecompress(boolean decompress) { + public DestInfoBuilder setDecompress(boolean decompress) { this.destInfoDecompress = decompress; return this; } - DestInfo createDestInfo() { + public DestInfo createDestInfo() { return new DestInfo(this); } } \ No newline at end of file diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java deleted file mode 100644 index 4c7ea9c8..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java +++ /dev/null @@ -1,240 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - -package org.onap.dmaap.datarouter.node; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.FileWriter; -import java.io.IOException; -import java.io.Writer; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Arrays; -import java.util.TimerTask; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.jetbrains.annotations.NotNull; - -/** - * Cleanup of old log files. - * - *

Periodically scan the log directory for log files that are older than the log file retention interval, and delete - * them. In a future release, This class will also be responsible for uploading events logs to the log server to - * support the log query APIs. - */ - -public class LogManager extends TimerTask { - - private static final String EXCEPTION = "Exception"; - private EELFLogger logger = EELFManager.getInstance().getLogger(LogManager.class); - private NodeConfigManager config; - private Matcher isnodelog; - private Matcher iseventlog; - private Uploader worker; - private String uploaddir; - private String logdir; - - /** - * Construct a log manager - * - *

The log manager will check for expired log files every 5 minutes at 20 seconds after the 5 minute boundary. - * (Actually, the interval is the event log rollover interval, which defaults to 5 minutes). - */ - public LogManager(NodeConfigManager config) { - this.config = config; - try { - isnodelog = Pattern.compile("node\\.log\\.\\d{8}").matcher(""); - iseventlog = Pattern.compile("events-\\d{12}\\.log").matcher(""); - } catch (Exception e) { - logger.error(EXCEPTION, e); - } - logdir = config.getLogDir(); - uploaddir = logdir + "/.spool"; - (new File(uploaddir)).mkdirs(); - long now = System.currentTimeMillis(); - long intvl = StatusLog.parseInterval(config.getEventLogInterval(), 30000); - long when = now - now % intvl + intvl + 20000L; - config.getTimer().scheduleAtFixedRate(this, when - now, intvl); - worker = new Uploader(); - } - - /** - * Trigger check for expired log files and log files to upload. - */ - public void run() { - worker.poke(); - } - - public Uploader getWorker() { - return worker; - } - - class Uploader extends Thread implements DeliveryQueueHelper { - - private static final String META = "/.meta"; - private EELFLogger logger = EELFManager.getInstance().getLogger(Uploader.class); - private DeliveryQueue dq; - - Uploader() { - dq = new DeliveryQueue(this, - new DestInfoBuilder().setName("LogUpload").setSpool(uploaddir).setSubid(null).setLogdata(null) - .setUrl(null).setAuthuser(config.getMyName()).setAuthentication(config.getMyAuth()) - .setMetaonly(false).setUse100(false).setPrivilegedSubscriber(false) - .setFollowRedirects(false) - .setDecompress(false).createDestInfo()); - setDaemon(true); - setName("Log Uploader"); - start(); - } - - public long getInitFailureTimer() { - return (10000L); - } - - public long getWaitForFileProcessFailureTimer() { - return (600000L); - } - - public double getFailureBackoff() { - return (2.0); - } - - public long getMaxFailureTimer() { - return (150000L); - } - - public long getExpirationTimer() { - return 
(604800000L); - } - - public int getFairFileLimit() { - return (10000); - } - - public long getFairTimeLimit() { - return (86400000); - } - - public String getDestURL(DestInfo destinationInfo, String fileid) { - return (config.getEventLogUrl()); - } - - public void handleUnreachable(DestInfo destinationInfo) { - throw new UnsupportedOperationException(); - } - - public boolean handleRedirection(DestInfo destinationInfo, String location, String fileid) { - return (false); - } - - public boolean isFollowRedirects() { - return (false); - } - - public String getFeedId(String subid) { - return (null); - } - - private synchronized void snooze() { - try { - wait(10000); - } catch (Exception e) { - logger.error(EXCEPTION, e); - } - } - - private synchronized void poke() { - notifyAll(); - } - - @Override - public void run() { - while (true) { - scan(); - dq.run(); - snooze(); - } - } - - private void scan() { - long threshold = System.currentTimeMillis() - config.getLogRetention(); - File dir = new File(logdir); - String[] fns = dir.list(); - Arrays.sort(fns); - String lastqueued = "events-000000000000.log"; - String curlog = StatusLog.getCurLogFile(); - curlog = curlog.substring(curlog.lastIndexOf('/') + 1); - try { - Writer writer = new FileWriter(uploaddir + META); - writer.write("POST\tlogdata\nContent-Type\ttext/plain\n"); - writer.close(); - BufferedReader br = new BufferedReader(new FileReader(uploaddir + "/.lastqueued")); - lastqueued = br.readLine(); - br.close(); - } catch (Exception e) { - logger.error(EXCEPTION, e); - } - for (String fn : fns) { - if (!isnodelog.reset(fn).matches()) { - if (!iseventlog.reset(fn).matches()) { - continue; - } - lastqueued = setLastQueued(lastqueued, curlog, fn); - } - File file = new File(dir, fn); - if (file.lastModified() < threshold) { - try { - Files.deleteIfExists(file.toPath()); - } catch (IOException e) { - logger.error("Failed to delete file: " + file.getPath(), e); - } - } - } - try (Writer w = new FileWriter(uploaddir + 
"/.lastqueued")) { - Files.deleteIfExists(new File(uploaddir + META).toPath()); - w.write(lastqueued + "\n"); - } catch (Exception e) { - logger.error(EXCEPTION, e); - } - } - - @NotNull - private String setLastQueued(String lastqueued, String curlog, String fn) { - if (lastqueued.compareTo(fn) < 0 && curlog.compareTo(fn) > 0) { - lastqueued = fn; - try { - String pid = config.getPublishId(); - Files.createLink(Paths.get(uploaddir + "/" + pid), Paths.get(logdir + "/" + fn)); - Files.createLink(Paths.get(uploaddir + "/" + pid + ".M"), Paths.get(uploaddir + META)); - } catch (Exception e) { - logger.error(EXCEPTION, e); - } - } - return lastqueued; - } - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeAafPropsUtils.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeAafPropsUtils.java deleted file mode 100644 index ec49807e..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeAafPropsUtils.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2019 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.dmaap.datarouter.node; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import org.onap.aaf.cadi.PropAccess; - -class NodeAafPropsUtils { - - private static final EELFLogger eelfLogger = EELFManager.getInstance().getLogger(NodeAafPropsUtils.class); - private final PropAccess propAccess; - - NodeAafPropsUtils(File propsFile) throws IOException { - propAccess = new PropAccess(); - try { - propAccess.load(new FileInputStream(propsFile.getPath())); - } catch (IOException e) { - eelfLogger.error("Failed to load props file: " + propsFile + "\n" + e.getMessage(), e); - throw e; - } - } - - String getDecryptedPass(String password) { - String decryptedPass = ""; - try { - decryptedPass = propAccess.decrypt(propAccess.getProperty(password), false); - } catch (IOException e) { - eelfLogger.error("Failed to decrypt " + password + " : " + e.getMessage(), e); - } - return decryptedPass; - } - - PropAccess getPropAccess() { - return propAccess; - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java deleted file mode 100644 index 127668ff..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java +++ /dev/null @@ -1,1010 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import org.jetbrains.annotations.NotNull; - -/** - * Processed configuration for this node. - * - *

The NodeConfig represents a processed configuration from the Data Router provisioning server. Each time - * configuration data is received from the provisioning server, a new NodeConfig is created and the previous one - * discarded. - */ -public class NodeConfig { - - private static final String PUBLISHER_NOT_PERMITTED = "Publisher not permitted for this feed"; - private static EELFLogger logger = EELFManager.getInstance().getLogger(NodeConfig.class); - private HashMap params = new HashMap<>(); - private HashMap feeds = new HashMap<>(); - private HashMap nodeinfo = new HashMap<>(); - private HashMap subinfo = new HashMap<>(); - private HashMap nodes = new HashMap<>(); - private HashMap provSubscriptions = new HashMap<>(); - private String myname; - private String myauth; - private DestInfo[] alldests; - private int rrcntr; - - /** - * Process the raw provisioning data to configure this node. - * - * @param pd The parsed provisioning data - * @param myname My name as seen by external systems - * @param spooldir The directory where temporary files live - * @param port The port number for URLs - * @param nodeauthkey The keying string used to generate node authentication credentials - */ - public NodeConfig(ProvData pd, String myname, String spooldir, int port, String nodeauthkey) { - this.myname = myname; - for (ProvParam p : pd.getParams()) { - params.put(p.getName(), p.getValue()); - } - ArrayList destInfos = addDestInfoToNodeConfig(pd, myname, spooldir, port, nodeauthkey); - PathFinder pf = new PathFinder(myname, nodeinfo.keySet().toArray(new String[0]), pd.getHops()); - HashMap> rdtab = addSubRedirInfoToNodeConfig(pd); - HashMap> pfutab = addFeedUsersToNodeConfig(pd); - HashMap egrtab = addEgressRoutesToNodeConfig(pd, myname); - HashMap> pfstab = addFeedSubnetToNodeConfig(pd); - HashSet allfeeds = addFeedsToNodeConfig(pd); - HashMap feedTargets = addSubsToNodeConfig(pd, spooldir, destInfos, pf, egrtab, allfeeds); - alldests = destInfos.toArray(new DestInfo[0]); - 
addFeedTargetsToNodeConfig(pd, rdtab, pfutab, pfstab, feedTargets); - } - - @NotNull - private ArrayList addDestInfoToNodeConfig(ProvData pd, String myname, String spooldir, int port, - String nodeauthkey) { - ArrayList destInfos = new ArrayList<>(); - myauth = NodeUtils.getNodeAuthHdr(myname, nodeauthkey); - for (ProvNode pn : pd.getNodes()) { - String commonName = pn.getCName(); - if (nodeinfo.get(commonName) != null) { - continue; - } - DestInfo di = new DestInfoBuilder().setName("n:" + commonName).setSpool(spooldir + "/n/" + commonName) - .setSubid(null) - .setLogdata("n2n-" + commonName).setUrl("https://" + commonName + ":" + port + "/internal/publish") - .setAuthuser(commonName).setAuthentication(myauth).setMetaonly(false).setUse100(true) - .setPrivilegedSubscriber(false).setFollowRedirects(false).setDecompress(false).createDestInfo(); - (new File(di.getSpool())).mkdirs(); - String auth = NodeUtils.getNodeAuthHdr(commonName, nodeauthkey); - destInfos.add(di); - nodeinfo.put(commonName, di); - nodes.put(auth, new IsFrom(commonName)); - } - return destInfos; - } - - @NotNull - private HashMap> addSubRedirInfoToNodeConfig(ProvData pd) { - HashMap> rdtab = new HashMap<>(); - for (ProvForceIngress pfi : pd.getForceIngress()) { - ArrayList redirections = rdtab.get(pfi.getFeedId()); - if (redirections == null) { - redirections = new ArrayList<>(); - rdtab.put(pfi.getFeedId(), redirections); - } - Redirection redirection = new Redirection(); - if (pfi.getSubnet() != null) { - redirection.snm = new SubnetMatcher(pfi.getSubnet()); - } - redirection.user = pfi.getUser(); - redirection.nodes = pfi.getNodes(); - redirections.add(redirection); - } - return rdtab; - } - - @NotNull - private HashMap> addFeedUsersToNodeConfig(ProvData pd) { - HashMap> pfutab = new HashMap<>(); - for (ProvFeedUser pfu : pd.getFeedUsers()) { - HashMap userInfo = pfutab.get(pfu.getFeedId()); - if (userInfo == null) { - userInfo = new HashMap<>(); - pfutab.put(pfu.getFeedId(), userInfo); - } - 
userInfo.put(pfu.getCredentials(), pfu.getUser()); - } - return pfutab; - } - - @NotNull - private HashMap addEgressRoutesToNodeConfig(ProvData pd, String myname) { - HashMap egrtab = new HashMap<>(); - for (ProvForceEgress pfe : pd.getForceEgress()) { - if (pfe.getNode().equals(myname) || nodeinfo.get(pfe.getNode()) == null) { - continue; - } - egrtab.put(pfe.getSubId(), pfe.getNode()); - } - return egrtab; - } - - @NotNull - private HashMap> addFeedSubnetToNodeConfig(ProvData pd) { - HashMap> pfstab = new HashMap<>(); - for (ProvFeedSubnet pfs : pd.getFeedSubnets()) { - ArrayList subnetMatchers = pfstab.get(pfs.getFeedId()); - if (subnetMatchers == null) { - subnetMatchers = new ArrayList<>(); - pfstab.put(pfs.getFeedId(), subnetMatchers); - } - subnetMatchers.add(new SubnetMatcher(pfs.getCidr())); - } - return pfstab; - } - - @NotNull - private HashSet addFeedsToNodeConfig(ProvData pd) { - HashSet allfeeds = new HashSet<>(); - for (ProvFeed pfx : pd.getFeeds()) { - if (pfx.getStatus() == null) { - allfeeds.add(pfx.getId()); - } - } - return allfeeds; - } - - @NotNull - private HashMap addSubsToNodeConfig(ProvData pd, String spooldir, - ArrayList destInfos, PathFinder pf, HashMap egrtab, HashSet allfeeds) { - HashMap feedTargets = new HashMap<>(); - for (ProvSubscription provSubscription : pd.getSubscriptions()) { - String subId = provSubscription.getSubId(); - String feedId = provSubscription.getFeedId(); - if (isFeedOrSubKnown(allfeeds, subId, feedId)) { - continue; - } - int sididx = 999; - try { - sididx = Integer.parseInt(subId); - sididx -= sididx % 100; - } catch (Exception e) { - logger.error("NODE0517 Exception NodeConfig: " + e); - } - String subscriptionDirectory = sididx + "/" + subId; - DestInfo destinationInfo = new DestInfo("s:" + subId, - spooldir + "/s/" + subscriptionDirectory, provSubscription); - (new File(destinationInfo.getSpool())).mkdirs(); - destInfos.add(destinationInfo); - provSubscriptions.put(subId, provSubscription); - 
subinfo.put(subId, destinationInfo); - String egr = egrtab.get(subId); - if (egr != null) { - subId = pf.getPath(egr) + subId; - } - StringBuilder sb = feedTargets.get(feedId); - if (sb == null) { - sb = new StringBuilder(); - feedTargets.put(feedId, sb); - } - sb.append(' ').append(subId); - } - return feedTargets; - } - - private void addFeedTargetsToNodeConfig(ProvData pd, HashMap> rdtab, - HashMap> pfutab, HashMap> pfstab, - HashMap feedTargets) { - for (ProvFeed pfx : pd.getFeeds()) { - String fid = pfx.getId(); - Feed feed = feeds.get(fid); - if (feed != null) { - continue; - } - feed = new Feed(); - feeds.put(fid, feed); - feed.createdDate = pfx.getCreatedDate(); - feed.loginfo = pfx.getLogData(); - feed.status = pfx.getStatus(); - /* - * AAF changes: TDP EPIC US# 307413 - * Passing aafInstance from ProvFeed to identify legacy/AAF feeds - */ - feed.aafInstance = pfx.getAafInstance(); - ArrayList v1 = pfstab.get(fid); - if (v1 == null) { - feed.subnets = new SubnetMatcher[0]; - } else { - feed.subnets = v1.toArray(new SubnetMatcher[0]); - } - HashMap h1 = pfutab.get(fid); - if (h1 == null) { - h1 = new HashMap(); - } - feed.authusers = h1; - ArrayList v2 = rdtab.get(fid); - if (v2 == null) { - feed.redirections = new Redirection[0]; - } else { - feed.redirections = v2.toArray(new Redirection[0]); - } - StringBuilder sb = feedTargets.get(fid); - if (sb == null) { - feed.targets = new Target[0]; - } else { - feed.targets = parseRouting(sb.toString()); - } - } - } - - /** - * Parse a target string into an array of targets. - * - * @param routing Target string - * @return Array of targets. 
- */ - public Target[] parseRouting(String routing) { - routing = routing.trim(); - if ("".equals(routing)) { - return (new Target[0]); - } - String[] routingTable = routing.split("\\s+"); - HashMap tmap = new HashMap<>(); - HashSet subset = new HashSet<>(); - ArrayList targets = new ArrayList<>(); - for (int i = 0; i < routingTable.length; i++) { - String target = routingTable[i]; - int index = target.indexOf('/'); - if (index == -1) { - addTarget(subset, targets, target); - } else { - addTargetWithRouting(tmap, targets, target, index); - } - } - return (targets.toArray(new Target[0])); - } - - /** - * Check whether this is a valid node-to-node transfer. - * - * @param credentials Credentials offered by the supposed node - * @param ip IP address the request came from - */ - public boolean isAnotherNode(String credentials, String ip) { - IsFrom node = nodes.get(credentials); - return (node != null && node.isFrom(ip)); - } - - /** - * Check whether publication is allowed. - * - * @param feedid The ID of the feed being requested. - * @param credentials The offered credentials - * @param ip The requesting IP address - */ - public String isPublishPermitted(String feedid, String credentials, String ip) { - Feed feed = feeds.get(feedid); - String nf = "Feed does not exist"; - if (feed != null) { - nf = feed.status; - } - if (nf != null) { - return (nf); - } - String user = feed.authusers.get(credentials); - if (user == null) { - return (PUBLISHER_NOT_PERMITTED); - } - if (feed.subnets.length == 0) { - return (null); - } - byte[] addr = NodeUtils.getInetAddress(ip); - for (SubnetMatcher snm : feed.subnets) { - if (snm.matches(addr)) { - return (null); - } - } - return (PUBLISHER_NOT_PERMITTED); - } - - /** - * Check whether publication is allowed for AAF Feed. - * - * @param feedid The ID of the feed being requested. 
- * @param ip The requesting IP address - */ - public String isPublishPermitted(String feedid, String ip) { - Feed feed = feeds.get(feedid); - String nf = "Feed does not exist"; - if (feed != null) { - nf = feed.status; - } - if (nf != null) { - return nf; - } - if (feed.subnets.length == 0) { - return null; - } - byte[] addr = NodeUtils.getInetAddress(ip); - for (SubnetMatcher snm : feed.subnets) { - if (snm.matches(addr)) { - return null; - } - } - return PUBLISHER_NOT_PERMITTED; - } - - /** - * Check whether delete file is allowed. - * - * @param subId The ID of the subscription being requested. - */ - public boolean isDeletePermitted(String subId) { - ProvSubscription provSubscription = provSubscriptions.get(subId); - return provSubscription.isPrivilegedSubscriber(); - } - - /** - * Get authenticated user. - */ - public String getAuthUser(String feedid, String credentials) { - return (feeds.get(feedid).authusers.get(credentials)); - } - - /** - * AAF changes: TDP EPIC US# 307413 Check AAF_instance for feed ID. - * - * @param feedid The ID of the feed specified - */ - public String getAafInstance(String feedid) { - Feed feed = feeds.get(feedid); - return feed.aafInstance; - } - - /** - * Check if the request should be redirected to a different ingress node. - */ - public String getIngressNode(String feedid, String user, String ip) { - Feed feed = feeds.get(feedid); - if (feed.redirections.length == 0) { - return (null); - } - byte[] addr = NodeUtils.getInetAddress(ip); - for (Redirection r : feed.redirections) { - if ((r.user != null && !user.equals(r.user)) || (r.snm != null && !r.snm.matches(addr))) { - continue; - } - for (String n : r.nodes) { - if (myname.equals(n)) { - return (null); - } - } - if (r.nodes.length == 0) { - return (null); - } - return (r.nodes[rrcntr++ % r.nodes.length]); - } - return (null); - } - - /** - * Get a provisioned configuration parameter. 
- */ - public String getProvParam(String name) { - return (params.get(name)); - } - - /** - * Get all the DestInfos. - */ - public DestInfo[] getAllDests() { - return (alldests); - } - - /** - * Get the targets for a feed. - * - * @param feedid The feed ID - * @return The targets this feed should be delivered to - */ - public Target[] getTargets(String feedid) { - if (feedid == null) { - return (new Target[0]); - } - Feed feed = feeds.get(feedid); - if (feed == null) { - return (new Target[0]); - } - return (feed.targets); - } - - /** - * Get the creation date for a feed. - * - * @param feedid The feed ID - * @return the timestamp of creation date of feed id passed - */ - public String getCreatedDate(String feedid) { - Feed feed = feeds.get(feedid); - return (feed.createdDate); - } - - /** - * Get the feed ID for a subscription. - * - * @param subid The subscription ID - * @return The feed ID - */ - public String getFeedId(String subid) { - DestInfo di = subinfo.get(subid); - if (di == null) { - return (null); - } - return (di.getLogData()); - } - - /** - * Get the spool directory for a subscription. - * - * @param subid The subscription ID - * @return The spool directory - */ - public String getSpoolDir(String subid) { - DestInfo di = subinfo.get(subid); - if (di == null) { - return (null); - } - return (di.getSpool()); - } - - /** - * Get the Authorization value this node uses. 
- * - * @return The Authorization header value for this node - */ - public String getMyAuth() { - return (myauth); - } - - private boolean isFeedOrSubKnown(HashSet allfeeds, String subId, String feedId) { - return !allfeeds.contains(feedId) || subinfo.get(subId) != null; - } - - private void addTargetWithRouting(HashMap tmap, ArrayList targets, String target, - int index) { - String node = target.substring(0, index); - String rtg = target.substring(index + 1); - DestInfo di = nodeinfo.get(node); - if (di == null) { - targets.add(new Target(null, target)); - } else { - Target tt = tmap.get(node); - if (tt == null) { - tt = new Target(di, rtg); - tmap.put(node, tt); - targets.add(tt); - } else { - tt.addRouting(rtg); - } - } - } - - private void addTarget(HashSet subset, ArrayList targets, String target) { - DestInfo destInfo = subinfo.get(target); - if (destInfo == null) { - targets.add(new Target(null, target)); - } else { - if (!subset.contains(target)) { - subset.add(target); - targets.add(new Target(destInfo, null)); - } - } - } - - /** - * Raw configuration entry for a data router node. - */ - public static class ProvNode { - - private String cname; - - /** - * Construct a node configuration entry. - * - * @param cname The cname of the node. - */ - public ProvNode(String cname) { - this.cname = cname; - } - - /** - * Get the cname of the node. - */ - public String getCName() { - return (cname); - } - } - - /** - * Raw configuration entry for a provisioning parameter. - */ - public static class ProvParam { - - private String name; - private String value; - - /** - * Construct a provisioning parameter configuration entry. - * - * @param name The name of the parameter. - * @param value The value of the parameter. - */ - public ProvParam(String name, String value) { - this.name = name; - this.value = value; - } - - /** - * Get the name of the parameter. - */ - public String getName() { - return (name); - } - - /** - * Get the value of the parameter. 
- */ - public String getValue() { - return (value); - } - } - - /** - * Raw configuration entry for a data feed. - */ - public static class ProvFeed { - - private String id; - private String logdata; - private String status; - private String createdDate; - /* - * AAF changes: TDP EPIC US# 307413 - * Passing aafInstance from to identify legacy/AAF feeds - */ - private String aafInstance; - - /** - * Construct a feed configuration entry. - * - * @param id The feed ID of the entry. - * @param logdata String for log entries about the entry. - * @param status The reason why this feed cannot be used (Feed has been deleted, Feed has been suspended) or - * null if it is valid. - */ - public ProvFeed(String id, String logdata, String status, String createdDate, String aafInstance) { - this.id = id; - this.logdata = logdata; - this.status = status; - this.createdDate = createdDate; - this.aafInstance = aafInstance; - } - - /** - * Get the created date of the data feed. - */ - public String getCreatedDate() { - return (createdDate); - } - - /** - * Get the aafInstance of the data feed. - */ - public String getAafInstance() { - return aafInstance; - } - - /** - * Get the feed id of the data feed. - */ - public String getId() { - return (id); - } - - /** - * Get the log data of the data feed. - */ - public String getLogData() { - return (logdata); - } - - /** - * Get the status of the data feed. - */ - public String getStatus() { - return (status); - } - } - - /** - * Raw configuration entry for a feed user. - */ - public static class ProvFeedUser { - - private String feedid; - private String user; - private String credentials; - - /** - * Construct a feed user configuration entry. - * - * @param feedid The feed id. - * @param user The user that will publish to the feed. - * @param credentials The Authorization header the user will use to publish. 
- */ - public ProvFeedUser(String feedid, String user, String credentials) { - this.feedid = feedid; - this.user = user; - this.credentials = credentials; - } - - /** - * Get the feed id of the feed user. - */ - public String getFeedId() { - return (feedid); - } - - /** - * Get the user for the feed user. - */ - public String getUser() { - return (user); - } - - /** - * Get the credentials for the feed user. - */ - public String getCredentials() { - return (credentials); - } - } - - /** - * Raw configuration entry for a feed subnet. - */ - public static class ProvFeedSubnet { - - private String feedid; - private String cidr; - - /** - * Construct a feed subnet configuration entry. - * - * @param feedid The feed ID - * @param cidr The CIDR allowed to publish to the feed. - */ - public ProvFeedSubnet(String feedid, String cidr) { - this.feedid = feedid; - this.cidr = cidr; - } - - /** - * Get the feed id of the feed subnet. - */ - public String getFeedId() { - return (feedid); - } - - /** - * Get the CIDR of the feed subnet. - */ - public String getCidr() { - return (cidr); - } - } - - /** - * Raw configuration entry for a subscription. - */ - public static class ProvSubscription { - - private String subid; - private String feedid; - private String url; - private String authuser; - private String credentials; - private boolean metaonly; - private boolean use100; - private boolean privilegedSubscriber; - private boolean followRedirect; - private boolean decompress; - - /** - * Construct a subscription configuration entry. - * - * @param subid The subscription ID - * @param feedid The feed ID - * @param url The base delivery URL (not including the fileid) - * @param authuser The user in the credentials used to deliver - * @param credentials The credentials used to authenticate to the delivery URL exactly as they go in the - * Authorization header. - * @param metaonly Is this a meta data only subscription? - * @param use100 Should we send Expect: 100-continue? 
- * @param privilegedSubscriber Can we wait to receive a delete file call before deleting file - * @param followRedirect Is follow redirect of destination enabled? - * @param decompress To see if they want their information compressed or decompressed - */ - public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials, - boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean followRedirect, - boolean decompress) { - this.subid = subid; - this.feedid = feedid; - this.url = url; - this.authuser = authuser; - this.credentials = credentials; - this.metaonly = metaonly; - this.use100 = use100; - this.privilegedSubscriber = privilegedSubscriber; - this.followRedirect = followRedirect; - this.decompress = decompress; - } - - /** - * Get the subscription ID. - */ - public String getSubId() { - return (subid); - } - - /** - * Get the feed ID. - */ - public String getFeedId() { - return (feedid); - } - - /** - * Get the delivery URL. - */ - public String getURL() { - return (url); - } - - /** - * Get the user. - */ - public String getAuthUser() { - return (authuser); - } - - /** - * Get the delivery credentials. - */ - public String getCredentials() { - return (credentials); - } - - /** - * Is this a meta data only subscription. - */ - public boolean isMetaDataOnly() { - return (metaonly); - } - - /** - * Should we send Expect: 100-continue. - */ - public boolean isUsing100() { - return (use100); - } - - /** - * Can we wait to receive a delete file call before deleting file. - */ - public boolean isPrivilegedSubscriber() { - return (privilegedSubscriber); - } - - /** - * Should I decompress the file before sending it on. - */ - public boolean isDecompress() { - return (decompress); - } - - /** - * New field is added - FOLLOW_REDIRECTS feature iTrack:DATARTR-17 - 1706 Get the followRedirect of this - * destination. 
- */ - boolean getFollowRedirect() { - return (followRedirect); - } - } - - /** - * Raw configuration entry for controlled ingress to the data router node. - */ - public static class ProvForceIngress { - - private String feedid; - private String subnet; - private String user; - private String[] nodes; - - /** - * Construct a forced ingress configuration entry. - * - * @param feedid The feed ID that this entry applies to - * @param subnet The CIDR for which publisher IP addresses this entry applies to or "" if it applies to all - * publisher IP addresses - * @param user The publishing user this entry applies to or "" if it applies to all publishing users. - * @param nodes The array of FQDNs of the data router nodes to redirect publication attempts to. - */ - public ProvForceIngress(String feedid, String subnet, String user, String[] nodes) { - this.feedid = feedid; - this.subnet = subnet; - this.user = user; - //Sonar fix - if (nodes == null) { - this.nodes = new String[0]; - } else { - this.nodes = Arrays.copyOf(nodes, nodes.length); - } - } - - /** - * Get the feed ID. - */ - public String getFeedId() { - return (feedid); - } - - /** - * Get the subnet. - */ - public String getSubnet() { - return (subnet); - } - - /** - * Get the user. - */ - public String getUser() { - return (user); - } - - /** - * Get the node. - */ - public String[] getNodes() { - return (nodes); - } - } - - /** - * Raw configuration entry for controlled egress from the data router. - */ - public static class ProvForceEgress { - - private String subid; - private String node; - - /** - * Construct a forced egress configuration entry. - * - * @param subid The subscription ID the subscription with forced egress - * @param node The node handling deliveries for this subscription - */ - public ProvForceEgress(String subid, String node) { - this.subid = subid; - this.node = node; - } - - /** - * Get the subscription ID. - */ - public String getSubId() { - return (subid); - } - - /** - * Get the node. 
- */ - public String getNode() { - return (node); - } - } - - /** - * Raw configuration entry for routing within the data router network. - */ - public static class ProvHop { - - private String from; - private String to; - private String via; - - /** - * Construct a hop entry. - * - * @param from The FQDN of the node with the data to be delivered - * @param to The FQDN of the node that will deliver to the subscriber - * @param via The FQDN of the node where the from node should send the data - */ - public ProvHop(String from, String to, String via) { - this.from = from; - this.to = to; - this.via = via; - } - - /** - * A human readable description of this entry. - */ - public String toString() { - return ("Hop " + from + "->" + to + " via " + via); - } - - /** - * Get the from node. - */ - public String getFrom() { - return (from); - } - - /** - * Get the to node. - */ - public String getTo() { - return (to); - } - - /** - * Get the next intermediate node. - */ - public String getVia() { - return (via); - } - } - - private static class Redirection { - - SubnetMatcher snm; - String user; - String[] nodes; - } - - private static class Feed { - - String loginfo; - String status; - SubnetMatcher[] subnets; - HashMap authusers = new HashMap<>(); - Redirection[] redirections; - Target[] targets; - String createdDate; - String aafInstance; - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java index 1debcf63..d02bedb2 100644 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java @@ -36,10 +36,17 @@ import java.io.InputStreamReader; import java.io.Reader; import java.net.URL; import java.nio.file.Files; +import java.util.HashSet; +import java.util.Iterator; import java.util.Objects; import java.util.Properties; import 
java.util.Timer; +import org.onap.dmaap.datarouter.node.config.NodeConfig; +import org.onap.dmaap.datarouter.node.config.ProvData; +import org.onap.dmaap.datarouter.node.delivery.DeliveryQueueHelper; import org.onap.dmaap.datarouter.node.eelf.EelfMsgs; +import org.onap.dmaap.datarouter.node.utils.NodeTlsManager; +import org.onap.dmaap.datarouter.node.utils.NodeUtils; /** @@ -56,8 +63,6 @@ public class NodeConfigManager implements DeliveryQueueHelper { private static final String NODE_CONFIG_MANAGER = "NodeConfigManager"; private static final EELFLogger eelfLogger = EELFManager.getInstance().getLogger(NodeConfigManager.class); - private static NodeConfigManager base; - private long maxfailuretimer; private long initfailuretimer; private long waitForFileProcessFailureTimer; @@ -73,19 +78,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { private final int intHttpPort; private final int intHttpsPort; private final int extHttpsPort; - private String[] enabledprotocols; - private final boolean cadiEnabled; - private String aafType; - private String aafInstance; - private String aafAction; private final boolean tlsEnabled; - private String kstype; - private String ksfile; - private String kspass; - private String kpass; - private String tstype; - private String tsfile; - private String tspass; private String myname; private final String nak; private final File quiesce; @@ -103,8 +96,9 @@ public class NodeConfigManager implements DeliveryQueueHelper { private final RedirManager rdmgr; private final Timer timer = new Timer("Node Configuration Timer", true); private final RateLimitedOperation pfetcher; - private NodeConfig config; - private NodeAafPropsUtils nodeAafPropsUtils; + private static NodeConfigManager base; + private static NodeTlsManager nodeTlsManager; + private NodeConfig nodeConfig; private static Properties drNodeProperties; public static Properties getDrNodeProperties() { @@ -135,42 +129,24 @@ public class NodeConfigManager implements 
DeliveryQueueHelper { } eelfLogger.debug("NODE0303 Provisioning server is at: " + provhost); provcheck = new IsFrom(provhost); - - cadiEnabled = Boolean.parseBoolean(getDrNodeProperties().getProperty("CadiEnabled", "false")); - if (cadiEnabled) { - aafType = getDrNodeProperties().getProperty("AAFType", "org.onap.dmaap-dr.feed"); - aafInstance = getDrNodeProperties().getProperty("AAFInstance", "legacy"); - aafAction = getDrNodeProperties().getProperty("AAFAction", "publish"); - } tlsEnabled = Boolean.parseBoolean(getDrNodeProperties().getProperty("TlsEnabled", "true")); if (isTlsEnabled()) { try { - kstype = getDrNodeProperties().getProperty("KeyStoreType", "PKCS12"); - tstype = getDrNodeProperties().getProperty("TrustStoreType", "jks"); - enabledprotocols = ((getDrNodeProperties().getProperty("NodeHttpsProtocols")).trim()).split("\\|"); - nodeAafPropsUtils = new NodeAafPropsUtils(new File(getDrNodeProperties() - .getProperty("AAFPropsFilePath", "/opt/app/osaaf/local/org.onap.dmaap-dr.props"))); - getSslContextData(); - if (tsfile != null && tsfile.length() > 0) { - System.setProperty("javax.net.ssl.trustStoreType", tstype); - System.setProperty("javax.net.ssl.trustStore", tsfile); - System.setProperty("javax.net.ssl.trustStorePassword", tspass); - } - myname = NodeUtils.getCanonicalName(kstype, ksfile, kspass); + nodeTlsManager = new NodeTlsManager(getDrNodeProperties()); + myname = nodeTlsManager.getMyNameFromCertificate(); if (myname == null) { NodeUtils.setIpAndFqdnForEelf(NODE_CONFIG_MANAGER); - eelfLogger.error(EelfMsgs.MESSAGE_KEYSTORE_FETCH_ERROR, ksfile); - eelfLogger.error("NODE0309 Unable to fetch canonical name from keystore file " + ksfile); + eelfLogger.error(EelfMsgs.MESSAGE_KEYSTORE_FETCH_ERROR, nodeTlsManager.getKeyStorefile()); + eelfLogger.error("NODE0309 Unable to fetch canonical name from keystore file {}", nodeTlsManager.getKeyStorefile()); exit(1); } - eelfLogger.debug("NODE0304 My certificate says my name is " + myname); + 
eelfLogger.debug("NODE0304 My certificate says my name is {}", myname); } catch (Exception e) { eelfLogger.error("NODE0314 Failed to load AAF props. Exiting", e); exit(1); } } myname = "dmaap-dr-node"; - eventlogurl = getDrNodeProperties().getProperty("LogUploadURL", "https://feeds-drtr.web.att.com/internal/logs"); intHttpPort = Integer.parseInt(getDrNodeProperties().getProperty("IntHttpPort", "80")); intHttpsPort = Integer.parseInt(getDrNodeProperties().getProperty("IntHttpsPort", "443")); @@ -200,21 +176,13 @@ public class NodeConfigManager implements DeliveryQueueHelper { pfetcher = new RateLimitedOperation( Long.parseLong(getDrNodeProperties().getProperty("MinProvFetchInterval", "10000")), timer) { public void run() { - fetchconfig(); + fetchNodeConfigFromProv(); } }; eelfLogger.debug("NODE0305 Attempting to fetch configuration at " + provurl); pfetcher.request(); } - private void getSslContextData() { - ksfile = nodeAafPropsUtils.getPropAccess().getProperty("cadi_keystore"); - kspass = nodeAafPropsUtils.getDecryptedPass("cadi_keystore_password"); - kpass = nodeAafPropsUtils.getDecryptedPass("cadi_keystore_password"); - tsfile = nodeAafPropsUtils.getPropAccess().getProperty("cadi_truststore"); - tspass = nodeAafPropsUtils.getDecryptedPass("cadi_truststore_password"); - } - /** * Get the default node configuration manager. 
*/ @@ -303,19 +271,19 @@ public class NodeConfigManager implements DeliveryQueueHelper { } } - private void fetchconfig() { + private void fetchNodeConfigFromProv() { try { - eelfLogger.debug("NodeConfigMan.fetchConfig: provurl:: " + provurl); + eelfLogger.debug("NodeConfigMan.fetchNodeConfigFromProv: provurl:: {}", provurl); URL url = new URL(provurl); Reader reader = new InputStreamReader(url.openStream()); - config = new NodeConfig(new ProvData(reader), myname, spooldir, extHttpsPort, nak); + nodeConfig = new NodeConfig(new ProvData(reader), myname, spooldir, extHttpsPort, nak); localconfig(); configtasks.startRun(); runTasks(); } catch (Exception e) { - NodeUtils.setIpAndFqdnForEelf("fetchconfigs"); + NodeUtils.setIpAndFqdnForEelf("fetchNodeConfigFromProv"); eelfLogger.error(EelfMsgs.MESSAGE_CONF_FAILED, e.toString()); - eelfLogger.error("NODE0306 Configuration failed " + e + " - try again later", e); + eelfLogger.error("NODE0306 Configuration failed {} - try again later", e); pfetcher.request(); } } @@ -348,8 +316,8 @@ public class NodeConfigManager implements DeliveryQueueHelper { /** * Am I configured. */ - boolean isConfigured() { - return config != null; + public boolean isConfigured() { + return nodeConfig != null; } /** @@ -366,7 +334,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return array of targets */ Target[] parseRouting(String routing) { - return config.parseRouting(routing); + return nodeConfig.parseRouting(routing); } /** @@ -377,7 +345,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return If the credentials and IP address are recognized, true, otherwise false. */ boolean isAnotherNode(String credentials, String ip) { - return config.isAnotherNode(credentials, ip); + return nodeConfig.isAnotherNode(credentials, ip); } /** @@ -389,18 +357,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return True if the IP and credentials are valid for the specified feed. 
*/ String isPublishPermitted(String feedid, String credentials, String ip) { - return config.isPublishPermitted(feedid, credentials, ip); - } - - /** - * Check whether publication is allowed for AAF Feed. - * - * @param feedid The ID of the feed being requested - * @param ip The requesting IP address - * @return True if the IP and credentials are valid for the specified feed. - */ - String isPublishPermitted(String feedid, String ip) { - return config.isPublishPermitted(feedid, ip); + return nodeConfig.isPublishPermitted(feedid, credentials, ip); } /** @@ -410,7 +367,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return True if the delete file is permitted for the subscriber. */ boolean isDeletePermitted(String subId) { - return config.isDeletePermitted(subId); + return nodeConfig.isDeletePermitted(subId); } /** @@ -421,20 +378,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return Null if the credentials are invalid or the user if they are valid. */ String getAuthUser(String feedid, String credentials) { - return config.getAuthUser(feedid, credentials); - } - - /** - * AAF changes: TDP EPIC US# 307413 Check AAF_instance for feed ID in NodeConfig. - * - * @param feedid The ID of the feed specified - */ - String getAafInstance(String feedid) { - return config.getAafInstance(feedid); - } - - String getAafInstance() { - return aafInstance; + return nodeConfig.getAuthUser(feedid, credentials); } /** @@ -446,7 +390,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return Null if the request should be accepted or the correct hostname if it should be sent to another node. */ String getIngressNode(String feedid, String user, String ip) { - return config.getIngressNode(feedid, user, ip); + return nodeConfig.getIngressNode(feedid, user, ip); } /** @@ -456,7 +400,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return The value of the parameter or null if it is not defined. 
*/ private String getProvParam(String name) { - return config.getProvParam(name); + return nodeConfig.getProvParam(name); } /** @@ -467,7 +411,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return The value of the parameter or deflt if it is not defined. */ private String getProvParam(String name, String defaultValue) { - name = config.getProvParam(name); + name = nodeConfig.getProvParam(name); if (name == null) { name = defaultValue; } @@ -484,14 +428,14 @@ public class NodeConfigManager implements DeliveryQueueHelper { /** * Get all the outbound spooling destinations. This will include both subscriptions and nodes. */ - DestInfo[] getAllDests() { - return config.getAllDests(); + public DestInfo[] getAllDests() { + return nodeConfig.getAllDests(); } /** * Register a task to run whenever the configuration changes. */ - void registerConfigTask(Runnable task) { + public void registerConfigTask(Runnable task) { configtasks.addTask(task); } @@ -601,7 +545,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return The targets this feed should be delivered to */ Target[] getTargets(String feedid) { - return config.getTargets(feedid); + return nodeConfig.getTargets(feedid); } /** @@ -616,7 +560,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { */ String getSpoolDir(String subid, String remoteaddr) { if (provcheck.isFrom(remoteaddr)) { - String sdir = config.getSpoolDir(subid); + String sdir = nodeConfig.getSpoolDir(subid); if (sdir != null) { eelfLogger.debug("NODE0310 Received subscription reset request for subscription " + subid + " from provisioning server " + remoteaddr); @@ -634,51 +578,10 @@ public class NodeConfigManager implements DeliveryQueueHelper { /** * Get the base directory for spool directories. */ - String getSpoolBase() { + public String getSpoolBase() { return spooldir; } - /** - * Get the key store type. - */ - String getKSType() { - return kstype; - } - - /** - * Get the key store file. 
- */ - String getKSFile() { - return ksfile; - } - - /** - * Get the key store password. - */ - String getKSPass() { - return kspass; - } - - /** - * Get the key password. - */ - String getKPass() { - return kpass; - } - - - String getTstype() { - return tstype; - } - - String getTsfile() { - return tsfile; - } - - String getTspass() { - return tspass; - } - /** * Get the http port. */ @@ -703,42 +606,42 @@ public class NodeConfigManager implements DeliveryQueueHelper { /** * Get the external name of this machine. */ - String getMyName() { + public String getMyName() { return myname; } /** * Get the number of threads to use for delivery. */ - int getDeliveryThreads() { + public int getDeliveryThreads() { return deliverythreads; } /** * Get the URL for uploading the event log data. */ - String getEventLogUrl() { + public String getEventLogUrl() { return eventlogurl; } /** * Get the prefix for the names of event log files. */ - String getEventLogPrefix() { + public String getEventLogPrefix() { return eventlogprefix; } /** * Get the suffix for the names of the event log files. */ - String getEventLogSuffix() { + public String getEventLogSuffix() { return eventlogsuffix; } /** * Get the interval between event log file rollovers. */ - String getEventLogInterval() { + public String getEventLogInterval() { return eventloginterval; } @@ -752,14 +655,14 @@ public class NodeConfigManager implements DeliveryQueueHelper { /** * Get the directory where the event and node log files live. */ - String getLogDir() { + public String getLogDir() { return logdir; } /** * How long do I keep log files (in milliseconds). 
*/ - long getLogRetention() { + public long getLogRetention() { return logretention; } @@ -777,7 +680,7 @@ public class NodeConfigManager implements DeliveryQueueHelper { * @return The feed ID */ public String getFeedId(String subid) { - return config.getFeedId(subid); + return nodeConfig.getFeedId(subid); } /** @@ -785,15 +688,15 @@ public class NodeConfigManager implements DeliveryQueueHelper { * * @return The Authorization string for this node */ - String getMyAuth() { - return config.getMyAuth(); + public String getMyAuth() { + return nodeConfig.getMyAuth(); } /** * Get the fraction of free spool disk space where we start throwing away undelivered files. This is * FREE_DISK_RED_PERCENT / 100.0. Default is 0.05. Limited by 0.01 <= FreeDiskStart <= 0.5. */ - double getFreeDiskStart() { + public double getFreeDiskStart() { return fdpstart; } @@ -801,54 +704,138 @@ public class NodeConfigManager implements DeliveryQueueHelper { * Get the fraction of free spool disk space where we stop throwing away undelivered files. This is * FREE_DISK_YELLOW_PERCENT / 100.0. Default is 0.2. Limited by FreeDiskStart <= FreeDiskStop <= 0.5. */ - double getFreeDiskStop() { + public double getFreeDiskStop() { return fdpstop; } - /** - * Disable and enable protocols. - */ - String[] getEnabledprotocols() { - return enabledprotocols; + protected boolean isTlsEnabled() { + return tlsEnabled; } - String getAafType() { - return aafType; + public static NodeTlsManager getNodeTlsManager() { + return nodeTlsManager; } - String getAafAction() { - return aafAction; - } + /** + * Generate publish IDs. + */ + static class PublishId { - protected boolean isTlsEnabled() { - return tlsEnabled; - } + private long nextuid; + private final String myname; - boolean getCadiEnabled() { - return cadiEnabled; - } + /** + * Generate publish IDs for the specified name. 
+ * + * @param myname Unique identifier for this publish ID generator (usually fqdn of server) + */ + public PublishId(String myname) { + this.myname = myname; + } - NodeAafPropsUtils getNodeAafPropsUtils() { - return nodeAafPropsUtils; + /** + * Generate a Data Router Publish ID that uniquely identifies the particular invocation of the Publish API for log + * correlation purposes. + */ + public synchronized String next() { + long now = System.currentTimeMillis(); + if (now < nextuid) { + now = nextuid; + } + nextuid = now + 1; + return (now + "." + myname); + } } /** - * Builds the permissions string to be verified. - * - * @param aafInstance The aaf instance - * @return The permissions - */ - String getPermission(String aafInstance) { - try { - String type = getAafType(); - String action = getAafAction(); - if ("".equals(aafInstance)) { - aafInstance = getAafInstance(); + * Manage a list of tasks to be executed when an event occurs. This makes the following guarantees: + *

+ */ + static class TaskList { + + private Iterator runlist; + private final HashSet tasks = new HashSet<>(); + private HashSet togo; + private HashSet sofar; + private HashSet added; + private HashSet removed; + + /** + * Start executing the sequence of tasks. + */ + synchronized void startRun() { + sofar = new HashSet<>(); + added = new HashSet<>(); + removed = new HashSet<>(); + togo = new HashSet<>(tasks); + runlist = togo.iterator(); + } + + /** + * Get the next task to execute. + */ + synchronized Runnable next() { + while (runlist != null) { + if (runlist.hasNext()) { + Runnable task = runlist.next(); + if (addTaskToSoFar(task)) { + return task; + } + } + if (!added.isEmpty()) { + togo = added; + added = new HashSet<>(); + removed.clear(); + runlist = togo.iterator(); + continue; + } + togo = null; + added = null; + removed = null; + sofar = null; + runlist = null; } - return type + "|" + aafInstance + "|" + action; - } catch (Exception e) { - eelfLogger.error("NODE0543 NodeConfigManager.getPermission: ", e); + return (null); + } + + /** + * Add a task to the list of tasks to run whenever the event occurs. + */ + synchronized void addTask(Runnable task) { + if (runlist != null) { + added.add(task); + removed.remove(task); + } + tasks.add(task); + } + + /** + * Remove a task from the list of tasks to run whenever the event occurs. 
+ */ + synchronized void removeTask(Runnable task) { + if (runlist != null) { + removed.add(task); + added.remove(task); + } + tasks.remove(task); + } + + private boolean addTaskToSoFar(Runnable task) { + if (removed.contains(task)) { + return false; + } + if (sofar.contains(task)) { + return false; + } + sofar.add(task); + return true; } - return null; } } diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeRunner.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeRunner.java index 485cdb20..036599a4 100644 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeRunner.java +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeRunner.java @@ -28,6 +28,9 @@ import static java.lang.System.exit; import com.att.eelf.configuration.EELFLogger; import com.att.eelf.configuration.EELFManager; import org.eclipse.jetty.server.Server; +import org.onap.dmaap.datarouter.node.log.LogManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The main starting point for the Data Router node. @@ -49,7 +52,7 @@ public class NodeRunner { nodeMainLogger.debug("NODE0001 Data Router Node Starting"); IsFrom.setDNSCache(); NodeConfigManager nodeConfigManager = NodeConfigManager.getInstance(); - nodeMainLogger.debug("NODE0002 I am " + nodeConfigManager.getMyName()); + nodeMainLogger.debug("NODE0002 I am {}", nodeConfigManager.getMyName()); (new WaitForConfig(nodeConfigManager)).waitForConfig(); new LogManager(nodeConfigManager); try { @@ -58,8 +61,7 @@ public class NodeRunner { server.join(); nodeMainLogger.debug("NODE0006 Node Server started-" + server.getState()); } catch (Exception e) { - nodeMainLogger.error("NODE0006 Jetty failed to start. Reporting will we be unavailable: " - + e.getMessage(), e); + nodeMainLogger.error("NODE0006 Jetty failed to start. 
Reporting will we be unavailable: {}", e.getMessage()); exit(1); } nodeMainLogger.debug("NODE0007 Node Server joined"); diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServer.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServer.java index cc07ab62..1a29f682 100644 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServer.java +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServer.java @@ -22,9 +22,6 @@ package org.onap.dmaap.datarouter.node; import com.att.eelf.configuration.EELFLogger; import com.att.eelf.configuration.EELFManager; -import java.util.EnumSet; -import javax.servlet.DispatcherType; -import javax.servlet.ServletException; import org.eclipse.jetty.http.HttpVersion; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.HttpConfiguration; @@ -33,11 +30,11 @@ import org.eclipse.jetty.server.SecureRequestCustomizer; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.SslConnectionFactory; -import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.jetbrains.annotations.NotNull; +import org.onap.dmaap.datarouter.node.delivery.Delivery; public class NodeServer { @@ -47,8 +44,7 @@ public class NodeServer { private static Server server; private static Delivery delivery; - private NodeServer(){ - } + private NodeServer(){} static Server getServerInstance(NodeConfigManager nodeConfigManager) { if (server == null) { @@ -77,7 +73,7 @@ public class NodeServer { servletContextHandler.addServlet(new ServletHolder(new NodeServlet(delivery, nodeConfigManager)), "/*"); if (nodeConfigManager.isTlsEnabled()) { - initialiseHttpsConnector(nodeConfigManager, httpConfiguration, httpServerConnector, servletContextHandler); + 
initialiseHttpsConnector(nodeConfigManager, httpConfiguration, httpServerConnector); } else { eelfLogger.info("NODE0005 Adding HTTP Connector"); server.setConnectors(new Connector[]{httpServerConnector}); @@ -88,7 +84,7 @@ public class NodeServer { } private static void initialiseHttpsConnector(NodeConfigManager nodeConfigManager, HttpConfiguration httpConfiguration, - ServerConnector httpServerConnector, ServletContextHandler servletContextHandler) { + ServerConnector httpServerConnector) { HttpConfiguration httpsConfiguration = new HttpConfiguration(httpConfiguration); httpsConfiguration.setRequestHeaderSize(8192); @@ -99,9 +95,8 @@ public class NodeServer { // HTTPS connector try (ServerConnector httpsServerConnector = new ServerConnector(server, - new SslConnectionFactory(getSslContextFactory(nodeConfigManager), HttpVersion.HTTP_1_1.asString()), + new SslConnectionFactory(getSslContextFactory(), HttpVersion.HTTP_1_1.asString()), new HttpConnectionFactory(httpsConfiguration))) { - httpsServerConnector.setPort(nodeConfigManager.getHttpsPort()); httpsServerConnector.setIdleTimeout(3600000); httpsServerConnector.setAcceptQueueSize(2); @@ -119,12 +114,16 @@ public class NodeServer { @NotNull - private static SslContextFactory.Server getSslContextFactory(NodeConfigManager nodeConfigManager) { - SslContextFactory sslContextFactory = new SslContextFactory.Server(); - sslContextFactory.setKeyStoreType(nodeConfigManager.getKSType()); - sslContextFactory.setKeyStorePath(nodeConfigManager.getKSFile()); - sslContextFactory.setKeyStorePassword(nodeConfigManager.getKSPass()); - sslContextFactory.setKeyManagerPassword(nodeConfigManager.getKPass()); + private static SslContextFactory.Server getSslContextFactory() { + SslContextFactory.Server sslContextFactory = new SslContextFactory.Server(); + sslContextFactory.setKeyStoreType(NodeConfigManager.getNodeTlsManager().getKeyStoreType()); + sslContextFactory.setKeyStorePath(NodeConfigManager.getNodeTlsManager().getKeyStorefile()); 
+ sslContextFactory.setKeyStorePassword(NodeConfigManager.getNodeTlsManager().getKeyStorePassword()); + sslContextFactory.setKeyManagerPassword(NodeConfigManager.getNodeTlsManager().getKeyManagerPassword()); + +// sslContextFactory.setTrustStoreType(NodeConfigManager.getNodeTlsManager().getTrustStoreType()); +// sslContextFactory.setTrustStorePath(ProvRunner.getAafPropsUtils().getTruststorePathProperty()); +// sslContextFactory.setTrustStorePassword(ProvRunner.getAafPropsUtils().getTruststorePassProperty()); sslContextFactory.setExcludeCipherSuites( "SSL_RSA_WITH_DES_CBC_SHA", @@ -135,12 +134,12 @@ public class NodeServer { "SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA", "SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA" ); - sslContextFactory.addExcludeProtocols("SSLv3"); + sslContextFactory.setIncludeProtocols(NodeConfigManager.getNodeTlsManager().getEnabledProtocols()); eelfLogger.info("Unsupported protocols: " + String.join(",", sslContextFactory.getExcludeProtocols())); eelfLogger.info("Supported protocols: " + String.join(",", sslContextFactory.getIncludeProtocols())); eelfLogger.info("Unsupported ciphers: " + String.join(",", sslContextFactory.getExcludeCipherSuites())); eelfLogger.info("Supported ciphers: " + String.join(",", sslContextFactory.getIncludeCipherSuites())); - return (SslContextFactory.Server) sslContextFactory; + return sslContextFactory; } } diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java index 80f7e3ad..4dbe0e75 100644 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java @@ -24,7 +24,7 @@ package org.onap.dmaap.datarouter.node; -import static org.onap.dmaap.datarouter.node.NodeUtils.sendResponseError; +import static org.onap.dmaap.datarouter.node.utils.NodeUtils.sendResponseError; import com.att.eelf.configuration.EELFLogger; 
import com.att.eelf.configuration.EELFManager; @@ -44,7 +44,10 @@ import java.util.regex.Pattern; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; import org.jetbrains.annotations.Nullable; +import org.onap.dmaap.datarouter.node.delivery.Delivery; import org.onap.dmaap.datarouter.node.eelf.EelfMsgs; +import org.onap.dmaap.datarouter.node.log.StatusLog; +import org.onap.dmaap.datarouter.node.utils.NodeUtils; import org.slf4j.MDC; /** @@ -195,7 +198,6 @@ public class NodeServlet extends HttpServlet { String pubid = null; String rcvd = NodeUtils.logts(System.currentTimeMillis()) + ";from=" + ip + ";by=" + lip; Target[] targets; - boolean isAAFFeed = false; if (fileid.startsWith("/delete/")) { deleteFile(req, resp, fileid, pubid); return; @@ -220,28 +222,6 @@ public class NodeServlet extends HttpServlet { return; } feedid = fileid.substring(0, index); - - if (config.getCadiEnabled()) { - String path = req.getPathInfo(); - if (!path.startsWith("/internal") && feedid != null) { - String aafInstance = config.getAafInstance(feedid); - if (!("legacy".equalsIgnoreCase(aafInstance))) { - isAAFFeed = true; - String permission = config.getPermission(aafInstance); - eelfLogger.debug("NodeServlet.common() permission string - " + permission); - //Check in CADI Framework API if user has AAF permission or not - if (!req.isUserInRole(permission)) { - String message = "AAF disallows access to permission string - " + permission; - eelfLogger.error("NODE0307 Rejecting unauthenticated PUT or DELETE of " + req.getPathInfo() - + FROM + req.getRemoteAddr()); - resp.sendError(HttpServletResponse.SC_FORBIDDEN, message); - eelfLogger.info(EelfMsgs.EXIT); - return; - } - } - } - } - fileid = fileid.substring(index + 1); pubid = config.getPublishId(); targets = config.getTargets(feedid); @@ -254,8 +234,6 @@ public class NodeServlet extends HttpServlet { } fileid = fileid.substring(18); pubid = generateAndValidatePublishId(req); - - user = 
"datartr"; // SP6 : Added usr as datartr to avoid null entries for internal routing targets = config.parseRouting(req.getHeader("X-DMAAP-DR-ROUTING")); } else { eelfLogger.error("NODE0204 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + FROM + req @@ -285,39 +263,15 @@ public class NodeServlet extends HttpServlet { String logurl = HTTPS + hp + INTERNAL_PUBLISH + fileid; if (feedid != null) { logurl = HTTPS + hp + PUBLISH + feedid + "/" + fileid; - //Cadi code starts - if (!isAAFFeed) { - String reason = config.isPublishPermitted(feedid, credentials, ip); - if (reason != null) { - eelfLogger.error("NODE0111 Rejecting unauthorized publish attempt to feed " + PathUtil - .cleanString(feedid) + " fileid " + PathUtil.cleanString(fileid) + FROM + PathUtil - .cleanString(ip) + " reason " + PathUtil.cleanString(reason)); - resp.sendError(HttpServletResponse.SC_FORBIDDEN, reason); - eelfLogger.info(EelfMsgs.EXIT); - return; - } - user = config.getAuthUser(feedid, credentials); - } else { - String reason = config.isPublishPermitted(feedid, ip); - if (reason != null) { - eelfLogger.error("NODE0111 Rejecting unauthorized publish attempt to feed " + PathUtil - .cleanString(feedid) + " fileid " + PathUtil.cleanString(fileid) + FROM + PathUtil - .cleanString(ip) + " reason Invalid AAF user- " + PathUtil.cleanString(reason)); - String message = "Invalid AAF user- " + PathUtil.cleanString(reason); - eelfLogger.debug("NODE0308 Rejecting unauthenticated PUT or DELETE of " + PathUtil - .cleanString(req.getPathInfo()) + FROM + PathUtil.cleanString(req.getRemoteAddr())); - resp.sendError(HttpServletResponse.SC_FORBIDDEN, message); - return; - } - if ((req.getUserPrincipal() != null) && (req.getUserPrincipal().getName() != null)) { - String userName = req.getUserPrincipal().getName(); - String[] attid = userName.split("@"); - user = attid[0]; - } else { - user = "AAFUser"; - } + String reason = config.isPublishPermitted(feedid, credentials, ip); + if (reason != null) { + 
eelfLogger.info("NODE0111 Rejecting unauthorized publish attempt to feed " + feedid + " fileid " + + fileid + " from " + ip + " reason " + reason); + resp.sendError(javax.servlet.http.HttpServletResponse.SC_FORBIDDEN, reason); + eelfLogger.info(EelfMsgs.EXIT); + return; } - //Cadi code Ends + user = config.getAuthUser(feedid, credentials); String newnode = config.getIngressNode(feedid, user, ip); if (newnode != null) { String port = ""; @@ -601,4 +555,88 @@ public class NodeServlet extends HttpServlet { return -1; } } + + /** + * Utility class that validates the path url formed from + * the string passed in the request parameters. + */ + static class PathUtil { + + private PathUtil() { + throw new IllegalStateException("Utility Class"); + } + + /** + * This method takes String as the parameter and return the filtered path string. + * + * @param string String to clean + * @return A cleaned String + */ + static String cleanString(String string) { + if (string == null) { + return null; + } + StringBuilder cleanString = new StringBuilder(); + for (int i = 0; i < string.length(); ++i) { + cleanString.append(cleanChar(string.charAt(i))); + } + return cleanString.toString(); + } + + /** + * This method filters the valid special characters in path string. 
+ * + * @param character The char to be cleaned + * @return The cleaned char + */ + private static char cleanChar(char character) { + // 0 - 9 + for (int i = 48; i < 58; ++i) { + if (character == i) { + return (char) i; + } + } + // 'A' - 'Z' + for (int i = 65; i < 91; ++i) { + if (character == i) { + return (char) i; + } + } + // 'a' - 'z' + for (int i = 97; i < 123; ++i) { + if (character == i) { + return (char) i; + } + } + return getValidCharacter(character); + } + + private static char getValidCharacter(char character) { + // other valid characters + switch (character) { + case '/': + return '/'; + case '.': + return '.'; + case '-': + return '-'; + case ':': + return ':'; + case '?': + return '?'; + case '&': + return '&'; + case '=': + return '='; + case '#': + return '#'; + case '_': + return '_'; + case ' ': + return ' '; + default: + return '%'; + } + } + } } diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java deleted file mode 100644 index 5cca7375..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java +++ /dev/null @@ -1,334 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID; -import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN; -import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS; -import static com.att.eelf.configuration.Configuration.MDC_SERVICE_NAME; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.net.InetAddress; -import java.security.KeyStore; -import java.security.KeyStoreException; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.security.cert.CertificateException; -import java.security.cert.X509Certificate; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Enumeration; -import java.util.TimeZone; -import java.util.UUID; -import java.util.zip.GZIPInputStream; -import javax.naming.InvalidNameException; -import javax.naming.ldap.LdapName; -import javax.naming.ldap.Rdn; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.lang3.StringUtils; -import 
org.onap.dmaap.datarouter.node.eelf.EelfMsgs; -import org.slf4j.MDC; - -/** - * Utility functions for the data router node. - */ -public class NodeUtils { - - private static final EELFLogger eelfLogger = EELFManager.getInstance().getLogger(NodeUtils.class); - - private NodeUtils() { - } - - /** - * Base64 encode a byte array. - * - * @param raw The bytes to be encoded - * @return The encoded string - */ - public static String base64Encode(byte[] raw) { - return (Base64.encodeBase64String(raw)); - } - - /** - * Given a user and password, generate the credentials. - * - * @param user User name - * @param password User password - * @return Authorization header value - */ - public static String getAuthHdr(String user, String password) { - if (user == null || password == null) { - return (null); - } - return ("Basic " + base64Encode((user + ":" + password).getBytes())); - } - - /** - * Given a node name, generate the credentials. - * - * @param node Node name - */ - public static String getNodeAuthHdr(String node, String key) { - try { - MessageDigest md = MessageDigest.getInstance("SHA-512"); - md.update(key.getBytes()); - md.update(node.getBytes()); - md.update(key.getBytes()); - return (getAuthHdr(node, base64Encode(md.digest()))); - } catch (Exception exception) { - eelfLogger - .error("Exception in generating Credentials for given node name:= " + exception.getMessage(), - exception); - return (null); - } - } - - /** - * Given a keystore file and its password, return the value of the CN of the first private key entry with a - * certificate. 
- * - * @param kstype The type of keystore - * @param ksfile The file name of the keystore - * @param kspass The password of the keystore - * @return CN of the certificate subject or null - */ - public static String getCanonicalName(String kstype, String ksfile, String kspass) { - KeyStore ks; - try { - ks = KeyStore.getInstance(kstype); - if (loadKeyStore(ksfile, kspass, ks)) { - return (null); - } - } catch (Exception e) { - setIpAndFqdnForEelf("getCanonicalName"); - eelfLogger.error(EelfMsgs.MESSAGE_KEYSTORE_LOAD_ERROR, e, ksfile); - return (null); - } - return (getCanonicalName(ks)); - } - - /** - * Given a keystore, return the value of the CN of the first private key entry with a certificate. - * - * @param ks The KeyStore - * @return CN of the certificate subject or null - */ - public static String getCanonicalName(KeyStore ks) { - try { - Enumeration aliases = ks.aliases(); - while (aliases.hasMoreElements()) { - String name = getNameFromSubject(ks, aliases); - if (name != null) { - return name; - } - } - } catch (Exception e) { - eelfLogger.error("NODE0402 Error extracting my name from my keystore file " + e.toString(), e); - } - return (null); - } - - /** - * Given a string representation of an IP address, get the corresponding byte array. - * - * @param ip The IP address as a string - * @return The IP address as a byte array or null if the address is invalid - */ - public static byte[] getInetAddress(String ip) { - try { - return (InetAddress.getByName(ip).getAddress()); - } catch (Exception exception) { - eelfLogger - .error("Exception in generating byte array for given IP address := " + exception.toString(), - exception); - } - return (null); - } - - /** - * Given a uri with parameters, split out the feed ID and file ID. 
- */ - public static String[] getFeedAndFileID(String uriandparams) { - int end = uriandparams.length(); - int index = uriandparams.indexOf('#'); - if (index != -1 && index < end) { - end = index; - } - index = uriandparams.indexOf('?'); - if (index != -1 && index < end) { - end = index; - } - end = uriandparams.lastIndexOf('/', end); - if (end < 2) { - return (null); - } - index = uriandparams.lastIndexOf('/', end - 1); - if (index == -1) { - return (null); - } - return (new String[]{uriandparams.substring(index + 1, end), uriandparams.substring(end + 1)}); - } - - /** - * Escape fields that might contain vertical bar, backslash, or newline by replacing them with backslash p, - * backslash e and backslash n. - */ - public static String loge(String string) { - if (string == null) { - return (string); - } - return (string.replaceAll("\\\\", "\\\\e").replaceAll("\\|", "\\\\p").replaceAll("\n", "\\\\n")); - } - - /** - * Undo what loge does. - */ - public static String unloge(String string) { - if (string == null) { - return (string); - } - return (string.replaceAll("\\\\p", "\\|").replaceAll("\\\\n", "\n").replaceAll("\\\\e", "\\\\")); - } - - /** - * Format a logging timestamp as yyyy-mm-ddThh:mm:ss.mmmZ - */ - public static String logts(long when) { - return (logts(new Date(when))); - } - - /** - * Format a logging timestamp as yyyy-mm-ddThh:mm:ss.mmmZ - */ - public static synchronized String logts(Date when) { - SimpleDateFormat logDate = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); - logDate.setTimeZone(TimeZone.getTimeZone("GMT")); - return (logDate.format(when)); - } - - /** Method prints method name, server FQDN and IP Address of the machine in EELF logs. - * - * @param method Prints method name in EELF log. 
- */ - public static void setIpAndFqdnForEelf(String method) { - MDC.clear(); - MDC.put(MDC_SERVICE_NAME, method); - try { - MDC.put(MDC_SERVER_FQDN, InetAddress.getLocalHost().getHostName()); - MDC.put(MDC_SERVER_IP_ADDRESS, InetAddress.getLocalHost().getHostAddress()); - } catch (Exception exception) { - eelfLogger - .error("Exception in generating byte array for given IP address := " + exception.toString(), - exception); - } - - } - - /** Method sets RequestIs and InvocationId for se in EELF logs. - * - * @param req Request used to get RequestId and InvocationId. - */ - public static void setRequestIdAndInvocationId(HttpServletRequest req) { - String reqId = req.getHeader("X-ONAP-RequestID"); - if (StringUtils.isBlank(reqId)) { - reqId = UUID.randomUUID().toString(); - } - MDC.put(MDC_KEY_REQUEST_ID, reqId); - String invId = req.getHeader("X-InvocationID"); - if (StringUtils.isBlank(invId)) { - invId = UUID.randomUUID().toString(); - } - MDC.put("InvocationId", invId); - } - - /** - * Sends error as response with error code input. - */ - public static void sendResponseError(HttpServletResponse response, int errorCode, EELFLogger intlogger) { - try { - response.sendError(errorCode); - } catch (IOException ioe) { - intlogger.error("IOException", ioe); - } - } - - /** - * Method to check to see if file is of type gzip. 
- * - * @param file The name of the file to be checked - * @return True if the file is of type gzip - */ - public static boolean isFiletypeGzip(File file) { - try (FileInputStream fileInputStream = new FileInputStream(file); - GZIPInputStream gzip = new GZIPInputStream(fileInputStream)) { - - return true; - } catch (IOException e) { - eelfLogger.error("NODE0403 " + file.toString() + " Not in gzip(gz) format: " + e.toString() + e); - return false; - } - } - - - private static boolean loadKeyStore(String ksfile, String kspass, KeyStore ks) - throws NoSuchAlgorithmException, CertificateException { - try (FileInputStream fileInputStream = new FileInputStream(ksfile)) { - ks.load(fileInputStream, kspass.toCharArray()); - } catch (IOException ioException) { - eelfLogger.error("IOException occurred while opening FileInputStream: " + ioException.getMessage(), - ioException); - return true; - } - return false; - } - - - private static String getNameFromSubject(KeyStore ks, Enumeration aliases) throws KeyStoreException { - String alias = aliases.nextElement(); - if (ks.entryInstanceOf(alias, KeyStore.PrivateKeyEntry.class)) { - X509Certificate cert = (X509Certificate) ks.getCertificate(alias); - if (cert != null) { - String subject = cert.getSubjectX500Principal().getName(); - try { - LdapName ln = new LdapName(subject); - for (Rdn rdn : ln.getRdns()) { - if (rdn.getType().equalsIgnoreCase("CN")) { - return rdn.getValue().toString(); - } - } - } catch (InvalidNameException e) { - eelfLogger.error("No valid CN not found for dr-node cert", e); - } - } - } - return null; - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java deleted file mode 100644 index fe3fdb6e..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java +++ /dev/null @@ -1,154 +0,0 @@ 
-/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import org.jetbrains.annotations.Nullable; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvHop; - -/** - * Given a set of node names and next hops, identify and ignore any cycles and figure out the sequence of next hops to - * get from this node to any other node. - */ - -class PathFinder { - - private ArrayList errors = new ArrayList<>(); - private HashMap routes = new HashMap<>(); - - /** - * Find routes from a specified origin to all of the nodes given a set of specified next hops. 
- * - * @param origin where we start - * @param nodes where we can go - * @param hops detours along the way - */ - PathFinder(String origin, String[] nodes, NodeConfig.ProvHop[] hops) { - HashSet known = new HashSet<>(); - HashMap> ht = new HashMap<>(); - for (String n : nodes) { - known.add(n); - ht.put(n, new HashMap<>()); - } - for (NodeConfig.ProvHop ph : hops) { - Hop hop = getHop(known, ht, ph); - if (hop == null) { - continue; - } - if (ph.getVia().equals(ph.getTo())) { - errors.add(ph + " gives destination as via"); - hop.bad = true; - } - } - for (String n : known) { - if (n.equals(origin)) { - routes.put(n, ""); - } - routes.put(n, plot(origin, n, ht.get(n)) + "/"); - } - } - - /** - * Get list of errors encountered while finding paths. - * - * @return array of error descriptions - */ - String[] getErrors() { - return (errors.toArray(new String[0])); - } - - /** - * Get the route from this node to the specified node. - * - * @param destination node - * @return list of node names separated by and ending with "/" - */ - String getPath(String destination) { - String ret = routes.get(destination); - if (ret == null) { - return (""); - } - return (ret); - } - - private String plot(String from, String to, HashMap info) { - Hop nh = info.get(from); - if (nh == null || nh.bad) { - return (to); - } - if (nh.mark) { - while (!nh.bad) { - nh.bad = true; - errors.add(nh.basis + " is part of a cycle"); - nh = info.get(nh.basis.getVia()); - } - return (to); - } - nh.mark = true; - String route = plot(nh.basis.getVia(), to, info); - nh.mark = false; - if (nh.bad) { - return (to); - } - return (nh.basis.getVia() + "/" + route); - } - - @Nullable - private Hop getHop(HashSet known, HashMap> ht, ProvHop ph) { - if (!known.contains(ph.getFrom())) { - errors.add(ph + " references unknown from node"); - return null; - } - if (!known.contains(ph.getTo())) { - errors.add(ph + " references unknown destination node"); - return null; - } - HashMap ht2 = ht.get(ph.getTo()); - Hop 
hop = ht2.get(ph.getFrom()); - if (hop != null) { - hop.bad = true; - errors.add(ph + " gives duplicate next hop - previous via was " + hop.basis.getVia()); - return null; - } - hop = new Hop(); - hop.basis = ph; - ht2.put(ph.getFrom(), hop); - if (!known.contains(ph.getVia())) { - errors.add(ph + " references unknown via node"); - hop.bad = true; - return null; - } - return hop; - } - - private static class Hop { - - boolean mark; - boolean bad; - NodeConfig.ProvHop basis; - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java deleted file mode 100644 index d67c9094..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java +++ /dev/null @@ -1,106 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (C) 2019 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ - -package org.onap.dmaap.datarouter.node; - -/** - * FORTIFY SCAN FIXES. - *

This Utility is used for Fortify fixes. It Validates the path url formed from - * the string passed in the request parameters.

- */ -class PathUtil { - - private PathUtil() { - throw new IllegalStateException("Utility Class"); - } - - /** - * This method takes String as the parameter and return the filtered path string. - * - * @param string String to clean - * @return A cleaned String - */ - static String cleanString(String string) { - if (string == null) { - return null; - } - StringBuilder cleanString = new StringBuilder(); - for (int i = 0; i < string.length(); ++i) { - cleanString.append(cleanChar(string.charAt(i))); - } - return cleanString.toString(); - } - - /** - * This method filters the valid special characters in path string. - * - * @param character The char to be cleaned - * @return The cleaned char - */ - private static char cleanChar(char character) { - // 0 - 9 - for (int i = 48; i < 58; ++i) { - if (character == i) { - return (char) i; - } - } - // 'A' - 'Z' - for (int i = 65; i < 91; ++i) { - if (character == i) { - return (char) i; - } - } - // 'a' - 'z' - for (int i = 97; i < 123; ++i) { - if (character == i) { - return (char) i; - } - } - return getValidCharacter(character); - } - - private static char getValidCharacter(char character) { - // other valid characters - switch (character) { - case '/': - return '/'; - case '.': - return '.'; - case '-': - return '-'; - case ':': - return ':'; - case '?': - return '?'; - case '&': - return '&'; - case '=': - return '='; - case '#': - return '#'; - case '_': - return '_'; - case ' ': - return ' '; - default: - return '%'; - } - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java deleted file mode 100644 index 2c205804..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java +++ /dev/null @@ -1,405 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * 
org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.IOException; -import java.io.Reader; -import java.util.ArrayList; -import org.jetbrains.annotations.Nullable; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; -import org.json.JSONTokener; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvFeed; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvFeedSubnet; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvFeedUser; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvForceEgress; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvForceIngress; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvHop; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvNode; -import org.onap.dmaap.datarouter.node.NodeConfig.ProvParam; -import 
org.onap.dmaap.datarouter.node.NodeConfig.ProvSubscription; -import org.onap.dmaap.datarouter.node.eelf.EelfMsgs; - -/** - * Parser for provisioning data from the provisioning server. - * - *

The ProvData class uses a Reader for the text configuration from the provisioning server to construct arrays of - * raw configuration entries. - */ -public class ProvData { - - private static final String FEED_ID = "feedid"; - - private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(ProvData.class); - private NodeConfig.ProvNode[] pn; - private NodeConfig.ProvParam[] pp; - private NodeConfig.ProvFeed[] pf; - private NodeConfig.ProvFeedUser[] pfu; - private NodeConfig.ProvFeedSubnet[] pfsn; - private NodeConfig.ProvSubscription[] ps; - private NodeConfig.ProvForceIngress[] pfi; - private NodeConfig.ProvForceEgress[] pfe; - private NodeConfig.ProvHop[] ph; - - /** - * Construct raw provisioing data entries from the text (JSON) provisioning document received from the provisioning - * server. - * - * @param reader The reader for the JSON text. - */ - public ProvData(Reader reader) throws IOException { - ArrayList pnv = new ArrayList<>(); - ArrayList ppv = new ArrayList<>(); - ArrayList pfv = new ArrayList<>(); - ArrayList pfuv = new ArrayList<>(); - ArrayList pfsnv = new ArrayList<>(); - ArrayList psv = new ArrayList<>(); - ArrayList pfiv = new ArrayList<>(); - ArrayList pfev = new ArrayList<>(); - ArrayList phv = new ArrayList<>(); - try { - JSONTokener jtx = new JSONTokener(reader); - JSONObject jcfg = new JSONObject(jtx); - char cch = jtx.nextClean(); - if (cch != '\0') { - throw new JSONException("Spurious characters following configuration"); - } - reader.close(); - addJSONFeeds(pfv, pfuv, pfsnv, jcfg); - addJSONSubs(psv, jcfg); - addJSONParams(pnv, ppv, jcfg); - addJSONRoutingInformation(pfiv, pfev, phv, jcfg); - } catch (JSONException jse) { - NodeUtils.setIpAndFqdnForEelf("ProvData"); - eelfLogger.error(EelfMsgs.MESSAGE_PARSING_ERROR, jse.toString()); - eelfLogger - .error("NODE0201 Error parsing configuration data from provisioning server " + jse.toString(), jse); - throw new IOException(jse.toString(), jse); - } - pn = pnv.toArray(new 
NodeConfig.ProvNode[pnv.size()]); - pp = ppv.toArray(new NodeConfig.ProvParam[ppv.size()]); - pf = pfv.toArray(new NodeConfig.ProvFeed[pfv.size()]); - pfu = pfuv.toArray(new NodeConfig.ProvFeedUser[pfuv.size()]); - pfsn = pfsnv.toArray(new NodeConfig.ProvFeedSubnet[pfsnv.size()]); - ps = psv.toArray(new NodeConfig.ProvSubscription[psv.size()]); - pfi = pfiv.toArray(new NodeConfig.ProvForceIngress[pfiv.size()]); - pfe = pfev.toArray(new NodeConfig.ProvForceEgress[pfev.size()]); - ph = phv.toArray(new NodeConfig.ProvHop[phv.size()]); - } - - private static String[] gvasa(JSONObject object, String key) { - return (gvasa(object.opt(key))); - } - - private static String[] gvasa(Object object) { - if (object instanceof JSONArray) { - JSONArray jsonArray = (JSONArray) object; - ArrayList array = new ArrayList<>(); - for (int i = 0; i < jsonArray.length(); i++) { - String string = gvas(jsonArray, i); - if (string != null) { - array.add(string); - } - } - return (array.toArray(new String[array.size()])); - } else { - String string = gvas(object); - if (string == null) { - return (new String[0]); - } else { - return (new String[]{string}); - } - } - } - - private static String gvas(JSONArray array, int index) { - return (gvas(array.get(index))); - } - - private static String gvas(JSONObject object, String key) { - return (gvas(object.opt(key))); - } - - private static String gvas(Object object) { - if (object instanceof Boolean || object instanceof Number || object instanceof String) { - return (object.toString()); - } - return (null); - } - - /** - * Get the raw node configuration entries. - */ - public NodeConfig.ProvNode[] getNodes() { - return (pn); - } - - /** - * Get the raw parameter configuration entries. - */ - public NodeConfig.ProvParam[] getParams() { - return (pp); - } - - /** - * Ge the raw feed configuration entries. - */ - public NodeConfig.ProvFeed[] getFeeds() { - return (pf); - } - - /** - * Get the raw feed user configuration entries. 
- */ - public NodeConfig.ProvFeedUser[] getFeedUsers() { - return (pfu); - } - - /** - * Get the raw feed subnet configuration entries. - */ - public NodeConfig.ProvFeedSubnet[] getFeedSubnets() { - return (pfsn); - } - - /** - * Get the raw subscription entries. - */ - public NodeConfig.ProvSubscription[] getSubscriptions() { - return (ps); - } - - /** - * Get the raw forced ingress entries. - */ - public NodeConfig.ProvForceIngress[] getForceIngress() { - return (pfi); - } - - /** - * Get the raw forced egress entries. - */ - public NodeConfig.ProvForceEgress[] getForceEgress() { - return (pfe); - } - - /** - * Get the raw next hop entries. - */ - public NodeConfig.ProvHop[] getHops() { - return (ph); - } - - @Nullable - private String getFeedStatus(JSONObject jfeed) { - String stat = null; - if (jfeed.optBoolean("suspend", false)) { - stat = "Feed is suspended"; - } - if (jfeed.optBoolean("deleted", false)) { - stat = "Feed is deleted"; - } - return stat; - } - - private void addJSONFeeds(ArrayList pfv, ArrayList pfuv, ArrayList pfsnv, - JSONObject jcfg) { - JSONArray jfeeds = jcfg.optJSONArray("feeds"); - if (jfeeds != null) { - for (int fx = 0; fx < jfeeds.length(); fx++) { - addJSONFeed(pfv, pfuv, pfsnv, jfeeds, fx); - } - } - } - - private void addJSONFeed(ArrayList pfv, ArrayList pfuv, ArrayList pfsnv, - JSONArray jfeeds, int fx) { - JSONObject jfeed = jfeeds.getJSONObject(fx); - String stat = getFeedStatus(jfeed); - String fid = gvas(jfeed, FEED_ID); - String fname = gvas(jfeed, "name"); - String fver = gvas(jfeed, "version"); - String createdDate = gvas(jfeed, "created_date"); - /* - * START - AAF changes - * TDP EPIC US# 307413 - * Passing aafInstance to ProvFeed from feeds json passed by prov to identify legacy/AAF feeds - */ - String aafInstance = gvas(jfeed, "aaf_instance"); - pfv.add(new ProvFeed(fid, fname + "//" + fver, stat, createdDate, aafInstance)); - /* - * END - AAF changes - */ - addJSONFeedAuthArrays(pfuv, pfsnv, jfeed, fid); - } - - 
private void addJSONFeedAuthArrays(ArrayList pfuv, ArrayList pfsnv, JSONObject jfeed, - String fid) { - JSONObject jauth = jfeed.optJSONObject("authorization"); - if (jauth == null) { - return; - } - JSONArray jeids = jauth.optJSONArray("endpoint_ids"); - if (jeids != null) { - for (int ux = 0; ux < jeids.length(); ux++) { - JSONObject ju = jeids.getJSONObject(ux); - String login = gvas(ju, "id"); - String password = gvas(ju, "password"); - pfuv.add(new ProvFeedUser(fid, login, NodeUtils.getAuthHdr(login, password))); - } - } - JSONArray jeips = jauth.optJSONArray("endpoint_addrs"); - if (jeips != null) { - for (int ix = 0; ix < jeips.length(); ix++) { - String sn = gvas(jeips, ix); - pfsnv.add(new ProvFeedSubnet(fid, sn)); - } - } - } - - private void addJSONSubs(ArrayList psv, JSONObject jcfg) { - JSONArray jsubs = jcfg.optJSONArray("subscriptions"); - if (jsubs != null) { - for (int sx = 0; sx < jsubs.length(); sx++) { - addJSONSub(psv, jsubs, sx); - } - } - } - - private void addJSONSub(ArrayList psv, JSONArray jsubs, int sx) { - JSONObject jsub = jsubs.getJSONObject(sx); - if (jsub.optBoolean("suspend", false)) { - return; - } - String sid = gvas(jsub, "subid"); - String fid = gvas(jsub, FEED_ID); - JSONObject jdel = jsub.getJSONObject("delivery"); - String delurl = gvas(jdel, "url"); - String id = gvas(jdel, "user"); - String password = gvas(jdel, "password"); - boolean monly = jsub.getBoolean("metadataOnly"); - boolean use100 = jdel.getBoolean("use100"); - boolean privilegedSubscriber = jsub.getBoolean("privilegedSubscriber"); - boolean decompress = jsub.getBoolean("decompress"); - boolean followRedirect = jsub.getBoolean("follow_redirect"); - psv.add(new ProvSubscription(sid, fid, delurl, id, NodeUtils.getAuthHdr(id, password), monly, use100, - privilegedSubscriber, followRedirect, decompress)); - } - - private void addJSONParams(ArrayList pnv, ArrayList ppv, JSONObject jcfg) { - JSONObject jparams = jcfg.optJSONObject("parameters"); - if (jparams != null) 
{ - for (String pname : JSONObject.getNames(jparams)) { - addJSONParam(ppv, jparams, pname); - } - addJSONNodesToParams(pnv, jparams); - } - } - - private void addJSONParam(ArrayList ppv, JSONObject jparams, String pname) { - String pvalue = gvas(jparams, pname); - if (pvalue != null) { - ppv.add(new ProvParam(pname, pvalue)); - } - } - - private void addJSONNodesToParams(ArrayList pnv, JSONObject jparams) { - String sfx = gvas(jparams, "PROV_DOMAIN"); - JSONArray jnodes = jparams.optJSONArray("NODES"); - if (jnodes != null) { - for (int nx = 0; nx < jnodes.length(); nx++) { - String nn = gvas(jnodes, nx); - if (nn == null) { - continue; - } - if (nn.indexOf('.') == -1) { - nn = nn + "." + sfx; - } - pnv.add(new ProvNode(nn)); - } - } - } - - private void addJSONRoutingInformation(ArrayList pfiv, ArrayList pfev, - ArrayList phv, JSONObject jcfg) { - JSONArray jingresses = jcfg.optJSONArray("ingress"); - if (jingresses != null) { - for (int fx = 0; fx < jingresses.length(); fx++) { - addJSONIngressRoute(pfiv, jingresses, fx); - } - } - JSONObject jegresses = jcfg.optJSONObject("egress"); - if (jegresses != null && JSONObject.getNames(jegresses) != null) { - for (String esid : JSONObject.getNames(jegresses)) { - addJSONEgressRoute(pfev, jegresses, esid); - } - } - JSONArray jhops = jcfg.optJSONArray("routing"); - if (jhops != null) { - for (int fx = 0; fx < jhops.length(); fx++) { - addJSONRoutes(phv, jhops, fx); - } - } - } - - private void addJSONIngressRoute(ArrayList pfiv, JSONArray jingresses, int fx) { - JSONObject jingress = jingresses.getJSONObject(fx); - String fid = gvas(jingress, FEED_ID); - String subnet = gvas(jingress, "subnet"); - String user = gvas(jingress, "user"); - if (fid == null || "".equals(fid)) { - return; - } - if ("".equals(subnet)) { - subnet = null; - } - if ("".equals(user)) { - user = null; - } - String[] nodes = gvasa(jingress, "node"); - pfiv.add(new ProvForceIngress(fid, subnet, user, nodes)); - } - - private void 
addJSONEgressRoute(ArrayList pfev, JSONObject jegresses, String esid) { - String enode = gvas(jegresses, esid); - if (esid != null && enode != null && !"".equals(esid) && !"".equals(enode)) { - pfev.add(new ProvForceEgress(esid, enode)); - } - } - - private void addJSONRoutes(ArrayList phv, JSONArray jhops, int fx) { - JSONObject jhop = jhops.getJSONObject(fx); - String from = gvas(jhop, "from"); - String to = gvas(jhop, "to"); - String via = gvas(jhop, "via"); - if (from == null || to == null || via == null || "".equals(from) || "".equals(to) || "".equals(via)) { - return; - } - phv.add(new ProvHop(from, to, via)); - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java deleted file mode 100644 index 1ffc9ec4..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java +++ /dev/null @@ -1,56 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. 
- * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -/** - * Generate publish IDs. - */ -public class PublishId { - - private long nextuid; - private final String myname; - - /** - * Generate publish IDs for the specified name. - * - * @param myname Unique identifier for this publish ID generator (usually fqdn of server) - */ - public PublishId(String myname) { - this.myname = myname; - } - - /** - * Generate a Data Router Publish ID that uniquely identifies the particular invocation of the Publish API for log - * correlation purposes. - */ - public synchronized String next() { - long now = System.currentTimeMillis(); - if (now < nextuid) { - now = nextuid; - } - nextuid = now + 1; - return (now + "." + myname); - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java deleted file mode 100644 index 2e646043..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java +++ /dev/null @@ -1,288 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - -package org.onap.dmaap.datarouter.node; - -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Logging for data router delivery events (PUB/DEL/EXP). - */ -public class StatusLog { - - private static final String EXCEPTION = "Exception"; - private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(StatusLog.class); - private static StatusLog instance = new StatusLog(); - private SimpleDateFormat filedate = new SimpleDateFormat("-yyyyMMddHHmm"); - - - private String prefix = "logs/events"; - private String suffix = ".log"; - private String plainfile; - private String curfile; - private long nexttime; - private OutputStream os; - private long intvl; - private static NodeConfigManager config = NodeConfigManager.getInstance(); - - private StatusLog() { - } - - /** - * Parse an interval of the form xxhyymzzs and round it to the nearest whole fraction of 24 hours.If no units are - * specified, assume seconds. 
- */ - public static long parseInterval(String interval, int def) { - try { - Matcher matcher = Pattern.compile("(?:(\\d+)[Hh])?(?:(\\d+)[Mm])?(?:(\\d+)[Ss]?)?").matcher(interval); - if (matcher.matches()) { - int dur = getDur(matcher); - int best = 86400; - int dist = best - dur; - if (dur > best) { - dist = dur - best; - } - best = getBest(dur, best, dist); - def = best * 1000; - } - } catch (Exception e) { - eelfLogger.error(EXCEPTION, e); - } - return (def); - } - - private static int getBest(int dur, int best, int dist) { - int base = 1; - for (int i = 0; i < 8; i++) { - int base2 = base; - base *= 2; - for (int j = 0; j < 4; j++) { - int base3 = base2; - base2 *= 3; - for (int k = 0; k < 3; k++) { - int cur = base3; - base3 *= 5; - int ndist = cur - dur; - if (dur > cur) { - ndist = dur - cur; - } - if (ndist < dist) { - best = cur; - dist = ndist; - } - } - } - } - return best; - } - - private static int getDur(Matcher matcher) { - int dur = 0; - String match = matcher.group(1); - if (match != null) { - dur += 3600 * Integer.parseInt(match); - } - match = matcher.group(2); - if (match != null) { - dur += 60 * Integer.parseInt(match); - } - match = matcher.group(3); - if (match != null) { - dur += Integer.parseInt(match); - } - if (dur < 60) { - dur = 60; - } - return dur; - } - - /** - * Get the name of the current log file. - * - * @return The full path name of the current event log file - */ - public static synchronized String getCurLogFile() { - try { - instance.checkRoll(System.currentTimeMillis()); - } catch (Exception e) { - eelfLogger.error(EXCEPTION, e); - } - return (instance.curfile); - } - - /** - * Log a received publication attempt. 
- * - * @param pubid The publish ID assigned by the node - * @param feedid The feed id given by the publisher - * @param requrl The URL of the received request - * @param method The method (DELETE or PUT) in the received request - * @param ctype The content type (if method is PUT and clen > 0) - * @param clen The content length (if method is PUT) - * @param srcip The IP address of the publisher - * @param user The identity of the publisher - * @param status The status returned to the publisher - */ - public static void logPub(String pubid, String feedid, String requrl, String method, String ctype, long clen, - String srcip, String user, int status) { - instance.log( - "PUB|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + srcip - + "|" + user + "|" + status); - eelfLogger.info("PUB|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" - + clen + "|" + srcip + "|" + user + "|" + status); - } - - /** - * Log a data transfer error receiving a publication attempt. 
- * - * @param pubid The publish ID assigned by the node - * @param feedid The feed id given by the publisher - * @param requrl The URL of the received request - * @param method The method (DELETE or PUT) in the received request - * @param ctype The content type (if method is PUT and clen > 0) - * @param clen The expected content length (if method is PUT) - * @param rcvd The content length received - * @param srcip The IP address of the publisher - * @param user The identity of the publisher - * @param error The error message from the IO exception - */ - public static void logPubFail(String pubid, String feedid, String requrl, String method, String ctype, long clen, - long rcvd, String srcip, String user, String error) { - instance.log("PBF|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + rcvd - + "|" + srcip + "|" + user + "|" + error); - eelfLogger.info("PBF|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen - + "|" + rcvd + "|" + srcip + "|" + user + "|" + error); - } - - /** - * Log a delivery attempt. 
- * - * @param pubid The publish ID assigned by the node - * @param feedid The feed ID - * @param subid The (space delimited list of) subscription ID - * @param requrl The URL used in the attempt - * @param method The method (DELETE or PUT) in the attempt - * @param ctype The content type (if method is PUT, not metaonly, and clen > 0) - * @param clen The content length (if PUT and not metaonly) - * @param user The identity given to the subscriber - * @param status The status returned by the subscriber or -1 if an exeception occured trying to connect - * @param xpubid The publish ID returned by the subscriber - */ - public static void logDel(String pubid, String feedid, String subid, String requrl, String method, String ctype, - long clen, String user, int status, String xpubid) { - if (feedid == null) { - return; - } - instance.log( - "DEL|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen - + "|" + user + "|" + status + "|" + xpubid); - eelfLogger.info("DEL|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" - + ctype + "|" + clen + "|" + user + "|" + status + "|" + xpubid); - } - - /** - * Log delivery attempts expired. 
- * - * @param pubid The publish ID assigned by the node - * @param feedid The feed ID - * @param subid The (space delimited list of) subscription ID - * @param requrl The URL that would be delivered to - * @param method The method (DELETE or PUT) in the request - * @param ctype The content type (if method is PUT, not metaonly, and clen > 0) - * @param clen The content length (if PUT and not metaonly) - * @param reason The reason the attempts were discontinued - * @param attempts The number of attempts made - */ - public static void logExp(String pubid, String feedid, String subid, String requrl, String method, String ctype, - long clen, String reason, int attempts) { - if (feedid == null) { - return; - } - instance.log( - "EXP|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen - + "|" + reason + "|" + attempts); - eelfLogger.info("EXP|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" - + ctype + "|" + clen + "|" + reason + "|" + attempts); - } - - /** - * Log extra statistics about unsuccessful delivery attempts. - * - * @param pubid The publish ID assigned by the node - * @param feedid The feed ID - * @param subid The (space delimited list of) subscription ID - * @param clen The content length - * @param sent The # of bytes sent or -1 if subscriber returned an error instead of 100 Continue, otherwise, the - * number of bytes sent before an error occurred. 
- */ - public static void logDelExtra(String pubid, String feedid, String subid, long clen, long sent) { - if (feedid == null) { - return; - } - instance.log("DLX|" + pubid + "|" + feedid + "|" + subid + "|" + clen + "|" + sent); - eelfLogger.info("DLX|" + pubid + "|" + feedid + "|" + subid + "|" + clen + "|" + sent); - } - - private synchronized void checkRoll(long now) throws IOException { - if (now >= nexttime) { - if (os != null) { - os.close(); - os = null; - } - intvl = parseInterval(config.getEventLogInterval(), 300000); - prefix = config.getEventLogPrefix(); - suffix = config.getEventLogSuffix(); - nexttime = now - now % intvl + intvl; - curfile = prefix + filedate.format(new Date(nexttime - intvl)) + suffix; - plainfile = prefix + suffix; - notifyAll(); - } - } - - private synchronized void log(String string) { - try { - long now = System.currentTimeMillis(); - checkRoll(now); - if (os == null) { - os = new FileOutputStream(curfile, true); - Files.deleteIfExists(new File(plainfile).toPath()); - Files.createLink(Paths.get(plainfile), Paths.get(curfile)); - } - os.write((NodeUtils.logts(new Date(now)) + '|' + string + '\n').getBytes()); - os.flush(); - } catch (IOException ioe) { - eelfLogger.error("IOException", ioe); - } - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java deleted file mode 100644 index 2f510120..00000000 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java +++ /dev/null @@ -1,74 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START================================================== - * * org.onap.dmaap - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ - - -package org.onap.dmaap.datarouter.node; - -/** - * Compare IP addresses as byte arrays to a subnet specified as a CIDR. - */ -public class SubnetMatcher { - - private byte[] sn; - private int len; - private int mask; - - /** - * Construct a subnet matcher given a CIDR. - * - * @param subnet The CIDR to match - */ - public SubnetMatcher(String subnet) { - int index = subnet.lastIndexOf('/'); - if (index == -1) { - sn = NodeUtils.getInetAddress(subnet); - len = sn.length; - } else { - len = Integer.parseInt(subnet.substring(index + 1)); - sn = NodeUtils.getInetAddress(subnet.substring(0, index)); - mask = ((0xff00) >> (len % 8)) & 0xff; - len /= 8; - } - } - - /** - * Is the IP address in the CIDR. - * - * @param addr the IP address as bytes in network byte order - * @return true if the IP address matches. 
- */ - public boolean matches(byte[] addr) { - if (addr.length != sn.length) { - return (false); - } - for (int i = 0; i < len; i++) { - if (addr[i] != sn[i]) { - return (false); - } - } - if (mask != 0 && ((addr[len] ^ sn[len]) & mask) != 0) { - return (false); - } - return (true); - } -} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java index a77277f2..26031854 100644 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java @@ -41,7 +41,7 @@ import java.util.Iterator; class TaskList { private Iterator runlist; - private HashSet tasks = new HashSet<>(); + private final HashSet tasks = new HashSet<>(); private HashSet togo; private HashSet sofar; private HashSet added; diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/NodeConfig.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/NodeConfig.java new file mode 100644 index 00000000..33b7bd0e --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/NodeConfig.java @@ -0,0 +1,959 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. 
+ * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.config; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import org.jetbrains.annotations.NotNull; +import org.onap.dmaap.datarouter.node.DestInfo; +import org.onap.dmaap.datarouter.node.DestInfoBuilder; +import org.onap.dmaap.datarouter.node.IsFrom; +import org.onap.dmaap.datarouter.node.Target; +import org.onap.dmaap.datarouter.node.utils.NodeUtils; + +/** + * Processed configuration for this node. + * + *

The NodeConfig represents a processed configuration from the Data Router provisioning server. Each time + * configuration data is received from the provisioning server, a new NodeConfig is created and the previous one + * discarded. + */ +public class NodeConfig { + + private static final String PUBLISHER_NOT_PERMITTED = "Publisher not permitted for this feed"; + private static final EELFLogger logger = EELFManager.getInstance().getLogger(NodeConfig.class); + private final HashMap params = new HashMap<>(); + private final HashMap feeds = new HashMap<>(); + private final HashMap nodeinfo = new HashMap<>(); + private final HashMap subinfo = new HashMap<>(); + private final HashMap nodes = new HashMap<>(); + private final HashMap provSubscriptions = new HashMap<>(); + private final String myname; + private String myauth; + private final DestInfo[] alldests; + private int rrcntr; + + /** + * Process the raw provisioning data to configure this node. + * + * @param pd The parsed provisioning data + * @param myname My name as seen by external systems + * @param spooldir The directory where temporary files live + * @param port The port number for URLs + * @param nodeauthkey The keying string used to generate node authentication credentials + */ + public NodeConfig(ProvData pd, String myname, String spooldir, int port, String nodeauthkey) { + this.myname = myname; + for (ProvParam p : pd.getParams()) { + params.put(p.getName(), p.getValue()); + } + ArrayList destInfos = addDestInfoToNodeConfig(pd, myname, spooldir, port, nodeauthkey); + PathFinder pf = new PathFinder(myname, nodeinfo.keySet().toArray(new String[0]), pd.getHops()); + HashMap> rdtab = addSubRedirInfoToNodeConfig(pd); + HashMap> pfutab = addFeedUsersToNodeConfig(pd); + HashMap egrtab = addEgressRoutesToNodeConfig(pd, myname); + HashMap> pfstab = addFeedSubnetToNodeConfig(pd); + HashSet allfeeds = addFeedsToNodeConfig(pd); + HashMap feedTargets = addSubsToNodeConfig(pd, spooldir, destInfos, pf, egrtab, 
allfeeds); + alldests = destInfos.toArray(new DestInfo[0]); + addFeedTargetsToNodeConfig(pd, rdtab, pfutab, pfstab, feedTargets); + } + + @NotNull + private ArrayList addDestInfoToNodeConfig(ProvData pd, String myname, String spooldir, int port, + String nodeauthkey) { + ArrayList destInfos = new ArrayList<>(); + myauth = NodeUtils.getNodeAuthHdr(myname, nodeauthkey); + for (ProvNode pn : pd.getNodes()) { + String commonName = pn.getCName(); + if (nodeinfo.get(commonName) != null) { + continue; + } + DestInfo di = new DestInfoBuilder().setName("n:" + commonName).setSpool(spooldir + "/n/" + commonName) + .setSubid(null) + .setLogdata("n2n-" + commonName).setUrl("https://" + commonName + ":" + port + "/internal/publish") + .setAuthuser(commonName).setAuthentication(myauth).setMetaonly(false).setUse100(true) + .setPrivilegedSubscriber(false).setFollowRedirects(false).setDecompress(false).createDestInfo(); + (new File(di.getSpool())).mkdirs(); + String auth = NodeUtils.getNodeAuthHdr(commonName, nodeauthkey); + destInfos.add(di); + nodeinfo.put(commonName, di); + nodes.put(auth, new IsFrom(commonName)); + } + return destInfos; + } + + @NotNull + private HashMap> addSubRedirInfoToNodeConfig(ProvData pd) { + HashMap> rdtab = new HashMap<>(); + for (ProvForceIngress pfi : pd.getForceIngress()) { + ArrayList redirections = rdtab.get(pfi.getFeedId()); + if (redirections == null) { + redirections = new ArrayList<>(); + rdtab.put(pfi.getFeedId(), redirections); + } + Redirection redirection = new Redirection(); + if (pfi.getSubnet() != null) { + redirection.snm = new SubnetMatcher(pfi.getSubnet()); + } + redirection.user = pfi.getUser(); + redirection.nodes = pfi.getNodes(); + redirections.add(redirection); + } + return rdtab; + } + + @NotNull + private HashMap> addFeedUsersToNodeConfig(ProvData pd) { + HashMap> pfutab = new HashMap<>(); + for (ProvFeedUser pfu : pd.getFeedUsers()) { + HashMap userInfo = pfutab.get(pfu.getFeedId()); + if (userInfo == null) { + userInfo = new 
HashMap<>(); + pfutab.put(pfu.getFeedId(), userInfo); + } + userInfo.put(pfu.getCredentials(), pfu.getUser()); + } + return pfutab; + } + + @NotNull + private HashMap addEgressRoutesToNodeConfig(ProvData pd, String myname) { + HashMap egrtab = new HashMap<>(); + for (ProvForceEgress pfe : pd.getForceEgress()) { + if (pfe.getNode().equals(myname) || nodeinfo.get(pfe.getNode()) == null) { + continue; + } + egrtab.put(pfe.getSubId(), pfe.getNode()); + } + return egrtab; + } + + @NotNull + private HashMap> addFeedSubnetToNodeConfig(ProvData pd) { + HashMap> pfstab = new HashMap<>(); + for (ProvFeedSubnet pfs : pd.getFeedSubnets()) { + ArrayList subnetMatchers = pfstab.get(pfs.getFeedId()); + if (subnetMatchers == null) { + subnetMatchers = new ArrayList<>(); + pfstab.put(pfs.getFeedId(), subnetMatchers); + } + subnetMatchers.add(new SubnetMatcher(pfs.getCidr())); + } + return pfstab; + } + + @NotNull + private HashSet addFeedsToNodeConfig(ProvData pd) { + HashSet allfeeds = new HashSet<>(); + for (ProvFeed pfx : pd.getFeeds()) { + if (pfx.getStatus() == null) { + allfeeds.add(pfx.getId()); + } + } + return allfeeds; + } + + @NotNull + private HashMap addSubsToNodeConfig(ProvData pd, String spooldir, + ArrayList destInfos, PathFinder pf, HashMap egrtab, HashSet allfeeds) { + HashMap feedTargets = new HashMap<>(); + for (ProvSubscription provSubscription : pd.getSubscriptions()) { + String subId = provSubscription.getSubId(); + String feedId = provSubscription.getFeedId(); + if (isFeedOrSubKnown(allfeeds, subId, feedId)) { + continue; + } + int sididx = 999; + try { + sididx = Integer.parseInt(subId); + sididx -= sididx % 100; + } catch (Exception e) { + logger.error("NODE0517 Exception NodeConfig: " + e); + } + String subscriptionDirectory = sididx + "/" + subId; + DestInfo destinationInfo = new DestInfo("s:" + subId, + spooldir + "/s/" + subscriptionDirectory, provSubscription); + (new File(destinationInfo.getSpool())).mkdirs(); + destInfos.add(destinationInfo); + 
provSubscriptions.put(subId, provSubscription); + subinfo.put(subId, destinationInfo); + String egr = egrtab.get(subId); + if (egr != null) { + subId = pf.getPath(egr) + subId; + } + StringBuilder sb = feedTargets.get(feedId); + if (sb == null) { + sb = new StringBuilder(); + feedTargets.put(feedId, sb); + } + sb.append(' ').append(subId); + } + return feedTargets; + } + + private void addFeedTargetsToNodeConfig(ProvData pd, HashMap> rdtab, + HashMap> pfutab, HashMap> pfstab, + HashMap feedTargets) { + for (ProvFeed pfx : pd.getFeeds()) { + String fid = pfx.getId(); + Feed feed = feeds.get(fid); + if (feed != null) { + continue; + } + feed = new Feed(); + feeds.put(fid, feed); + feed.createdDate = pfx.getCreatedDate(); + feed.loginfo = pfx.getLogData(); + feed.status = pfx.getStatus(); + ArrayList v1 = pfstab.get(fid); + if (v1 == null) { + feed.subnets = new SubnetMatcher[0]; + } else { + feed.subnets = v1.toArray(new SubnetMatcher[0]); + } + HashMap h1 = pfutab.get(fid); + if (h1 == null) { + h1 = new HashMap(); + } + feed.authusers = h1; + ArrayList v2 = rdtab.get(fid); + if (v2 == null) { + feed.redirections = new Redirection[0]; + } else { + feed.redirections = v2.toArray(new Redirection[0]); + } + StringBuilder sb = feedTargets.get(fid); + if (sb == null) { + feed.targets = new Target[0]; + } else { + feed.targets = parseRouting(sb.toString()); + } + } + } + + /** + * Parse a target string into an array of targets. + * + * @param routing Target string + * @return Array of targets. 
+ */ + public Target[] parseRouting(String routing) { + routing = routing.trim(); + if ("".equals(routing)) { + return (new Target[0]); + } + String[] routingTable = routing.split("\\s+"); + HashMap tmap = new HashMap<>(); + HashSet subset = new HashSet<>(); + ArrayList targets = new ArrayList<>(); + for (int i = 0; i < routingTable.length; i++) { + String target = routingTable[i]; + int index = target.indexOf('/'); + if (index == -1) { + addTarget(subset, targets, target); + } else { + addTargetWithRouting(tmap, targets, target, index); + } + } + return (targets.toArray(new Target[0])); + } + + /** + * Check whether this is a valid node-to-node transfer. + * + * @param credentials Credentials offered by the supposed node + * @param ip IP address the request came from + */ + public boolean isAnotherNode(String credentials, String ip) { + IsFrom node = nodes.get(credentials); + return (node != null && node.isFrom(ip)); + } + + /** + * Check whether publication is allowed. + * + * @param feedid The ID of the feed being requested. + * @param credentials The offered credentials + * @param ip The requesting IP address + */ + public String isPublishPermitted(String feedid, String credentials, String ip) { + Feed feed = feeds.get(feedid); + String nf = "Feed does not exist"; + if (feed != null) { + nf = feed.status; + } + if (nf != null) { + return (nf); + } + String user = feed.authusers.get(credentials); + if (user == null) { + return (PUBLISHER_NOT_PERMITTED); + } + if (feed.subnets.length == 0) { + return (null); + } + byte[] addr = NodeUtils.getInetAddress(ip); + for (SubnetMatcher snm : feed.subnets) { + if (snm.matches(addr)) { + return (null); + } + } + return (PUBLISHER_NOT_PERMITTED); + } + + /** + * Check whether delete file is allowed. + * + * @param subId The ID of the subscription being requested. 
+ */ + public boolean isDeletePermitted(String subId) { + ProvSubscription provSubscription = provSubscriptions.get(subId); + return provSubscription.isPrivilegedSubscriber(); + } + + /** + * Get authenticated user. + */ + public String getAuthUser(String feedid, String credentials) { + return (feeds.get(feedid).authusers.get(credentials)); + } + + /** + * Check if the request should be redirected to a different ingress node. + */ + public String getIngressNode(String feedid, String user, String ip) { + Feed feed = feeds.get(feedid); + if (feed.redirections.length == 0) { + return (null); + } + byte[] addr = NodeUtils.getInetAddress(ip); + for (Redirection r : feed.redirections) { + if ((r.user != null && !user.equals(r.user)) || (r.snm != null && !r.snm.matches(addr))) { + continue; + } + for (String n : r.nodes) { + if (myname.equals(n)) { + return (null); + } + } + if (r.nodes.length == 0) { + return (null); + } + return (r.nodes[rrcntr++ % r.nodes.length]); + } + return (null); + } + + /** + * Get a provisioned configuration parameter. + */ + public String getProvParam(String name) { + return (params.get(name)); + } + + /** + * Get all the DestInfos. + */ + public DestInfo[] getAllDests() { + return (alldests); + } + + /** + * Get the targets for a feed. + * + * @param feedid The feed ID + * @return The targets this feed should be delivered to + */ + public Target[] getTargets(String feedid) { + if (feedid == null) { + return (new Target[0]); + } + Feed feed = feeds.get(feedid); + if (feed == null) { + return (new Target[0]); + } + return (feed.targets); + } + + /** + * Get the creation date for a feed. + * + * @param feedid The feed ID + * @return the timestamp of creation date of feed id passed + */ + public String getCreatedDate(String feedid) { + Feed feed = feeds.get(feedid); + return (feed.createdDate); + } + + /** + * Get the feed ID for a subscription. 
+ * + * @param subid The subscription ID + * @return The feed ID + */ + public String getFeedId(String subid) { + DestInfo di = subinfo.get(subid); + if (di == null) { + return (null); + } + return (di.getLogData()); + } + + /** + * Get the spool directory for a subscription. + * + * @param subid The subscription ID + * @return The spool directory + */ + public String getSpoolDir(String subid) { + DestInfo di = subinfo.get(subid); + if (di == null) { + return (null); + } + return (di.getSpool()); + } + + /** + * Get the Authorization value this node uses. + * + * @return The Authorization header value for this node + */ + public String getMyAuth() { + return (myauth); + } + + private boolean isFeedOrSubKnown(HashSet allfeeds, String subId, String feedId) { + return !allfeeds.contains(feedId) || subinfo.get(subId) != null; + } + + private void addTargetWithRouting(HashMap tmap, ArrayList targets, String target, + int index) { + String node = target.substring(0, index); + String rtg = target.substring(index + 1); + DestInfo di = nodeinfo.get(node); + if (di == null) { + targets.add(new Target(null, target)); + } else { + Target tt = tmap.get(node); + if (tt == null) { + tt = new Target(di, rtg); + tmap.put(node, tt); + targets.add(tt); + } else { + tt.addRouting(rtg); + } + } + } + + private void addTarget(HashSet subset, ArrayList targets, String target) { + DestInfo destInfo = subinfo.get(target); + if (destInfo == null) { + targets.add(new Target(null, target)); + } else { + if (!subset.contains(target)) { + subset.add(target); + targets.add(new Target(destInfo, null)); + } + } + } + + /** + * Raw configuration entry for a data router node. + */ + public static class ProvNode { + + private String cname; + + /** + * Construct a node configuration entry. + * + * @param cname The cname of the node. + */ + public ProvNode(String cname) { + this.cname = cname; + } + + /** + * Get the cname of the node. 
+ */ + public String getCName() { + return (cname); + } + } + + /** + * Raw configuration entry for a provisioning parameter. + */ + public static class ProvParam { + + private String name; + private String value; + + /** + * Construct a provisioning parameter configuration entry. + * + * @param name The name of the parameter. + * @param value The value of the parameter. + */ + public ProvParam(String name, String value) { + this.name = name; + this.value = value; + } + + /** + * Get the name of the parameter. + */ + public String getName() { + return (name); + } + + /** + * Get the value of the parameter. + */ + public String getValue() { + return (value); + } + } + + /** + * Raw configuration entry for a data feed. + */ + public static class ProvFeed { + + private final String id; + private final String logdata; + private final String status; + private final String createdDate; + + /** + * Construct a feed configuration entry. + * + * @param id The feed ID of the entry. + * @param logdata String for log entries about the entry. + * @param status The reason why this feed cannot be used (Feed has been deleted, Feed has been suspended) or + * null if it is valid. + */ + public ProvFeed(String id, String logdata, String status, String createdDate) { + this.id = id; + this.logdata = logdata; + this.status = status; + this.createdDate = createdDate; + } + + /** + * Get the created date of the data feed. + */ + public String getCreatedDate() { + return (createdDate); + } + + /** + * Get the feed id of the data feed. + */ + public String getId() { + return (id); + } + + /** + * Get the log data of the data feed. + */ + public String getLogData() { + return (logdata); + } + + /** + * Get the status of the data feed. + */ + public String getStatus() { + return (status); + } + } + + /** + * Raw configuration entry for a feed user. 
+ */ + public static class ProvFeedUser { + + private final String feedid; + private final String user; + private final String credentials; + + /** + * Construct a feed user configuration entry. + * + * @param feedid The feed id. + * @param user The user that will publish to the feed. + * @param credentials The Authorization header the user will use to publish. + */ + public ProvFeedUser(String feedid, String user, String credentials) { + this.feedid = feedid; + this.user = user; + this.credentials = credentials; + } + + /** + * Get the feed id of the feed user. + */ + public String getFeedId() { + return (feedid); + } + + /** + * Get the user for the feed user. + */ + public String getUser() { + return (user); + } + + /** + * Get the credentials for the feed user. + */ + public String getCredentials() { + return (credentials); + } + } + + /** + * Raw configuration entry for a feed subnet. + */ + public static class ProvFeedSubnet { + + private final String feedid; + private final String cidr; + + /** + * Construct a feed subnet configuration entry. + * + * @param feedid The feed ID + * @param cidr The CIDR allowed to publish to the feed. + */ + public ProvFeedSubnet(String feedid, String cidr) { + this.feedid = feedid; + this.cidr = cidr; + } + + /** + * Get the feed id of the feed subnet. + */ + public String getFeedId() { + return (feedid); + } + + /** + * Get the CIDR of the feed subnet. + */ + public String getCidr() { + return (cidr); + } + } + + /** + * Raw configuration entry for a subscription. + */ + public static class ProvSubscription { + + private final String subid; + private final String feedid; + private final String url; + private final String authuser; + private final String credentials; + private final boolean metaonly; + private final boolean use100; + private final boolean privilegedSubscriber; + private final boolean followRedirect; + private final boolean decompress; + + /** + * Construct a subscription configuration entry. 
+ * + * @param subid The subscription ID + * @param feedid The feed ID + * @param url The base delivery URL (not including the fileid) + * @param authuser The user in the credentials used to deliver + * @param credentials The credentials used to authenticate to the delivery URL exactly as they go in the + * Authorization header. + * @param metaonly Is this a meta data only subscription? + * @param use100 Should we send Expect: 100-continue? + * @param privilegedSubscriber Can we wait to receive a delete file call before deleting file + * @param followRedirect Is follow redirect of destination enabled? + * @param decompress To see if they want their information compressed or decompressed + */ + public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials, + boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean followRedirect, + boolean decompress) { + this.subid = subid; + this.feedid = feedid; + this.url = url; + this.authuser = authuser; + this.credentials = credentials; + this.metaonly = metaonly; + this.use100 = use100; + this.privilegedSubscriber = privilegedSubscriber; + this.followRedirect = followRedirect; + this.decompress = decompress; + } + + /** + * Get the subscription ID. + */ + public String getSubId() { + return (subid); + } + + /** + * Get the feed ID. + */ + public String getFeedId() { + return (feedid); + } + + /** + * Get the delivery URL. + */ + public String getURL() { + return (url); + } + + /** + * Get the user. + */ + public String getAuthUser() { + return (authuser); + } + + /** + * Get the delivery credentials. + */ + public String getCredentials() { + return (credentials); + } + + /** + * Is this a meta data only subscription. + */ + public boolean isMetaDataOnly() { + return (metaonly); + } + + /** + * Should we send Expect: 100-continue. + */ + public boolean isUsing100() { + return (use100); + } + + /** + * Can we wait to receive a delete file call before deleting file. 
+ */ + public boolean isPrivilegedSubscriber() { + return (privilegedSubscriber); + } + + /** + * Should I decompress the file before sending it on. + */ + public boolean isDecompress() { + return (decompress); + } + + /** + * New field is added - FOLLOW_REDIRECTS feature iTrack:DATARTR-17 - 1706 Get the followRedirect of this + * destination. + */ + public boolean getFollowRedirect() { + return (followRedirect); + } + } + + /** + * Raw configuration entry for controlled ingress to the data router node. + */ + public static class ProvForceIngress { + + private final String feedid; + private final String subnet; + private final String user; + private final String[] nodes; + + /** + * Construct a forced ingress configuration entry. + * + * @param feedid The feed ID that this entry applies to + * @param subnet The CIDR for which publisher IP addresses this entry applies to or "" if it applies to all + * publisher IP addresses + * @param user The publishing user this entry applies to or "" if it applies to all publishing users. + * @param nodes The array of FQDNs of the data router nodes to redirect publication attempts to. + */ + public ProvForceIngress(String feedid, String subnet, String user, String[] nodes) { + this.feedid = feedid; + this.subnet = subnet; + this.user = user; + //Sonar fix + if (nodes == null) { + this.nodes = new String[0]; + } else { + this.nodes = Arrays.copyOf(nodes, nodes.length); + } + } + + /** + * Get the feed ID. + */ + public String getFeedId() { + return (feedid); + } + + /** + * Get the subnet. + */ + public String getSubnet() { + return (subnet); + } + + /** + * Get the user. + */ + public String getUser() { + return (user); + } + + /** + * Get the node. + */ + public String[] getNodes() { + return (nodes); + } + } + + /** + * Raw configuration entry for controlled egress from the data router. 
+ */ + public static class ProvForceEgress { + + private final String subid; + private final String node; + + /** + * Construct a forced egress configuration entry. + * + * @param subid The subscription ID the subscription with forced egress + * @param node The node handling deliveries for this subscription + */ + public ProvForceEgress(String subid, String node) { + this.subid = subid; + this.node = node; + } + + /** + * Get the subscription ID. + */ + public String getSubId() { + return (subid); + } + + /** + * Get the node. + */ + public String getNode() { + return (node); + } + } + + /** + * Raw configuration entry for routing within the data router network. + */ + public static class ProvHop { + + private final String from; + private final String to; + private final String via; + + /** + * Construct a hop entry. + * + * @param from The FQDN of the node with the data to be delivered + * @param to The FQDN of the node that will deliver to the subscriber + * @param via The FQDN of the node where the from node should send the data + */ + public ProvHop(String from, String to, String via) { + this.from = from; + this.to = to; + this.via = via; + } + + /** + * A human readable description of this entry. + */ + public String toString() { + return ("Hop " + from + "->" + to + " via " + via); + } + + /** + * Get the from node. + */ + public String getFrom() { + return (from); + } + + /** + * Get the to node. + */ + public String getTo() { + return (to); + } + + /** + * Get the next intermediate node. 
+ */ + public String getVia() { + return (via); + } + } + + private static class Redirection { + + SubnetMatcher snm; + String user; + String[] nodes; + } + + private static class Feed { + + String loginfo; + String status; + SubnetMatcher[] subnets; + HashMap authusers = new HashMap<>(); + Redirection[] redirections; + Target[] targets; + String createdDate; + } +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/PathFinder.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/PathFinder.java new file mode 100644 index 00000000..b911df2c --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/PathFinder.java @@ -0,0 +1,155 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.config; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import org.jetbrains.annotations.Nullable; +import org.onap.dmaap.datarouter.node.config.NodeConfig; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvHop; + +/** + * Given a set of node names and next hops, identify and ignore any cycles and figure out the sequence of next hops to + * get from this node to any other node. + */ + +public class PathFinder { + + private final ArrayList errors = new ArrayList<>(); + private final HashMap routes = new HashMap<>(); + + /** + * Find routes from a specified origin to all of the nodes given a set of specified next hops. + * + * @param origin where we start + * @param nodes where we can go + * @param hops detours along the way + */ + public PathFinder(String origin, String[] nodes, NodeConfig.ProvHop[] hops) { + HashSet known = new HashSet<>(); + HashMap> ht = new HashMap<>(); + for (String n : nodes) { + known.add(n); + ht.put(n, new HashMap<>()); + } + for (NodeConfig.ProvHop ph : hops) { + Hop hop = getHop(known, ht, ph); + if (hop == null) { + continue; + } + if (ph.getVia().equals(ph.getTo())) { + errors.add(ph + " gives destination as via"); + hop.bad = true; + } + } + for (String n : known) { + if (n.equals(origin)) { + routes.put(n, ""); + } + routes.put(n, plot(origin, n, ht.get(n)) + "/"); + } + } + + /** + * Get list of errors encountered while finding paths. + * + * @return array of error descriptions + */ + public String[] getErrors() { + return (errors.toArray(new String[0])); + } + + /** + * Get the route from this node to the specified node. 
+ * + * @param destination node + * @return list of node names separated by and ending with "/" + */ + public String getPath(String destination) { + String ret = routes.get(destination); + if (ret == null) { + return (""); + } + return (ret); + } + + private String plot(String from, String to, HashMap info) { + Hop nh = info.get(from); + if (nh == null || nh.bad) { + return (to); + } + if (nh.mark) { + while (!nh.bad) { + nh.bad = true; + errors.add(nh.basis + " is part of a cycle"); + nh = info.get(nh.basis.getVia()); + } + return (to); + } + nh.mark = true; + String route = plot(nh.basis.getVia(), to, info); + nh.mark = false; + if (nh.bad) { + return (to); + } + return (nh.basis.getVia() + "/" + route); + } + + @Nullable + private Hop getHop(HashSet known, HashMap> ht, ProvHop ph) { + if (!known.contains(ph.getFrom())) { + errors.add(ph + " references unknown from node"); + return null; + } + if (!known.contains(ph.getTo())) { + errors.add(ph + " references unknown destination node"); + return null; + } + HashMap ht2 = ht.get(ph.getTo()); + Hop hop = ht2.get(ph.getFrom()); + if (hop != null) { + hop.bad = true; + errors.add(ph + " gives duplicate next hop - previous via was " + hop.basis.getVia()); + return null; + } + hop = new Hop(); + hop.basis = ph; + ht2.put(ph.getFrom(), hop); + if (!known.contains(ph.getVia())) { + errors.add(ph + " references unknown via node"); + hop.bad = true; + return null; + } + return hop; + } + + private static class Hop { + + boolean mark; + boolean bad; + NodeConfig.ProvHop basis; + } +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/ProvData.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/ProvData.java new file mode 100644 index 00000000..1c584fd7 --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/ProvData.java @@ -0,0 +1,397 @@ +/******************************************************************************* + * 
============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.config; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayList; +import org.jetbrains.annotations.Nullable; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.json.JSONTokener; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvFeed; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvFeedSubnet; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvFeedUser; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvForceEgress; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvForceIngress; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvHop; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvNode; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvParam; +import org.onap.dmaap.datarouter.node.config.NodeConfig.ProvSubscription; +import org.onap.dmaap.datarouter.node.eelf.EelfMsgs; +import org.onap.dmaap.datarouter.node.utils.NodeUtils; + +/** + * Parser for provisioning data from the provisioning server. + * + *

The ProvData class uses a Reader for the text configuration from the provisioning server to construct arrays of + * raw configuration entries. + */ +public class ProvData { + + private static final String FEED_ID = "feedid"; + + private static final EELFLogger eelfLogger = EELFManager.getInstance().getLogger(ProvData.class); + private final NodeConfig.ProvNode[] provNodes; + private final NodeConfig.ProvParam[] provParams; + private final NodeConfig.ProvFeed[] provFeeds; + private final NodeConfig.ProvFeedUser[] provFeedUsers; + private final NodeConfig.ProvFeedSubnet[] provFeedSubnets; + private final NodeConfig.ProvSubscription[] provSubscriptions; + private final NodeConfig.ProvForceIngress[] provForceIngresses; + private final NodeConfig.ProvForceEgress[] provForceEgresses; + private final NodeConfig.ProvHop[] provHops; + + /** + * Construct raw provisioning data entries from the text (JSON) provisioning document received from the provisioning + * server. + * + * @param reader The reader for the JSON text. 
+ */ + public ProvData(Reader reader) throws IOException { + ArrayList provNodeArrayList = new ArrayList<>(); + ArrayList provParamArrayList = new ArrayList<>(); + ArrayList provFeedArrayList = new ArrayList<>(); + ArrayList provFeedUserArrayList = new ArrayList<>(); + ArrayList provFeedSubnetArrayList = new ArrayList<>(); + ArrayList provSubscriptionArrayList = new ArrayList<>(); + ArrayList provForceIngressArrayList = new ArrayList<>(); + ArrayList provForceEgressArrayList = new ArrayList<>(); + ArrayList provHopArrayList = new ArrayList<>(); + try { + JSONTokener jsonTokener = new JSONTokener(reader); + JSONObject jsonObject = new JSONObject(jsonTokener); + char nextCleanChar = jsonTokener.nextClean(); + if (nextCleanChar != '\0') { + throw new JSONException("Spurious characters following configuration"); + } + reader.close(); + addJSONFeeds(provFeedArrayList, provFeedUserArrayList, provFeedSubnetArrayList, jsonObject); + addJSONSubs(provSubscriptionArrayList, jsonObject); + addJSONParams(provNodeArrayList, provParamArrayList, jsonObject); + addJSONRoutingInformation(provForceIngressArrayList, provForceEgressArrayList, provHopArrayList, jsonObject); + } catch (JSONException jse) { + NodeUtils.setIpAndFqdnForEelf("ProvData"); + eelfLogger.error(EelfMsgs.MESSAGE_PARSING_ERROR, jse.toString()); + eelfLogger + .error("NODE0201 Error parsing configuration data from provisioning server " + jse.toString(), jse); + throw new IOException(jse.toString(), jse); + } + provNodes = provNodeArrayList.toArray(new ProvNode[0]); + provParams = provParamArrayList.toArray(new ProvParam[0]); + provFeeds = provFeedArrayList.toArray(new ProvFeed[0]); + provFeedUsers = provFeedUserArrayList.toArray(new ProvFeedUser[0]); + provFeedSubnets = provFeedSubnetArrayList.toArray(new ProvFeedSubnet[0]); + provSubscriptions = provSubscriptionArrayList.toArray(new ProvSubscription[0]); + provForceIngresses = provForceIngressArrayList.toArray(new ProvForceIngress[0]); + provForceEgresses = 
provForceEgressArrayList.toArray(new ProvForceEgress[0]); + provHops = provHopArrayList.toArray(new ProvHop[0]); + } + + private static String[] gvasa(JSONObject object, String key) { + return (gvasa(object.opt(key))); + } + + private static String[] gvasa(Object object) { + if (object instanceof JSONArray) { + JSONArray jsonArray = (JSONArray) object; + ArrayList array = new ArrayList<>(); + for (int i = 0; i < jsonArray.length(); i++) { + String string = gvas(jsonArray, i); + if (string != null) { + array.add(string); + } + } + return (array.toArray(new String[array.size()])); + } else { + String string = gvas(object); + if (string == null) { + return (new String[0]); + } else { + return (new String[]{string}); + } + } + } + + private static String gvas(JSONArray array, int index) { + return (gvas(array.get(index))); + } + + private static String gvas(JSONObject object, String key) { + return (gvas(object.opt(key))); + } + + private static String gvas(Object object) { + if (object instanceof Boolean || object instanceof Number || object instanceof String) { + return (object.toString()); + } + return (null); + } + + /** + * Get the raw node configuration entries. + */ + public NodeConfig.ProvNode[] getNodes() { + return (provNodes); + } + + /** + * Get the raw parameter configuration entries. + */ + public NodeConfig.ProvParam[] getParams() { + return (provParams); + } + + /** + * Get the raw feed configuration entries. + */ + public NodeConfig.ProvFeed[] getFeeds() { + return (provFeeds); + } + + /** + * Get the raw feed user configuration entries. + */ + public NodeConfig.ProvFeedUser[] getFeedUsers() { + return (provFeedUsers); + } + + /** + * Get the raw feed subnet configuration entries. + */ + public NodeConfig.ProvFeedSubnet[] getFeedSubnets() { + return (provFeedSubnets); + } + + /** + * Get the raw subscription entries. 
+ */ + public NodeConfig.ProvSubscription[] getSubscriptions() { + return (provSubscriptions); + } + + /** + * Get the raw forced ingress entries. + */ + public NodeConfig.ProvForceIngress[] getForceIngress() { + return (provForceIngresses); + } + + /** + * Get the raw forced egress entries. + */ + public NodeConfig.ProvForceEgress[] getForceEgress() { + return (provForceEgresses); + } + + /** + * Get the raw next hop entries. + */ + public NodeConfig.ProvHop[] getHops() { + return (provHops); + } + + @Nullable + private String getFeedStatus(JSONObject jfeed) { + String stat = null; + if (jfeed.optBoolean("suspend", false)) { + stat = "Feed is suspended"; + } + if (jfeed.optBoolean("deleted", false)) { + stat = "Feed is deleted"; + } + return stat; + } + + private void addJSONFeeds(ArrayList pfv, ArrayList pfuv, ArrayList pfsnv, + JSONObject jcfg) { + JSONArray jfeeds = jcfg.optJSONArray("feeds"); + if (jfeeds != null) { + for (int fx = 0; fx < jfeeds.length(); fx++) { + addJSONFeed(pfv, pfuv, pfsnv, jfeeds, fx); + } + } + } + + private void addJSONFeed(ArrayList pfv, ArrayList pfuv, ArrayList pfsnv, + JSONArray jfeeds, int fx) { + JSONObject jfeed = jfeeds.getJSONObject(fx); + String stat = getFeedStatus(jfeed); + String fid = gvas(jfeed, FEED_ID); + String fname = gvas(jfeed, "name"); + String fver = gvas(jfeed, "version"); + String createdDate = gvas(jfeed, "created_date"); + pfv.add(new ProvFeed(fid, fname + "//" + fver, stat, createdDate)); + addJSONFeedAuthArrays(pfuv, pfsnv, jfeed, fid); + } + + private void addJSONFeedAuthArrays(ArrayList pfuv, ArrayList pfsnv, JSONObject jfeed, + String fid) { + JSONObject jauth = jfeed.optJSONObject("authorization"); + if (jauth == null) { + return; + } + JSONArray jeids = jauth.optJSONArray("endpoint_ids"); + if (jeids != null) { + for (int ux = 0; ux < jeids.length(); ux++) { + JSONObject ju = jeids.getJSONObject(ux); + String login = gvas(ju, "id"); + String password = gvas(ju, "password"); + pfuv.add(new 
ProvFeedUser(fid, login, NodeUtils.getAuthHdr(login, password))); + } + } + JSONArray jeips = jauth.optJSONArray("endpoint_addrs"); + if (jeips != null) { + for (int ix = 0; ix < jeips.length(); ix++) { + String sn = gvas(jeips, ix); + pfsnv.add(new ProvFeedSubnet(fid, sn)); + } + } + } + + private void addJSONSubs(ArrayList psv, JSONObject jcfg) { + JSONArray jsubs = jcfg.optJSONArray("subscriptions"); + if (jsubs != null) { + for (int sx = 0; sx < jsubs.length(); sx++) { + addJSONSub(psv, jsubs, sx); + } + } + } + + private void addJSONSub(ArrayList psv, JSONArray jsubs, int sx) { + JSONObject jsub = jsubs.getJSONObject(sx); + if (jsub.optBoolean("suspend", false)) { + return; + } + String sid = gvas(jsub, "subid"); + String fid = gvas(jsub, FEED_ID); + JSONObject jdel = jsub.getJSONObject("delivery"); + String delurl = gvas(jdel, "url"); + String id = gvas(jdel, "user"); + String password = gvas(jdel, "password"); + boolean monly = jsub.getBoolean("metadataOnly"); + boolean use100 = jdel.getBoolean("use100"); + boolean privilegedSubscriber = jsub.getBoolean("privilegedSubscriber"); + boolean decompress = jsub.getBoolean("decompress"); + boolean followRedirect = jsub.getBoolean("follow_redirect"); + psv.add(new ProvSubscription(sid, fid, delurl, id, NodeUtils.getAuthHdr(id, password), monly, use100, + privilegedSubscriber, followRedirect, decompress)); + } + + private void addJSONParams(ArrayList pnv, ArrayList ppv, JSONObject jcfg) { + JSONObject jparams = jcfg.optJSONObject("parameters"); + if (jparams != null) { + for (String pname : JSONObject.getNames(jparams)) { + addJSONParam(ppv, jparams, pname); + } + addJSONNodesToParams(pnv, jparams); + } + } + + private void addJSONParam(ArrayList ppv, JSONObject jparams, String pname) { + String pvalue = gvas(jparams, pname); + if (pvalue != null) { + ppv.add(new ProvParam(pname, pvalue)); + } + } + + private void addJSONNodesToParams(ArrayList pnv, JSONObject jparams) { + String sfx = gvas(jparams, "PROV_DOMAIN"); + 
JSONArray jnodes = jparams.optJSONArray("NODES"); + if (jnodes != null) { + for (int nx = 0; nx < jnodes.length(); nx++) { + String nn = gvas(jnodes, nx); + if (nn == null) { + continue; + } + if (nn.indexOf('.') == -1) { + nn = nn + "." + sfx; + } + pnv.add(new ProvNode(nn)); + } + } + } + + private void addJSONRoutingInformation(ArrayList pfiv, ArrayList pfev, + ArrayList phv, JSONObject jcfg) { + JSONArray jingresses = jcfg.optJSONArray("ingress"); + if (jingresses != null) { + for (int fx = 0; fx < jingresses.length(); fx++) { + addJSONIngressRoute(pfiv, jingresses, fx); + } + } + JSONObject jegresses = jcfg.optJSONObject("egress"); + if (jegresses != null && JSONObject.getNames(jegresses) != null) { + for (String esid : JSONObject.getNames(jegresses)) { + addJSONEgressRoute(pfev, jegresses, esid); + } + } + JSONArray jhops = jcfg.optJSONArray("routing"); + if (jhops != null) { + for (int fx = 0; fx < jhops.length(); fx++) { + addJSONRoutes(phv, jhops, fx); + } + } + } + + private void addJSONIngressRoute(ArrayList pfiv, JSONArray jingresses, int fx) { + JSONObject jingress = jingresses.getJSONObject(fx); + String fid = gvas(jingress, FEED_ID); + String subnet = gvas(jingress, "subnet"); + String user = gvas(jingress, "user"); + if (fid == null || "".equals(fid)) { + return; + } + if ("".equals(subnet)) { + subnet = null; + } + if ("".equals(user)) { + user = null; + } + String[] nodes = gvasa(jingress, "node"); + pfiv.add(new ProvForceIngress(fid, subnet, user, nodes)); + } + + private void addJSONEgressRoute(ArrayList pfev, JSONObject jegresses, String esid) { + String enode = gvas(jegresses, esid); + if (esid != null && enode != null && !"".equals(esid) && !"".equals(enode)) { + pfev.add(new ProvForceEgress(esid, enode)); + } + } + + private void addJSONRoutes(ArrayList phv, JSONArray jhops, int fx) { + JSONObject jhop = jhops.getJSONObject(fx); + String from = gvas(jhop, "from"); + String to = gvas(jhop, "to"); + String via = gvas(jhop, "via"); + if (from 
== null || to == null || via == null || "".equals(from) || "".equals(to) || "".equals(via)) { + return; + } + phv.add(new ProvHop(from, to, via)); + } +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/SubnetMatcher.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/SubnetMatcher.java new file mode 100644 index 00000000..602c283e --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/config/SubnetMatcher.java @@ -0,0 +1,76 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.config; + +import org.onap.dmaap.datarouter.node.utils.NodeUtils; + +/** + * Compare IP addresses as byte arrays to a subnet specified as a CIDR. 
+ */ +public class SubnetMatcher { + + private byte[] sn; + private int len; + private int mask; + + /** + * Construct a subnet matcher given a CIDR. + * + * @param subnet The CIDR to match + */ + public SubnetMatcher(String subnet) { + int index = subnet.lastIndexOf('/'); + if (index == -1) { + sn = NodeUtils.getInetAddress(subnet); + len = sn.length; + } else { + len = Integer.parseInt(subnet.substring(index + 1)); + sn = NodeUtils.getInetAddress(subnet.substring(0, index)); + mask = ((0xff00) >> (len % 8)) & 0xff; + len /= 8; + } + } + + /** + * Is the IP address in the CIDR. + * + * @param addr the IP address as bytes in network byte order + * @return true if the IP address matches. + */ + public boolean matches(byte[] addr) { + if (addr.length != sn.length) { + return (false); + } + for (int i = 0; i < len; i++) { + if (addr[i] != sn[i]) { + return (false); + } + } + if (mask != 0 && ((addr[len] ^ sn[len]) & mask) != 0) { + return (false); + } + return (true); + } +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/Delivery.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/Delivery.java new file mode 100644 index 00000000..5164e612 --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/Delivery.java @@ -0,0 +1,326 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. 
+ * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * * + ******************************************************************************/ + +package org.onap.dmaap.datarouter.node.delivery; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Objects; +import org.onap.dmaap.datarouter.node.DestInfo; +import org.onap.dmaap.datarouter.node.NodeConfigManager; + +/** + * Main control point for delivering files to destinations. + * + *

The Delivery class manages assignment of delivery threads to delivery queues and creation and destruction of + * delivery queues as configuration changes. DeliveryQueues are assigned threads based on a modified round-robin + * approach giving priority to queues with more work as measured by both bytes to deliver and files to deliver and lower + * priority to queues that already have delivery threads working. A delivery thread continues to work for a delivery + * queue as long as that queue has more files to deliver. + */ +public class Delivery { + + private static final String TOTAL = " total="; + private static final String YELLOW = " yellow="; + private static EELFLogger logger = EELFManager.getInstance().getLogger(Delivery.class); + private double fdstart; + private double fdstop; + private int threads; + private int curthreads; + private NodeConfigManager config; + private HashMap dqs = new HashMap<>(); + private DeliveryQueue[] queues = new DeliveryQueue[0]; + private int qpos = 0; + private long nextcheck; + + /** + * Constructs a new Delivery system using the specified configuration manager. + * + * @param config The configuration manager for this delivery system. + */ + public Delivery(NodeConfigManager config) { + this.config = config; + Runnable cmon = this::checkconfig; + config.registerConfigTask(cmon); + } + + /** + * Reset the retry timer for a delivery queue. + */ + public synchronized void resetQueue(String spool) { + if (spool != null) { + DeliveryQueue dq = dqs.get(spool); + if (dq != null) { + dq.resetQueue(); + } + } + } + + /** + * Mark the task in spool a success. 
+ */ + public synchronized boolean markTaskSuccess(String spool, String pubId) { + boolean succeeded = false; + if (spool != null) { + DeliveryQueue dq = dqs.get(spool); + if (dq != null) { + succeeded = dq.markTaskSuccess(pubId); + } + } + return succeeded; + } + + private void cleardir(String dir) { + if (dqs.get(dir) != null) { + return; + } + File fdir = new File(dir); + try { + for (File junk : fdir.listFiles()) { + if (junk.isFile()) { + Files.delete(fdir.toPath()); + } + } + Files.delete(fdir.toPath()); + } catch (IOException e) { + logger.error("Failed to delete file: " + fdir.getPath(), e); + } + } + + private void freeDiskCheck() { + File spoolfile = new File(config.getSpoolBase()); + long tspace = spoolfile.getTotalSpace(); + long start = (long) (tspace * fdstart); + long cur = spoolfile.getUsableSpace(); + if (cur >= start) { + return; + } + ArrayList cv = new ArrayList<>(); + for (String sdir : dqs.keySet()) { + for (String meta : (new File(sdir)).list()) { + if (!meta.endsWith(".M") || meta.charAt(0) == '.') { + continue; + } + cv.add(new DelItem(meta.substring(0, meta.length() - 2), sdir)); + } + } + DelItem[] items = cv.toArray(new DelItem[cv.size()]); + Arrays.sort(items); + long stop = (long) (tspace * fdstop); + logger.warn( + "NODE0501 Free disk space below red threshold. current=" + cur + " red=" + start + TOTAL + tspace); + if (determineFreeDiskSpace(spoolfile, tspace, stop, cur, items)) { + return; + } + cur = spoolfile.getUsableSpace(); + if (cur >= stop) { + logger.warn("NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop + + TOTAL + tspace); + return; + } + logger.warn( + "NODE0504 Unable to recover sufficient disk space to reach green status. 
current=" + cur + YELLOW + + stop + TOTAL + tspace); + } + + private void cleardirs() { + String basedir = config.getSpoolBase(); + String nbase = basedir + "/n"; + for (String nodedir : (new File(nbase)).list()) { + if (!nodedir.startsWith(".")) { + cleardir(nbase + "/" + nodedir); + } + } + String sxbase = basedir + "/s"; + for (String sxdir : (new File(sxbase)).list()) { + if (sxdir.startsWith(".")) { + continue; + } + File sxf = new File(sxbase + File.separator + sxdir); + for (String sdir : sxf.list()) { + if (!sdir.startsWith(".")) { + cleardir(sxbase + "/" + sxdir + "/" + sdir); + } + } + try { + if (sxf.list().length == 0) { + Files.delete(sxf.toPath()); // won't if anything still in it + } + } catch (IOException e) { + logger.error("Failed to delete file: " + sxf.getPath(), e); + } + } + } + + private synchronized void checkconfig() { + if (!config.isConfigured()) { + return; + } + fdstart = config.getFreeDiskStart(); + fdstop = config.getFreeDiskStop(); + threads = config.getDeliveryThreads(); + if (threads < 1) { + threads = 1; + } + DestInfo[] alldis = config.getAllDests(); + DeliveryQueue[] nqs = new DeliveryQueue[alldis.length]; + qpos = 0; + HashMap ndqs = new HashMap<>(); + for (DestInfo di : alldis) { + String spl = di.getSpool(); + DeliveryQueue dq = dqs.get(spl); + if (dq == null) { + dq = new DeliveryQueue(config, di); + } else { + dq.config(di); + } + ndqs.put(spl, dq); + nqs[qpos++] = dq; + } + queues = nqs; + dqs = ndqs; + cleardirs(); + while (curthreads < threads) { + curthreads++; + (new Thread("del-thread-" + curthreads) { + @Override + public void run() { + dodelivery(); + } + }).start(); + } + nextcheck = 0; + notifyAll(); + } + + private void dodelivery() { + DeliveryQueue dq; + while ((dq = getNextQueue()) != null) { + dq.run(); + } + } + + private synchronized DeliveryQueue getNextQueue() { + while (true) { + if (curthreads > threads) { + curthreads--; + return (null); + } + if (qpos < queues.length) { + DeliveryQueue dq = 
queues[qpos++]; + if (dq.isSkipSet()) { + continue; + } + nextcheck = 0; + notifyAll(); + return (dq); + } + long now = System.currentTimeMillis(); + if (now < nextcheck) { + try { + wait(nextcheck + 500 - now); + } catch (Exception e) { + logger.error("InterruptedException", e); + } + now = System.currentTimeMillis(); + } + if (now >= nextcheck) { + nextcheck = now + 5000; + qpos = 0; + freeDiskCheck(); + } + } + } + + private boolean determineFreeDiskSpace(File spoolfile, long tspace, long stop, long cur, DelItem[] items) { + for (DelItem item : items) { + long amount = dqs.get(item.getSpool()).cancelTask(item.getPublishId()); + logger.debug("NODE0502 Attempting to discard " + item.getSpool() + "/" + item.getPublishId() + + " to free up disk"); + if (amount > 0) { + cur += amount; + if (cur >= stop) { + cur = spoolfile.getUsableSpace(); + } + if (cur >= stop) { + logger.warn( + "NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop + + TOTAL + tspace); + return true; + } + } + } + return false; + } + + public static class DelItem implements Comparable { + + private String pubid; + private String spool; + + public DelItem(String pubid, String spool) { + this.pubid = pubid; + this.spool = spool; + } + + public int compareTo(DelItem other) { + int diff = pubid.compareTo(other.pubid); + if (diff == 0) { + diff = spool.compareTo(other.spool); + } + return (diff); + } + + public String getPublishId() { + return (pubid); + } + + public String getSpool() { + return (spool); + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + if (object == null || getClass() != object.getClass()) { + return false; + } + DelItem delItem = (DelItem) object; + return Objects.equals(pubid, delItem.pubid) + && Objects.equals(getSpool(), delItem.getSpool()); + } + + @Override + public int hashCode() { + return Objects.hash(pubid, getSpool()); + } + } +} diff --git 
a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryQueue.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryQueue.java new file mode 100644 index 00000000..de1defca --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryQueue.java @@ -0,0 +1,451 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.delivery; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import org.jetbrains.annotations.Nullable; +import org.onap.dmaap.datarouter.node.DestInfo; +import org.onap.dmaap.datarouter.node.log.StatusLog; + +/** + * Mechanism for monitoring and controlling delivery of files to a destination. + * + *

The DeliveryQueue class maintains lists of DeliveryTasks for a single + * destination (a subscription or another data router node) and assigns + * delivery threads to try to deliver them. It also maintains a delivery + * status that causes it to back off on delivery attempts after a failure. + * + *

If the most recent delivery result was a failure, then no more attempts + * will be made for a period of time. Initially, and on the first failure + * following a success, this delay will be DeliveryQueueHelper.getInitFailureTimer() (milliseconds). + * If, after this delay, additional failures occur, each failure will + * multiply the delay by DeliveryQueueHelper.getFailureBackoff() up to a + * maximum delay specified by DeliveryQueueHelper.getMaxFailureTimer(). + * Note that this behavior applies to the delivery queue as a whole and not + * to individual files in the queue. If multiple files are being + * delivered and one fails, the delay will be started. If a second + * delivery fails while the delay was active, it will not change the delay + * or change the duration of any subsequent delay. + * If, however, it succeeds, it will cancel the delay. + * The queue maintains 3 collections of files to deliver: A todoList of + * files that will be attempted, a working set of files that are being + * attempted, and a retry set of files that were attempted and failed. + * Whenever the todoList is empty and needs to be refilled, a scan of the + * spool directory is made and the file names sorted. Any files in the working set are ignored. + * If a DeliveryTask for the file is in the retry set, then that delivery + * task is placed on the todoList. Otherwise, a new DeliveryTask for the + * file is created and placed on the todoList. + * If, when a DeliveryTask is about to be removed from the todoList, its + * age exceeds DeliveryQueueHelper.getExpirationTimer(), then it is instead + * marked as expired. + * + *

A delivery queue also maintains a skip flag. This flag is true if the + * failure timer is active or if no files are found in a directory scan. + */ +public class DeliveryQueue implements Runnable, DeliveryTaskHelper { + private static EELFLogger logger = EELFManager.getInstance().getLogger(DeliveryQueue.class); + private DeliveryQueueHelper deliveryQueueHelper; + + private DestInfo destinationInfo; + private HashMap working = new HashMap<>(); + private HashMap retry = new HashMap<>(); + private int todoindex; + private boolean failed; + private long failduration; + private long resumetime; + private File dir; + private List todoList = new ArrayList<>(); + + /** + * Create a delivery queue for a given destination info. + */ + public DeliveryQueue(DeliveryQueueHelper deliveryQueueHelper, DestInfo destinationInfo) { + this.deliveryQueueHelper = deliveryQueueHelper; + this.destinationInfo = destinationInfo; + dir = new File(destinationInfo.getSpool()); + dir.mkdirs(); + } + + /** + * Try to cancel a delivery task. + * + * @return The length of the task in bytes or 0 if the task cannot be cancelled. + */ + public synchronized long cancelTask(String pubid) { + if (working.get(pubid) != null) { + return (0); + } + DeliveryTask dt = retry.get(pubid); + if (dt == null) { + for (int i = todoindex; i < todoList.size(); i++) { + DeliveryTask xdt = todoList.get(i); + if (xdt.getPublishId().equals(pubid)) { + dt = xdt; + break; + } + } + } + if (dt == null) { + dt = new DeliveryTask(this, pubid); + if (dt.getFileId() == null) { + return (0); + } + } + if (dt.isCleaned()) { + return (0); + } + StatusLog.logExp(dt.getPublishId(), dt.getFeedId(), dt.getSubId(), dt.getURL(), + dt.getMethod(), dt.getCType(), dt.getLength(), "diskFull", dt.getAttempts()); + dt.clean(); + return (dt.getLength()); + } + + /** + * Mark that a delivery task has succeeded. 
+ */ + private synchronized void markSuccess(DeliveryTask task) { + working.remove(task.getPublishId()); + logger.info(task.getPublishId() + " marked as success."); + task.clean(); + failed = false; + failduration = 0; + } + + /** + * Mark that a delivery task has expired. + */ + private synchronized void markExpired(DeliveryTask task) { + logger.info(task.getPublishId() + " marked as expired."); + task.clean(); + } + + /** + * Mark that a delivery task has failed permanently. + */ + private synchronized void markFailNoRetry(DeliveryTask task) { + working.remove(task.getPublishId()); + logger.info(task.getPublishId() + " marked as failed permanently"); + task.clean(); + failed = false; + failduration = 0; + } + + private void fdupdate() { + if (!failed) { + failed = true; + if (failduration == 0) { + if (destinationInfo.isPrivilegedSubscriber()) { + failduration = deliveryQueueHelper.getWaitForFileProcessFailureTimer(); + } else { + failduration = deliveryQueueHelper.getInitFailureTimer(); + } + } + resumetime = System.currentTimeMillis() + failduration; + long maxdur = deliveryQueueHelper.getMaxFailureTimer(); + failduration = (long) (failduration * deliveryQueueHelper.getFailureBackoff()); + if (failduration > maxdur) { + failduration = maxdur; + } + } + } + + /** + * Mark that a delivery task has been redirected. + */ + private synchronized void markRedirect(DeliveryTask task) { + working.remove(task.getPublishId()); + logger.info(task.getPublishId() + " marked as redirected."); + retry.put(task.getPublishId(), task); + } + + /** + * Mark that a delivery task has temporarily failed. + */ + private synchronized void markFailWithRetry(DeliveryTask task) { + working.remove(task.getPublishId()); + logger.info(task.getPublishId() + " marked as temporarily failed."); + retry.put(task.getPublishId(), task); + fdupdate(); + } + + /** + * Get the next task. 
+ */ + public synchronized DeliveryTask getNext() { + DeliveryTask ret = peekNext(); + if (ret != null) { + todoindex++; + working.put(ret.getPublishId(), ret); + } + return (ret); + } + + /** + * Peek at the next task. + */ + public synchronized DeliveryTask peekNext() { + long now = System.currentTimeMillis(); + long mindate = now - deliveryQueueHelper.getExpirationTimer(); + if (failed) { + if (now > resumetime) { + failed = false; + } else { + return (null); + } + } + while (true) { + if (todoindex >= todoList.size()) { + todoindex = 0; + todoList = new ArrayList<>(); + String[] files = dir.list(); + if (files != null) { + Arrays.sort(files); + scanForNextTask(files); + } + retry = new HashMap<>(); + } + return getDeliveryTask(mindate); + } + } + + /** + * Update the destination info for this delivery queue. + */ + public void config(DestInfo destinationInfo) { + this.destinationInfo = destinationInfo; + } + + /** + * Get the dest info. + */ + public DestInfo getDestinationInfo() { + return (destinationInfo); + } + + /** + * Get the config manager. + */ + public DeliveryQueueHelper getConfig() { + return (deliveryQueueHelper); + } + + /** + * Exceptional condition occurred during delivery. + */ + public void reportDeliveryExtra(DeliveryTask task, long sent) { + StatusLog.logDelExtra(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getLength(), sent); + } + + /** + * Message too old to deliver. + */ + void reportExpiry(DeliveryTask task) { + StatusLog.logExp(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), + task.getCType(), task.getLength(), "retriesExhausted", task.getAttempts()); + markExpired(task); + } + + /** + * Completed a delivery attempt. 
+ */ + public void reportStatus(DeliveryTask task, int status, String xpubid, String location) { + if (status < 300) { + StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), + task.getCType(), task.getLength(), destinationInfo.getAuthUser(), status, xpubid); + if (destinationInfo.isPrivilegedSubscriber()) { + task.setResumeTime(System.currentTimeMillis() + + deliveryQueueHelper.getWaitForFileProcessFailureTimer()); + markFailWithRetry(task); + } else { + markSuccess(task); + } + } else if (status < 400 && deliveryQueueHelper.isFollowRedirects()) { + StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), + task.getCType(), task.getLength(), destinationInfo.getAuthUser(), status, location); + if (deliveryQueueHelper.handleRedirection(destinationInfo, location, task.getFileId())) { + markRedirect(task); + } else { + StatusLog.logExp(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), + task.getMethod(), task.getCType(), task.getLength(), "notRetryable", task.getAttempts()); + markFailNoRetry(task); + } + } else if (status < 500 && status != 429) { + // Status 429 is the standard response for Too Many Requests and indicates + // that a file needs to be delivered again at a later time. 
+ StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), + task.getCType(), task.getLength(), destinationInfo.getAuthUser(), status, location); + StatusLog.logExp(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), + task.getCType(), task.getLength(), "notRetryable", task.getAttempts()); + markFailNoRetry(task); + } else { + StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), + task.getCType(), task.getLength(), destinationInfo.getAuthUser(), status, location); + markFailWithRetry(task); + } + } + + /** + * Delivery failed by reason of an exception. + */ + public void reportException(DeliveryTask task, Exception exception) { + StatusLog.logDel(task.getPublishId(), task.getFeedId(), task.getSubId(), task.getURL(), task.getMethod(), + task.getCType(), task.getLength(), destinationInfo.getAuthUser(), -1, exception.toString()); + deliveryQueueHelper.handleUnreachable(destinationInfo); + markFailWithRetry(task); + } + + /** + * Get the feed ID for a subscription. + * + * @param subid The subscription ID + * @return The feed ID + */ + public String getFeedId(String subid) { + return (deliveryQueueHelper.getFeedId(subid)); + } + + /** + * Get the URL to deliver a message to given the file ID. + */ + public String getDestURL(String fileid) { + return (deliveryQueueHelper.getDestURL(destinationInfo, fileid)); + } + + /** + * Deliver files until there's a failure or there are no more. 
+ * files to deliver + */ + public void run() { + DeliveryTask task; + long endtime = System.currentTimeMillis() + deliveryQueueHelper.getFairTimeLimit(); + int filestogo = deliveryQueueHelper.getFairFileLimit(); + while ((task = getNext()) != null) { + logger.info("Processing file: " + task.getPublishId()); + task.run(); + if (--filestogo <= 0 || System.currentTimeMillis() > endtime) { + break; + } + } + } + + /** + * Is there no work to do for this queue right now?. + */ + synchronized boolean isSkipSet() { + return (peekNext() == null); + } + + /** + * Reset the retry timer. + */ + public void resetQueue() { + resumetime = System.currentTimeMillis(); + } + + /** + * Get task if in queue and mark as success. + */ + public boolean markTaskSuccess(String pubId) { + DeliveryTask task = working.get(pubId); + if (task != null) { + markSuccess(task); + return true; + } + task = retry.get(pubId); + if (task != null) { + retry.remove(pubId); + task.clean(); + resetQueue(); + failduration = 0; + return true; + } + return false; + } + + private void scanForNextTask(String[] files) { + for (String fname : files) { + String pubId = getPubId(fname); + if (pubId == null) { + continue; + } + DeliveryTask dt = retry.get(pubId); + if (dt == null) { + dt = new DeliveryTask(this, pubId); + } + todoList.add(dt); + } + } + + @Nullable + private DeliveryTask getDeliveryTask(long mindate) { + if (todoindex < todoList.size()) { + DeliveryTask dt = todoList.get(todoindex); + if (dt.isCleaned()) { + todoindex++; + } + if (destinationInfo.isPrivilegedSubscriber() && dt.getResumeTime() > System.currentTimeMillis()) { + retry.put(dt.getPublishId(), dt); + todoindex++; + } + if (dt.getDate() >= mindate) { + return (dt); + } + todoindex++; + reportExpiry(dt); + } + return null; + } + + @Nullable + private String getPubId(String fname) { + if (!fname.endsWith(".M")) { + return null; + } + String fname2 = fname.substring(0, fname.length() - 2); + long pidtime = 0; + int dot = 
fname2.indexOf('.'); + if (dot < 1) { + return null; + } + try { + pidtime = Long.parseLong(fname2.substring(0, dot)); + } catch (Exception e) { + logger.error("Exception", e); + } + if (pidtime < 1000000000000L) { + return null; + } + if (working.get(fname2) != null) { + return null; + } + return fname2; + } +} \ No newline at end of file diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryQueueHelper.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryQueueHelper.java new file mode 100644 index 00000000..ec69fff1 --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryQueueHelper.java @@ -0,0 +1,110 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.delivery; + +import org.onap.dmaap.datarouter.node.DestInfo; + +/** + * Interface to allow independent testing of the DeliveryQueue code + * + *

This interface represents all of the configuration information and + * feedback mechanisms that a delivery queue needs. + */ +public interface DeliveryQueueHelper { + /** + * Get the timeout (milliseconds) before retrying after an initial delivery failure. + */ + long getInitFailureTimer(); + + /** + * Get the timeout before retrying after delivery and wait for file processing. + */ + long getWaitForFileProcessFailureTimer(); + + /** + * Get the ratio between timeouts on consecutive delivery attempts. + */ + double getFailureBackoff(); + + /** + * Get the maximum timeout (milliseconds) between delivery attempts. + */ + long getMaxFailureTimer(); + + /** + * Get the expiration timer (milliseconds) for deliveries. + */ + long getExpirationTimer(); + + /** + * Get the maximum number of file delivery attempts before checking + * if another queue has work to be performed. + */ + int getFairFileLimit(); + + /** + * Get the maximum amount of time spent delivering files before checking if another queue has work to be performed. + */ + long getFairTimeLimit(); + + /** + * Get the URL for delivering a file. + * + * @param destinationInfo The destination information for the file to be delivered. + * @param fileid The file id for the file to be delivered. + * @return The URL for delivering the file (typically, destinationInfo.getURL() + "/" + fileid). + */ + String getDestURL(DestInfo destinationInfo, String fileid); + + /** + * Forget redirections associated with a subscriber. + * + * @param destinationInfo Destination information to forget + */ + void handleUnreachable(DestInfo destinationInfo); + + /** + * Post redirection for a subscriber. + * + * @param destinationInfo Destination information to update + * @param location Location given by subscriber + * @param fileid File ID of request + * @return true if this 3xx response is retryable, otherwise, false. 
+ */ + boolean handleRedirection(DestInfo destinationInfo, String location, String fileid); + + /** + * Should I handle 3xx responses differently than 4xx responses?. + */ + boolean isFollowRedirects(); + + /** + * Get the feed ID for a subscription. + * + * @param subid The subscription ID + * @return The feed ID + */ + String getFeedId(String subid); +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryTask.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryTask.java new file mode 100644 index 00000000..744a4476 --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryTask.java @@ -0,0 +1,474 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.delivery; + +import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID; +import static org.onap.dmaap.datarouter.node.utils.NodeUtils.isFiletypeGzip; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.HttpURLConnection; +import java.net.ProtocolException; +import java.net.URL; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.UUID; +import java.util.zip.GZIPInputStream; +import org.jetbrains.annotations.Nullable; +import org.onap.dmaap.datarouter.node.DestInfo; +import org.onap.dmaap.datarouter.node.utils.NodeUtils; +import org.onap.dmaap.datarouter.node.eelf.EelfMsgs; +import org.slf4j.MDC; + +/** + * A file to be delivered to a destination. + * + *

A Delivery task represents a work item for the data router - a file that needs to be delivered and provides + * mechanisms to get information about the file and its delivery data as well as to attempt delivery. + */ +public class DeliveryTask implements Runnable, Comparable { + + private static final String DECOMPRESSION_STATUS = "Decompression_Status"; + private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(DeliveryTask.class); + private DeliveryTaskHelper deliveryTaskHelper; + private String pubid; + private DestInfo destInfo; + private String spool; + private File datafile; + private File metafile; + private long length; + private long date; + private String method; + private String fileid; + private String ctype; + private String url; + private String feedid; + private String subid; + private int attempts; + private boolean followRedirects; + private String[][] hdrs; + private String newInvocationId; + private long resumeTime; + + + /** + * Create a delivery task for a given delivery queue and pub ID. + * + * @param deliveryTaskHelper The delivery task helper for the queue this task is in. + * @param pubid The publish ID for this file. 
This is used as the base for the file name in the spool directory and + * is of the form (milliseconds since 1970).(fqdn of initial data router node) + */ + public DeliveryTask(DeliveryTaskHelper deliveryTaskHelper, String pubid) { + this.deliveryTaskHelper = deliveryTaskHelper; + this.pubid = pubid; + destInfo = deliveryTaskHelper.getDestinationInfo(); + subid = destInfo.getSubId(); + this.followRedirects = destInfo.isFollowRedirects(); + feedid = destInfo.getLogData(); + spool = destInfo.getSpool(); + String dfn = spool + File.separator + pubid; + String mfn = dfn + ".M"; + datafile = new File(spool + File.separator + pubid); + metafile = new File(mfn); + boolean monly = destInfo.isMetaDataOnly(); + date = Long.parseLong(pubid.substring(0, pubid.indexOf('.'))); + resumeTime = System.currentTimeMillis(); + ArrayList hdrv = new ArrayList<>(); + + try (BufferedReader br = new BufferedReader(new FileReader(metafile))) { + String line = br.readLine(); + int index = line.indexOf('\t'); + method = line.substring(0, index); + NodeUtils.setIpAndFqdnForEelf(method); + if (!"DELETE".equals(method) && !monly) { + length = datafile.length(); + } + fileid = line.substring(index + 1); + while ((line = br.readLine()) != null) { + index = line.indexOf('\t'); + String header = line.substring(0, index); + String headerValue = line.substring(index + 1); + if ("x-dmaap-dr-routing".equalsIgnoreCase(header)) { + subid = headerValue.replaceAll("[^ ]*/+", ""); + feedid = deliveryTaskHelper.getFeedId(subid.replaceAll(" .*", "")); + } + if (length == 0 && header.toLowerCase().startsWith("content-")) { + continue; + } + if ("content-type".equalsIgnoreCase(header)) { + ctype = headerValue; + } + if ("x-onap-requestid".equalsIgnoreCase(header)) { + MDC.put(MDC_KEY_REQUEST_ID, headerValue); + } + if ("x-invocationid".equalsIgnoreCase(header)) { + MDC.put("InvocationId", headerValue); + headerValue = UUID.randomUUID().toString(); + newInvocationId = headerValue; + } + hdrv.add(new 
String[]{header, headerValue}); + } + } catch (Exception e) { + eelfLogger.error("Exception", e); + } + hdrs = hdrv.toArray(new String[hdrv.size()][]); + url = deliveryTaskHelper.getDestURL(fileid); + } + + /** + * Is the object a DeliveryTask with the same publication ID. + */ + public boolean equals(Object object) { + if (!(object instanceof DeliveryTask)) { + return (false); + } + return (pubid.equals(((DeliveryTask) object).pubid)); + } + + /** + * Compare the publication IDs. + */ + public int compareTo(DeliveryTask other) { + return (pubid.compareTo(other.pubid)); + } + + /** + * Get the hash code of the publication ID. + */ + public int hashCode() { + return (pubid.hashCode()); + } + + /** + * Return the publication ID. + */ + public String toString() { + return (pubid); + } + + /** + * Get the publish ID. + */ + public String getPublishId() { + return (pubid); + } + + /** + * Attempt delivery. + */ + public void run() { + attempts++; + try { + destInfo = deliveryTaskHelper.getDestinationInfo(); + boolean monly = destInfo.isMetaDataOnly(); + length = 0; + if (!"DELETE".equals(method) && !monly) { + length = datafile.length(); + } + stripSuffixIfIsDecompress(); + url = deliveryTaskHelper.getDestURL(fileid); + URL urlObj = new URL(url); + HttpURLConnection urlConnection = (HttpURLConnection) urlObj.openConnection(); + urlConnection.setConnectTimeout(60000); + urlConnection.setReadTimeout(60000); + urlConnection.setInstanceFollowRedirects(false); + urlConnection.setRequestMethod(method); + urlConnection.setRequestProperty("Content-Length", Long.toString(length)); + urlConnection.setRequestProperty("Authorization", destInfo.getAuth()); + urlConnection.setRequestProperty("X-DMAAP-DR-PUBLISH-ID", pubid); + boolean expect100 = destInfo.isUsing100(); + int rc = deliverFileToSubscriber(expect100, urlConnection); + String rmsg = urlConnection.getResponseMessage(); + rmsg = getResponseMessage(urlConnection, rmsg); + String xpubid = null; + InputStream is; + if (rc >= 
200 && rc <= 299) { + is = urlConnection.getInputStream(); + xpubid = urlConnection.getHeaderField("X-DMAAP-DR-PUBLISH-ID"); + } else { + if (rc >= 300 && rc <= 399) { + rmsg = urlConnection.getHeaderField("Location"); + } + is = urlConnection.getErrorStream(); + } + byte[] buf = new byte[4096]; + if (is != null) { + while (is.read(buf) > 0) { + //flush the buffer + } + is.close(); + } + deliveryTaskHelper.reportStatus(this, rc, xpubid, rmsg); + } catch (Exception e) { + eelfLogger.error("Exception " + Arrays.toString(e.getStackTrace()), e); + deliveryTaskHelper.reportException(this, e); + } + } + + /** + * To send decompressed gzip to the subscribers. + * + * @param httpURLConnection connection used to make request + */ + private void sendDecompressedFile(HttpURLConnection httpURLConnection) throws IOException { + byte[] buffer = new byte[8164]; + httpURLConnection.setRequestProperty(DECOMPRESSION_STATUS, "SUCCESS"); + OutputStream outputStream = getOutputStream(httpURLConnection); + if (outputStream != null) { + int bytesRead; + try (InputStream gzipInputStream = new GZIPInputStream(new FileInputStream(datafile))) { + int bufferLength = buffer.length; + while ((bytesRead = gzipInputStream.read(buffer, 0, bufferLength)) > 0) { + outputStream.write(buffer, 0, bytesRead); + } + outputStream.close(); + } catch (IOException e) { + httpURLConnection.setRequestProperty(DECOMPRESSION_STATUS, "FAILURE"); + eelfLogger.info("Could not decompress file", e); + sendFile(httpURLConnection); + } + + } + } + + /** + * To send any file to the subscriber. 
+ * + * @param httpURLConnection connection used to make request + */ + private void sendFile(HttpURLConnection httpURLConnection) throws IOException { + OutputStream os = getOutputStream(httpURLConnection); + if (os == null) { + return; + } + long sofar = 0; + try (InputStream is = new FileInputStream(datafile)) { + byte[] buf = new byte[1024 * 1024]; + while (sofar < length) { + int len = buf.length; + if (sofar + len > length) { + len = (int) (length - sofar); + } + len = is.read(buf, 0, len); + if (len <= 0) { + throw new IOException("Unexpected problem reading data file " + datafile); + } + sofar += len; + os.write(buf, 0, len); + } + os.close(); + } catch (IOException ioe) { + deliveryTaskHelper.reportDeliveryExtra(this, sofar); + throw ioe; + } + } + + /** + * Get the outputstream that will be used to send data. + * + * @param httpURLConnection connection used to make request + * @return AN Outpustream that can be used to send your data. + */ + OutputStream getOutputStream(HttpURLConnection httpURLConnection) throws IOException { + OutputStream outputStream = null; + try { + outputStream = httpURLConnection.getOutputStream(); + } catch (ProtocolException pe) { + deliveryTaskHelper.reportDeliveryExtra(this, -1L); + // Rcvd error instead of 100-continue + eelfLogger.error("Exception " + Arrays.toString(pe.getStackTrace()), pe); + } + return outputStream; + } + + private void stripSuffixIfIsDecompress() { + if (destInfo.isDecompress() && isFiletypeGzip(datafile) && fileid.endsWith(".gz")) { + fileid = fileid.replace(".gz", ""); + } + } + + private int deliverFileToSubscriber(boolean expect100, HttpURLConnection uc) throws IOException { + for (String[] nv : hdrs) { + uc.addRequestProperty(nv[0], nv[1]); + } + if (length > 0) { + if (expect100) { + uc.setRequestProperty("Expect", "100-continue"); + } + uc.setDoOutput(true); + if (destInfo.isDecompress()) { + if (isFiletypeGzip(datafile)) { + sendDecompressedFile(uc); + } else { + 
uc.setRequestProperty(DECOMPRESSION_STATUS, "UNSUPPORTED_FORMAT"); + sendFile(uc); + } + } else { + sendFile(uc); + } + } + return uc.getResponseCode(); + } + + @Nullable + private String getResponseMessage(HttpURLConnection uc, String rmsg) { + if (rmsg == null) { + String h0 = uc.getHeaderField(0); + if (h0 != null) { + int indexOfSpace1 = h0.indexOf(' '); + int indexOfSpace2 = h0.indexOf(' ', indexOfSpace1 + 1); + if (indexOfSpace1 != -1 && indexOfSpace2 != -1) { + rmsg = h0.substring(indexOfSpace2 + 1); + } + } + } + return rmsg; + } + + /** + * Remove meta and data files. + */ + public void clean() { + deleteWithRetry(datafile); + deleteWithRetry(metafile); + eelfLogger.info(EelfMsgs.INVOKE, newInvocationId); + eelfLogger.info(EelfMsgs.EXIT); + hdrs = null; + } + + private void deleteWithRetry(File file) { + int maxTries = 3; + int tryCount = 1; + while (tryCount <= maxTries) { + try { + Files.deleteIfExists(file.toPath()); + break; + } catch (IOException e) { + eelfLogger.error("IOException : Failed to delete file :" + + file.getName() + " on attempt " + tryCount, e); + } + tryCount++; + } + } + + /** + * Get the resume time for a delivery task. + */ + long getResumeTime() { + return resumeTime; + } + + /** + * Set the resume time for a delivery task. + */ + public void setResumeTime(long resumeTime) { + this.resumeTime = resumeTime; + } + + /** + * Has this delivery task been cleaned. + */ + public boolean isCleaned() { + return (hdrs == null); + } + + /** + * Get length of body. + */ + public long getLength() { + return (length); + } + + /** + * Get creation date as encoded in the publish ID. + */ + public long getDate() { + return (date); + } + + /** + * Get the most recent delivery attempt URL. + */ + public String getURL() { + return (url); + } + + /** + * Get the content type. + */ + public String getCType() { + return (ctype); + } + + /** + * Get the method. + */ + public String getMethod() { + return (method); + } + + /** + * Get the file ID. 
+ */ + public String getFileId() { + return (fileid); + } + + /** + * Get the number of delivery attempts. + */ + public int getAttempts() { + return (attempts); + } + + /** + * Get the (space delimited list of) subscription ID for this delivery task. + */ + public String getSubId() { + return (subid); + } + + /** + * Get the feed ID for this delivery task. + */ + public String getFeedId() { + return (feedid); + } + + /** + * Get the followRedirects for this delivery task. + */ + public boolean getFollowRedirects() { + return (followRedirects); + } +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryTaskHelper.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryTaskHelper.java new file mode 100644 index 00000000..529acfe0 --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/delivery/DeliveryTaskHelper.java @@ -0,0 +1,86 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. 
+ * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.delivery; + +import org.onap.dmaap.datarouter.node.DestInfo; + +/** + * Interface to allow independent testing of the DeliveryTask code. + * + *

This interface represents all the configuration information and feedback mechanisms that a delivery task needs. + */ + +public interface DeliveryTaskHelper { + + /** + * Report that a delivery attempt failed due to an exception (like can't connect to remote host). + * + * @param task The task that failed + * @param exception The exception that occurred + */ + void reportException(DeliveryTask task, Exception exception); + + /** + * Report that a delivery attempt completed (successfully or unsuccessfully). + * + * @param task The task that failed + * @param status The HTTP status + * @param xpubid The publish ID from the far end (if any) + * @param location The redirection location for a 3XX response + */ + void reportStatus(DeliveryTask task, int status, String xpubid, String location); + + /** + * Report that a delivery attempt either failed while sending data or that an error was returned instead of a 100 + * Continue. + * + * @param task The task that failed + * @param sent The number of bytes sent or -1 if an error was returned instead of 100 Continue. + */ + void reportDeliveryExtra(DeliveryTask task, long sent); + + /** + * Get the destination information for the delivery queue. + * + * @return The destination information + */ + DestInfo getDestinationInfo(); + + /** + * Given a file ID, get the URL to deliver to. + * + * @param fileid The file id + * @return The URL to deliver to + */ + String getDestURL(String fileid); + + /** + * Get the feed ID for a subscription. 
+ * + * @param subid The subscription ID + * @return The feed ID + */ + String getFeedId(String subid); +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/MetricsFilter.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/MetricsFilter.java index af820797..5f29683f 100644 --- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/MetricsFilter.java +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/MetricsFilter.java @@ -36,8 +36,6 @@ public class MetricsFilter extends Filter { "PBF|") && !event.getMessage().contains("EXP|") && !event.getMessage().contains("DLX|")) { return FilterReply.ACCEPT; } - } else { - return FilterReply.DENY; } return FilterReply.DENY; } diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/log/LogManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/log/LogManager.java new file mode 100644 index 00000000..47739739 --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/log/LogManager.java @@ -0,0 +1,245 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * * + ******************************************************************************/ + +package org.onap.dmaap.datarouter.node.log; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; +import java.io.Writer; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.TimerTask; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.jetbrains.annotations.NotNull; +import org.onap.dmaap.datarouter.node.DestInfo; +import org.onap.dmaap.datarouter.node.DestInfoBuilder; +import org.onap.dmaap.datarouter.node.NodeConfigManager; +import org.onap.dmaap.datarouter.node.delivery.DeliveryQueue; +import org.onap.dmaap.datarouter.node.delivery.DeliveryQueueHelper; + +/** + * Cleanup of old log files. + * + *

Periodically scan the log directory for log files that are older than the log file retention interval, and delete + * them. In a future release, This class will also be responsible for uploading events logs to the log server to + * support the log query APIs. + */ + +public class LogManager extends TimerTask { + + private static final String EXCEPTION = "Exception"; + private EELFLogger logger = EELFManager.getInstance().getLogger(LogManager.class); + private NodeConfigManager config; + private Matcher isnodelog; + private Matcher iseventlog; + private Uploader worker; + private String uploaddir; + private String logdir; + + /** + * Construct a log manager + * + *

The log manager will check for expired log files every 5 minutes at 20 seconds after the 5 minute boundary. + * (Actually, the interval is the event log rollover interval, which defaults to 5 minutes). + */ + public LogManager(NodeConfigManager config) { + this.config = config; + try { + isnodelog = Pattern.compile("node\\.log\\.\\d{8}").matcher(""); + iseventlog = Pattern.compile("events-\\d{12}\\.log").matcher(""); + } catch (Exception e) { + logger.error(EXCEPTION, e); + } + logdir = config.getLogDir(); + uploaddir = logdir + "/.spool"; + (new File(uploaddir)).mkdirs(); + long now = System.currentTimeMillis(); + long intvl = StatusLog.parseInterval(config.getEventLogInterval(), 30000); + long when = now - now % intvl + intvl + 20000L; + config.getTimer().scheduleAtFixedRate(this, when - now, intvl); + worker = new Uploader(); + } + + /** + * Trigger check for expired log files and log files to upload. + */ + public void run() { + worker.poke(); + } + + public Uploader getWorker() { + return worker; + } + + public class Uploader extends Thread implements DeliveryQueueHelper { + + private static final String META = "/.meta"; + private EELFLogger logger = EELFManager.getInstance().getLogger(Uploader.class); + private DeliveryQueue dq; + + Uploader() { + dq = new DeliveryQueue(this, + new DestInfoBuilder().setName("LogUpload").setSpool(uploaddir).setSubid(null).setLogdata(null) + .setUrl(null).setAuthuser(config.getMyName()).setAuthentication(config.getMyAuth()) + .setMetaonly(false).setUse100(false).setPrivilegedSubscriber(false) + .setFollowRedirects(false) + .setDecompress(false).createDestInfo()); + setDaemon(true); + setName("Log Uploader"); + start(); + } + + public long getInitFailureTimer() { + return (10000L); + } + + public long getWaitForFileProcessFailureTimer() { + return (600000L); + } + + public double getFailureBackoff() { + return (2.0); + } + + public long getMaxFailureTimer() { + return (150000L); + } + + public long getExpirationTimer() { + 
return (604800000L); + } + + public int getFairFileLimit() { + return (10000); + } + + public long getFairTimeLimit() { + return (86400000); + } + + public String getDestURL(DestInfo destinationInfo, String fileid) { + return (config.getEventLogUrl()); + } + + public void handleUnreachable(DestInfo destinationInfo) { + throw new UnsupportedOperationException(); + } + + public boolean handleRedirection(DestInfo destinationInfo, String location, String fileid) { + return (false); + } + + public boolean isFollowRedirects() { + return (false); + } + + public String getFeedId(String subid) { + return (null); + } + + private synchronized void snooze() { + try { + wait(10000); + } catch (Exception e) { + logger.error(EXCEPTION, e); + } + } + + private synchronized void poke() { + notifyAll(); + } + + @Override + public void run() { + while (true) { + scan(); + dq.run(); + snooze(); + } + } + + private void scan() { + long threshold = System.currentTimeMillis() - config.getLogRetention(); + File dir = new File(logdir); + String[] fns = dir.list(); + Arrays.sort(fns); + String lastqueued = "events-000000000000.log"; + String curlog = StatusLog.getCurLogFile(); + curlog = curlog.substring(curlog.lastIndexOf('/') + 1); + try { + Writer writer = new FileWriter(uploaddir + META); + writer.write("POST\tlogdata\nContent-Type\ttext/plain\n"); + writer.close(); + BufferedReader br = new BufferedReader(new FileReader(uploaddir + "/.lastqueued")); + lastqueued = br.readLine(); + br.close(); + } catch (Exception e) { + logger.error(EXCEPTION, e); + } + for (String fn : fns) { + if (!isnodelog.reset(fn).matches()) { + if (!iseventlog.reset(fn).matches()) { + continue; + } + lastqueued = setLastQueued(lastqueued, curlog, fn); + } + File file = new File(dir, fn); + if (file.lastModified() < threshold) { + try { + Files.deleteIfExists(file.toPath()); + } catch (IOException e) { + logger.error("Failed to delete file: " + file.getPath(), e); + } + } + } + try (Writer w = new 
FileWriter(uploaddir + "/.lastqueued")) { + Files.deleteIfExists(new File(uploaddir + META).toPath()); + w.write(lastqueued + "\n"); + } catch (Exception e) { + logger.error(EXCEPTION, e); + } + } + + @NotNull + private String setLastQueued(String lastqueued, String curlog, String fn) { + if (lastqueued.compareTo(fn) < 0 && curlog.compareTo(fn) > 0) { + lastqueued = fn; + try { + String pid = config.getPublishId(); + Files.createLink(Paths.get(uploaddir + "/" + pid), Paths.get(logdir + "/" + fn)); + Files.createLink(Paths.get(uploaddir + "/" + pid + ".M"), Paths.get(uploaddir + META)); + } catch (Exception e) { + logger.error(EXCEPTION, e); + } + } + return lastqueued; + } + } +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/log/StatusLog.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/log/StatusLog.java new file mode 100644 index 00000000..ba248a58 --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/log/StatusLog.java @@ -0,0 +1,290 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. + * * + ******************************************************************************/ + +package org.onap.dmaap.datarouter.node.log; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.onap.dmaap.datarouter.node.NodeConfigManager; +import org.onap.dmaap.datarouter.node.utils.NodeUtils; + +/** + * Logging for data router delivery events (PUB/DEL/EXP). + */ +public class StatusLog { + + private static final String EXCEPTION = "Exception"; + private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(StatusLog.class); + private static StatusLog instance = new StatusLog(); + private SimpleDateFormat filedate = new SimpleDateFormat("-yyyyMMddHHmm"); + + + private String prefix = "logs/events"; + private String suffix = ".log"; + private String plainfile; + private String curfile; + private long nexttime; + private OutputStream os; + private long intvl; + private static NodeConfigManager config = NodeConfigManager.getInstance(); + + private StatusLog() { + } + + /** + * Parse an interval of the form xxhyymzzs and round it to the nearest whole fraction of 24 hours.If no units are + * specified, assume seconds. 
+ */ + public static long parseInterval(String interval, int def) { + try { + Matcher matcher = Pattern.compile("(?:(\\d+)[Hh])?(?:(\\d+)[Mm])?(?:(\\d+)[Ss]?)?").matcher(interval); + if (matcher.matches()) { + int dur = getDur(matcher); + int best = 86400; + int dist = best - dur; + if (dur > best) { + dist = dur - best; + } + best = getBest(dur, best, dist); + def = best * 1000; + } + } catch (Exception e) { + eelfLogger.error(EXCEPTION, e); + } + return (def); + } + + private static int getBest(int dur, int best, int dist) { + int base = 1; + for (int i = 0; i < 8; i++) { + int base2 = base; + base *= 2; + for (int j = 0; j < 4; j++) { + int base3 = base2; + base2 *= 3; + for (int k = 0; k < 3; k++) { + int cur = base3; + base3 *= 5; + int ndist = cur - dur; + if (dur > cur) { + ndist = dur - cur; + } + if (ndist < dist) { + best = cur; + dist = ndist; + } + } + } + } + return best; + } + + private static int getDur(Matcher matcher) { + int dur = 0; + String match = matcher.group(1); + if (match != null) { + dur += 3600 * Integer.parseInt(match); + } + match = matcher.group(2); + if (match != null) { + dur += 60 * Integer.parseInt(match); + } + match = matcher.group(3); + if (match != null) { + dur += Integer.parseInt(match); + } + if (dur < 60) { + dur = 60; + } + return dur; + } + + /** + * Get the name of the current log file. + * + * @return The full path name of the current event log file + */ + public static synchronized String getCurLogFile() { + try { + instance.checkRoll(System.currentTimeMillis()); + } catch (Exception e) { + eelfLogger.error(EXCEPTION, e); + } + return (instance.curfile); + } + + /** + * Log a received publication attempt. 
+ * + * @param pubid The publish ID assigned by the node + * @param feedid The feed id given by the publisher + * @param requrl The URL of the received request + * @param method The method (DELETE or PUT) in the received request + * @param ctype The content type (if method is PUT and clen > 0) + * @param clen The content length (if method is PUT) + * @param srcip The IP address of the publisher + * @param user The identity of the publisher + * @param status The status returned to the publisher + */ + public static void logPub(String pubid, String feedid, String requrl, String method, String ctype, long clen, + String srcip, String user, int status) { + instance.log( + "PUB|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + srcip + + "|" + user + "|" + status); + eelfLogger.info("PUB|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + + clen + "|" + srcip + "|" + user + "|" + status); + } + + /** + * Log a data transfer error receiving a publication attempt. 
+ * + * @param pubid The publish ID assigned by the node + * @param feedid The feed id given by the publisher + * @param requrl The URL of the received request + * @param method The method (DELETE or PUT) in the received request + * @param ctype The content type (if method is PUT and clen > 0) + * @param clen The expected content length (if method is PUT) + * @param rcvd The content length received + * @param srcip The IP address of the publisher + * @param user The identity of the publisher + * @param error The error message from the IO exception + */ + public static void logPubFail(String pubid, String feedid, String requrl, String method, String ctype, long clen, + long rcvd, String srcip, String user, String error) { + instance.log("PBF|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + rcvd + + "|" + srcip + "|" + user + "|" + error); + eelfLogger.info("PBF|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + + "|" + rcvd + "|" + srcip + "|" + user + "|" + error); + } + + /** + * Log a delivery attempt. 
+ * + * @param pubid The publish ID assigned by the node + * @param feedid The feed ID + * @param subid The (space delimited list of) subscription ID + * @param requrl The URL used in the attempt + * @param method The method (DELETE or PUT) in the attempt + * @param ctype The content type (if method is PUT, not metaonly, and clen > 0) + * @param clen The content length (if PUT and not metaonly) + * @param user The identity given to the subscriber + * @param status The status returned by the subscriber or -1 if an exception occurred trying to connect + * @param xpubid The publish ID returned by the subscriber + */ + public static void logDel(String pubid, String feedid, String subid, String requrl, String method, String ctype, + long clen, String user, int status, String xpubid) { + if (feedid == null) { + return; + } + instance.log( + "DEL|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + + "|" + user + "|" + status + "|" + xpubid); + eelfLogger.info("DEL|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + + ctype + "|" + clen + "|" + user + "|" + status + "|" + xpubid); + } + + /** + * Log delivery attempts expired. 
+ * + * @param pubid The publish ID assigned by the node + * @param feedid The feed ID + * @param subid The (space delimited list of) subscription ID + * @param requrl The URL that would be delivered to + * @param method The method (DELETE or PUT) in the request + * @param ctype The content type (if method is PUT, not metaonly, and clen > 0) + * @param clen The content length (if PUT and not metaonly) + * @param reason The reason the attempts were discontinued + * @param attempts The number of attempts made + */ + public static void logExp(String pubid, String feedid, String subid, String requrl, String method, String ctype, + long clen, String reason, int attempts) { + if (feedid == null) { + return; + } + instance.log( + "EXP|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + + "|" + reason + "|" + attempts); + eelfLogger.info("EXP|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + + ctype + "|" + clen + "|" + reason + "|" + attempts); + } + + /** + * Log extra statistics about unsuccessful delivery attempts. + * + * @param pubid The publish ID assigned by the node + * @param feedid The feed ID + * @param subid The (space delimited list of) subscription ID + * @param clen The content length + * @param sent The # of bytes sent or -1 if subscriber returned an error instead of 100 Continue, otherwise, the + * number of bytes sent before an error occurred. 
+ */ + public static void logDelExtra(String pubid, String feedid, String subid, long clen, long sent) { + if (feedid == null) { + return; + } + instance.log("DLX|" + pubid + "|" + feedid + "|" + subid + "|" + clen + "|" + sent); + eelfLogger.info("DLX|" + pubid + "|" + feedid + "|" + subid + "|" + clen + "|" + sent); + } + + private synchronized void checkRoll(long now) throws IOException { + if (now >= nexttime) { + if (os != null) { + os.close(); + os = null; + } + intvl = parseInterval(config.getEventLogInterval(), 300000); + prefix = config.getEventLogPrefix(); + suffix = config.getEventLogSuffix(); + nexttime = now - now % intvl + intvl; + curfile = prefix + filedate.format(new Date(nexttime - intvl)) + suffix; + plainfile = prefix + suffix; + notifyAll(); + } + } + + private synchronized void log(String string) { + try { + long now = System.currentTimeMillis(); + checkRoll(now); + if (os == null) { + os = new FileOutputStream(curfile, true); + Files.deleteIfExists(new File(plainfile).toPath()); + Files.createLink(Paths.get(plainfile), Paths.get(curfile)); + } + os.write((NodeUtils.logts(new Date(now)) + '|' + string + '\n').getBytes()); + os.flush(); + } catch (IOException ioe) { + eelfLogger.error("IOException", ioe); + } + } +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/utils/NodeTlsManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/utils/NodeTlsManager.java new file mode 100644 index 00000000..a32699d4 --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/utils/NodeTlsManager.java @@ -0,0 +1,169 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.dmaap.datarouter.node.utils; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.FileInputStream; +import java.io.IOException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.util.Enumeration; +import java.util.Properties; +import javax.naming.InvalidNameException; +import javax.naming.ldap.LdapName; +import javax.naming.ldap.Rdn; +import org.onap.dmaap.datarouter.node.eelf.EelfMsgs; + +public class NodeTlsManager { + + private static final EELFLogger eelfLogger = EELFManager.getInstance().getLogger(NodeTlsManager.class); + + private String keyStoreType; + private String keyStorefile; + private String keyStorePassword; + private String keyManagerPassword; + private final String[] enabledProtocols; + + public NodeTlsManager(Properties properties) { + enabledProtocols = properties.getProperty("NodeHttpsProtocols", + "TLSv1.1|TLSv1.2").trim().split("\\|"); + setUpKeyStore(properties); + setUpTrustStore(properties); + } + + private void setUpKeyStore(Properties properties) { + keyStoreType = properties.getProperty("KeyStoreType", "PKCS12"); + keyStorefile = properties.getProperty("KeyStorePath"); + keyStorePassword = properties.getProperty("KeyStorePass"); + 
keyManagerPassword = properties.getProperty("KeyManagerPass"); + } + + private void setUpTrustStore(Properties properties) { + String trustStoreType = properties.getProperty("TrustStoreType", "jks"); + String trustStoreFile = properties.getProperty("TrustStorePath"); + String trustStorePassword = properties.getProperty("TrustStorePass"); + if (trustStoreFile != null && trustStoreFile.length() > 0) { + eelfLogger.info("TrustStore found. Loading {} file {} to System Properties.", trustStoreType, trustStoreFile); + System.setProperty("javax.net.ssl.trustStoreType", trustStoreType); + System.setProperty("javax.net.ssl.trustStore", trustStoreFile); + System.setProperty("javax.net.ssl.trustStorePassword", trustStorePassword); + return; + } + eelfLogger.error("TrustStore not found. Falling back to 1 way TLS"); + } + + public String getKeyStoreType() { + return keyStoreType; + } + + public String getKeyStorefile() { + return keyStorefile; + } + + public String getKeyStorePassword() { + return keyStorePassword; + } + + public String getKeyManagerPassword() { + return keyManagerPassword; + } + + public String[] getEnabledProtocols() { + return enabledProtocols; + } + + /** + * Get the CN value of the first private key entry with a certificate. 
+ * + * @return CN of the certificate subject or null + */ + public String getMyNameFromCertificate() { + return getCanonicalName(this.keyStoreType, this.keyStorefile, this.keyStorePassword); + } + + private String getCanonicalName(String kstype, String ksfile, String kspass) { + KeyStore ks; + try { + ks = KeyStore.getInstance(kstype); + if (loadKeyStore(ksfile, kspass, ks)) { + return (null); + } + } catch (Exception e) { + NodeUtils.setIpAndFqdnForEelf("getCanonicalName"); + eelfLogger.error(EelfMsgs.MESSAGE_KEYSTORE_LOAD_ERROR, e, ksfile); + return (null); + } + return (getCanonicalName(ks)); + } + + private String getCanonicalName(KeyStore ks) { + try { + Enumeration aliases = ks.aliases(); + while (aliases.hasMoreElements()) { + String name = getNameFromSubject(ks, aliases); + if (name != null) { + return name; + } + } + } catch (Exception e) { + eelfLogger.error("NODE0402 Error extracting my name from my keystore file " + e); + } + return (null); + } + + private boolean loadKeyStore(String ksfile, String kspass, KeyStore ks) + throws NoSuchAlgorithmException, CertificateException { + try (FileInputStream fileInputStream = new FileInputStream(ksfile)) { + ks.load(fileInputStream, kspass.toCharArray()); + } catch (IOException ioException) { + eelfLogger.error("IOException occurred while opening FileInputStream: " + ioException.getMessage(), + ioException); + return true; + } + return false; + } + + private String getNameFromSubject(KeyStore ks, Enumeration aliases) throws KeyStoreException { + String alias = aliases.nextElement(); + String nameFromSubject = null; + if (ks.entryInstanceOf(alias, KeyStore.PrivateKeyEntry.class)) { + X509Certificate cert = (X509Certificate) ks.getCertificate(alias); + if (cert != null) { + String subject = cert.getSubjectX500Principal().getName(); + try { + LdapName ln = new LdapName(subject); + for (Rdn rdn : ln.getRdns()) { + if (rdn.getType().equalsIgnoreCase("CN")) { + nameFromSubject = rdn.getValue().toString(); + } + } + } 
catch (InvalidNameException e) { + eelfLogger.error("No valid CN not found for dr-node cert", e); + } + } + } + return nameFromSubject; + } +} diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/utils/NodeUtils.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/utils/NodeUtils.java new file mode 100644 index 00000000..bd233d3e --- /dev/null +++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/utils/NodeUtils.java @@ -0,0 +1,253 @@ +/******************************************************************************* + * ============LICENSE_START================================================== + * * org.onap.dmaap + * * =========================================================================== + * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * * =========================================================================== + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * * ============LICENSE_END==================================================== + * * + * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * * + ******************************************************************************/ + + +package org.onap.dmaap.datarouter.node.utils; + +import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID; +import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN; +import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS; +import static com.att.eelf.configuration.Configuration.MDC_SERVICE_NAME; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.net.InetAddress; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Enumeration; +import java.util.TimeZone; +import java.util.UUID; +import java.util.zip.GZIPInputStream; +import javax.naming.InvalidNameException; +import javax.naming.ldap.LdapName; +import javax.naming.ldap.Rdn; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang3.StringUtils; +import org.onap.dmaap.datarouter.node.eelf.EelfMsgs; +import org.slf4j.MDC; + +/** + * Utility functions for the data router node. + */ +public class NodeUtils { + + private static final EELFLogger eelfLogger = EELFManager.getInstance().getLogger(NodeUtils.class); + + private NodeUtils() { + } + + /** + * Base64 encode a byte array. + * + * @param raw The bytes to be encoded + * @return The encoded string + */ + public static String base64Encode(byte[] raw) { + return (Base64.encodeBase64String(raw)); + } + + /** + * Given a user and password, generate the credentials. 
+ * + * @param user User name + * @param password User password + * @return Authorization header value + */ + public static String getAuthHdr(String user, String password) { + if (user == null || password == null) { + return (null); + } + return ("Basic " + base64Encode((user + ":" + password).getBytes())); + } + + /** + * Given a node name, generate the credentials. + * + * @param node Node name + */ + public static String getNodeAuthHdr(String node, String key) { + try { + MessageDigest md = MessageDigest.getInstance("SHA-512"); + md.update(key.getBytes()); + md.update(node.getBytes()); + md.update(key.getBytes()); + return (getAuthHdr(node, base64Encode(md.digest()))); + } catch (Exception exception) { + eelfLogger + .error("Exception in generating Credentials for given node name:= " + exception.getMessage(), + exception); + return (null); + } + } + + /** + * Given a string representation of an IP address, get the corresponding byte array. + * + * @param ip The IP address as a string + * @return The IP address as a byte array or null if the address is invalid + */ + public static byte[] getInetAddress(String ip) { + try { + return (InetAddress.getByName(ip).getAddress()); + } catch (Exception exception) { + eelfLogger + .error("Exception in generating byte array for given IP address := " + exception.toString(), + exception); + } + return (null); + } + + /** + * Given a uri with parameters, split out the feed ID and file ID. 
+ */ + public static String[] getFeedAndFileID(String uriandparams) { + int end = uriandparams.length(); + int index = uriandparams.indexOf('#'); + if (index != -1 && index < end) { + end = index; + } + index = uriandparams.indexOf('?'); + if (index != -1 && index < end) { + end = index; + } + end = uriandparams.lastIndexOf('/', end); + if (end < 2) { + return (null); + } + index = uriandparams.lastIndexOf('/', end - 1); + if (index == -1) { + return (null); + } + return (new String[]{uriandparams.substring(index + 1, end), uriandparams.substring(end + 1)}); + } + + /** + * Escape fields that might contain vertical bar, backslash, or newline by replacing them with backslash p, + * backslash e and backslash n. + */ + public static String loge(String string) { + if (string == null) { + return (string); + } + return (string.replaceAll("\\\\", "\\\\e").replaceAll("\\|", "\\\\p").replaceAll("\n", "\\\\n")); + } + + /** + * Undo what loge does. + */ + public static String unloge(String string) { + if (string == null) { + return (string); + } + return (string.replaceAll("\\\\p", "\\|").replaceAll("\\\\n", "\n").replaceAll("\\\\e", "\\\\")); + } + + /** + * Format a logging timestamp as yyyy-mm-ddThh:mm:ss.mmmZ + */ + public static String logts(long when) { + return (logts(new Date(when))); + } + + /** + * Format a logging timestamp as yyyy-mm-ddThh:mm:ss.mmmZ + */ + public static synchronized String logts(Date when) { + SimpleDateFormat logDate = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); + logDate.setTimeZone(TimeZone.getTimeZone("GMT")); + return (logDate.format(when)); + } + + /** Method prints method name, server FQDN and IP Address of the machine in EELF logs. + * + * @param method Prints method name in EELF log. 
+ */ + public static void setIpAndFqdnForEelf(String method) { + MDC.clear(); + MDC.put(MDC_SERVICE_NAME, method); + try { + MDC.put(MDC_SERVER_FQDN, InetAddress.getLocalHost().getHostName()); + MDC.put(MDC_SERVER_IP_ADDRESS, InetAddress.getLocalHost().getHostAddress()); + } catch (Exception exception) { + eelfLogger + .error("Exception in generating byte array for given IP address := " + exception.toString(), + exception); + } + + } + + /** Method sets RequestIs and InvocationId for se in EELF logs. + * + * @param req Request used to get RequestId and InvocationId. + */ + public static void setRequestIdAndInvocationId(HttpServletRequest req) { + String reqId = req.getHeader("X-ONAP-RequestID"); + if (StringUtils.isBlank(reqId)) { + reqId = UUID.randomUUID().toString(); + } + MDC.put(MDC_KEY_REQUEST_ID, reqId); + String invId = req.getHeader("X-InvocationID"); + if (StringUtils.isBlank(invId)) { + invId = UUID.randomUUID().toString(); + } + MDC.put("InvocationId", invId); + } + + /** + * Sends error as response with error code input. + */ + public static void sendResponseError(HttpServletResponse response, int errorCode, EELFLogger intlogger) { + try { + response.sendError(errorCode); + } catch (IOException ioe) { + intlogger.error("IOException", ioe); + } + } + + /** + * If file is of type gzip. 
+ * + * @param file The name of the file to be checked + * @return True if the file is of type gzip + */ + public static boolean isFiletypeGzip(File file) { + try (FileInputStream fileInputStream = new FileInputStream(file); + GZIPInputStream ignored = new GZIPInputStream(fileInputStream)) { + return true; + } catch (IOException e) { + eelfLogger.error("NODE0403 " + file + " Not in gzip(gz) format: " + e + e); + return false; + } + } +} diff --git a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.cred.props b/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.cred.props deleted file mode 100644 index 9a43f584..00000000 --- a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.cred.props +++ /dev/null @@ -1,17 +0,0 @@ -############################################################ -# Properties Generated by AT&T Certificate Manager -# by root -# on 2022-03-22T12:31:16.865+0000 -# @copyright 2019, AT&T -############################################################ -Challenge=enc:2dZO7B2UVelU01IezilQ9hUYptjIvurC6JWO9vJHOiiqg9qUna3AdEMxzBpXcgIO -cadi_alias=dmaap-dr-node@dmaap-dr.onap.org -cadi_key_password=enc:ow380FBQU5xvQEDzTiXdfx-BD_0dEWbRCXjnUPxVFatk_7KaejgiLCVew6C6x8_9 -cadi_keyfile=/opt/app/osaaf/local/org.onap.dmaap-dr.keyfile -cadi_keystore=/opt/app/osaaf/local/org.onap.dmaap-dr.p12 -cadi_keystore_password=enc:Urky1AuqpokGwoSl72ypmhVONokY0H1Gy28S9P8jfvK1-d7W8SODXf87K37EubDo -cadi_keystore_password_jks=enc:lwXWmxaVAHy207uNeiHPYLho3qezj4xu7_iH4myTitXuj_bo9esv0e1L19HluXlM -cadi_keystore_password_p12=enc:Urky1AuqpokGwoSl72ypmhVONokY0H1Gy28S9P8jfvK1-d7W8SODXf87K37EubDo -cadi_truststore=/opt/app/osaaf/local/org.onap.dmaap-dr.trust.jks -cadi_truststore_password=enc:stAsWWKk5FOcWxyWsNqyQlSdkwWSxcy3Jed6RW_r4jyZosqV8kmSCJLSHNLyWyTH -cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US diff --git 
a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.keyfile b/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.keyfile deleted file mode 100644 index 987cae1f..00000000 --- a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.keyfile +++ /dev/null @@ -1,27 +0,0 @@ -uXr3OodaCvEvTDHfkpgh-_5UdpEkBowSRpKF18WNxMG0A9Y9k2REr-d075mNHshr5gOUYAlykgaX -ImvY-oFtG71N-Q0uqYnYuU7eX7zgcOOFNpeT4q2db3wkiScaZgdv2UnfDOVX2Aq53GzyYeKcGB1W -u0jKX8ryHVC0uvj69y1boiJBxQY1HwAqsh9q39Ut1XxyB42D7xDM7sa9G1I6YosQlthXPMt7Q_i0 -0CUHrozT6Wtnxfb8-qR6rM5PNmAxp8r5Jb4cnSbxJGEWpx1DkZPPZNvLjE9bBfinVftzrpIlqHd3 -2CCp3k6xrqui2c_0HaNILtKHX5VVKttRf9zoADk_l5_MowEJgh1fgu8m0r-iEB6oH18AWZ_PCtSr -A0cqEDGbP-h7e8w-Dwb0yS_oSfeDISarZ1Q46CQFcDz7f4bVB3AW67YGM4SQBaFGivWS_Lz_juOx -fhyo55HW_wL_92xRvAyXD1Eg64Lw0Cx0f9rn4uM7NkKaSCP8MmIvs84egCvDzCiWkT6SGzUTeZ7M -qI0ow2tJPFUuvhozcYxJW9sFzTqTFL7iLtrO5nHmo7YUVf2WU8ByGOfC-ylZPjw0fiKpe-7MXVSz -MIOxl3pgWSYt6kImBtZCateif6TgF_A-91yAOoQLm72baJKhOo-F-aJ4k6ToT4bZ-9-Hjs5Wk9sO -4thYzDcMaGrWsssnR9QMBjl9x3uw5bfOYGduyyRCWC_TYNorrW6Bvh7gFZAWk_f6cK-pPahRo_3O -erNlUHx6xaektjqUsmp91LzjCEWbmt5nDGw16VUSPhrtkca2ReWDL0vTnpjvPuRI7mWHdfyoVYMw -KUDkRgn61QvELC0Rl0C12polUEjqSxCRvGzL8QiOrU_Z1hXhVyp2gVIYvDKyuyguvJlniEBQ2qbM -3Y5savSaANj-b2-_J0ofhvYFGz1k_bjcCE6Wx2RSWk1nSJFJ0gFjp1Ky2cyKhhVMXCiWcbbA3vC8 -E1cE9Ixn4z5WYyHOAFyzrsiiapdn5MhB_kI-ObR2HuRy_GRJHMVpxqX15Bv3OXxWy51bXeYeRfit -SVCoyfXMXbx12eVG4tvf3WyYg9RqK_QN57VAmnffq09MAXuzFB1NuxSeJeBHGtBW6XIvb6E32K6I -rL7vfxGHhD7ZHqQzzie5I42HufANtBb1KNKHikeeACxxlpjOIHjjEZ6yffhoyhNbSHx4TNs7UYOf -FUKs0TCO9ffSJ8JT67X-baqlnSYFkdqR6QTGhltZq1ssPYq6Y6NGN1iEHJMfapev07AQgp6mBBxJ -dEwDZMAwlmb1yVkU1gfzcrGdAUeAAuM6LowPHUKYLJJvvaFLQJKbwwLvjnm_Tf9UR5QJ8a3JKiLt -F0Ykc1AeO-fyiNPUVRnkc3--Gs0bLUpLe0_medGOZ_LLk45GarEzBCGajgF_joz8fsndKtDrEnB0 -z-8iiQ4bTZb_ALjKnnhcdKjoJNhZ-e3AsmadYO7ve2K3ApwOelWWh24iZ-ZSUbnlw-OQMu4RvI9h -9KOTbu4gWokep3O2uT8-cVN-ax1FYRpmexHgIxZy-tmYjDdJTJQv3dMp5UngUs6xmIy5nyIBr51B 
-YORSQmXRvbdvYeY7jOEQUGfXfS2yYtzLfJ07f8ODQJ24APDKsVEGWVBcBMqp53o9W7cPJ-4vnMLU -mVYCKqVrT2bThRWb38OtaaJuA4A8GboDDe5Q0IZao0v3gqwgfCzJIjQx97AEHzEbLzEBxio_z8a0 -AYLi-_f7kMtgrMsj61gcRVUan5NVqnL7oVwFn8FuqeTRzMMbgJHUHfFzrgLxpRwnscGRzygZf39Q -JqqOLGriWe_IOhXQ7aIp3uyBF-brJnrXcQIp3PF0fbVTGK_VyKDB6J0BjJyjmyL0TA37hDr8usna -TgMtjF1qq9kOrJr2582n8TW-ogpyqm1ft7R18VT9_1JjtnCcTCudW-oJX87jv3UTJdaEkBMhc1Pe -euLti-qSG3Q5QeO5AxM8mP_9YbLHB9YQboRWnbfC9oIFRmVgGAcdbHNFn0hfILBOTIYStQL4 \ No newline at end of file diff --git a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.location.props b/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.location.props deleted file mode 100644 index c30adee4..00000000 --- a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.location.props +++ /dev/null @@ -1,8 +0,0 @@ -############################################################ -# Properties Generated by AT&T Certificate Manager -# by root -# on 2022-03-22T12:31:14.170+0000 -# @copyright 2019, AT&T -############################################################ -cadi_latitude=0.0 -cadi_longitude=0.0 diff --git a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.p12 b/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.p12 deleted file mode 100644 index 4238c868..00000000 Binary files a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.p12 and /dev/null differ diff --git a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.props b/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.props deleted file mode 100644 index fd5c5aa4..00000000 --- a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.props +++ /dev/null @@ -1,24 +0,0 @@ -############################################################ -# Properties Generated by AT&T Certificate Manager -# by root -# on 2022-03-22T12:31:14.169+0000 -# @copyright 2019, AT&T -############################################################ -aaf_env=DEV -aaf_id=dmaap-dr-node@dmaap-dr.onap.org 
-aaf_locate_url=https://aaf-locate.onap:8095 -aaf_locator_app_ns=org.osaaf.aaf -aaf_locator_container=oom -aaf_locator_container_ns=onap -aaf_locator_fqdn=dmaap-dr-node -aaf_locator_public_fqdn=dmaap-dr.onap.org -aaf_oauth2_introspect_url=https://AAF_LOCATE_URL/%CNS.%AAF_NS.introspect:2.1/introspect -aaf_oauth2_token_url=https://AAF_LOCATE_URL/%CNS.%AAF_NS.token:2.1/token -aaf_url=https://AAF_LOCATE_URL/%CNS.%AAF_NS.service:2.1 -aaf_url_cm=https://AAF_LOCATE_URL/%CNS.%AAF_NS.cm:2.1 -aaf_url_fs=https://AAF_LOCATE_URL/%CNS.%AAF_NS.fs:2.1 -aaf_url_gui=https://AAF_LOCATE_URL/%CNS.%AAF_NS.gui:2.1 -aaf_url_hello=https://aaf-locate.onap:8095/locate/onap.org.osaaf.aaf.hello:2.1 -aaf_url_oauth=https://AAF_LOCATE_URL/%CNS.%AAF_NS.oauth:2.1 -cadi_prop_files=/opt/app/osaaf/local/org.onap.dmaap-dr.location.props:/opt/app/osaaf/local/org.onap.dmaap-dr.cred.props -cadi_protocols=TLSv1.1,TLSv1.2 diff --git a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.trust.jks b/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.trust.jks deleted file mode 100644 index e09e2e61..00000000 Binary files a/datarouter-node/src/main/resources/aaf/org.onap.dmaap-dr.trust.jks and /dev/null differ diff --git a/datarouter-node/src/main/resources/node.properties b/datarouter-node/src/main/resources/node.properties index ac9aec22..68580f73 100644 --- a/datarouter-node/src/main/resources/node.properties +++ b/datarouter-node/src/main/resources/node.properties @@ -56,35 +56,35 @@ SpoolDir = /opt/app/datartr/spool # The path to the redirection data file RedirectionFile = etc/redirections.dat # +# https security required for publish request +TlsEnabled = false +# +# Enabled TLS protocols +NodeHttpsProtocols = TLSv1.1|TLSv1.2 +# # The type of keystore for https KeyStoreType = PKCS12 # +# The path to your KeyStoreFile +KeyStorePath = /opt/app/datartr/certs/org.onap.dmaap-dr-node.p12 +# +# The key store password +KeyStorePass = changeit +# +# The key manager password +KeyManagerPass = changeit +# 
# The type of truststore for https TrustStoreType = jks # +# The path to your trust store +TrustStorePath = /opt/app/datartr/certs/truststore.jks +# +# The trust store password +TrustStorePass = changeit +# # The path to the file used to trigger an orderly shutdown QuiesceFile = etc/SHUTDOWN # # The key used to generate passwords for node to node transfers NodeAuthKey = Node123! -# -# DR_NODE DEFAULT ENABLED TLS PROTOCOLS -NodeHttpsProtocols = TLSv1.1|TLSv1.2 -# -# AAF CADI enabled flag -CadiEnabled = false -# -# AAF type to generate permission string -AAFType = org.onap.dmaap-dr.feed -# -# AAF default instance to generate permission string - default should be legacy -AAFInstance = legacy -# -# AAF action to generate permission string - default should be publish -AAFAction = publish -# -# AAF Props file path -AAFPropsFilePath = /opt/app/osaaf/local/org.onap.dmaap-dr.props - -# https security required for publish request -TlsEnabled = false diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java index d1194279..ea221cbd 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java @@ -44,6 +44,9 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; +import org.onap.dmaap.datarouter.node.delivery.DeliveryQueue; +import org.onap.dmaap.datarouter.node.delivery.DeliveryQueueHelper; +import org.onap.dmaap.datarouter.node.delivery.DeliveryTask; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTaskTest.java 
b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTaskTest.java index 0f019161..b749afec 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTaskTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTaskTest.java @@ -33,6 +33,8 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; +import org.onap.dmaap.datarouter.node.delivery.DeliveryQueue; +import org.onap.dmaap.datarouter.node.delivery.DeliveryTask; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java index 25830cdd..61c53180 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java @@ -29,7 +29,6 @@ import static org.mockito.Mockito.verify; import java.io.File; import java.io.IOException; import java.util.HashMap; -import java.util.Hashtable; import org.apache.commons.lang3.reflect.FieldUtils; import org.junit.After; import org.junit.Assert; @@ -37,7 +36,9 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; -import org.onap.dmaap.datarouter.node.Delivery.DelItem; +import org.onap.dmaap.datarouter.node.delivery.Delivery; +import org.onap.dmaap.datarouter.node.delivery.Delivery.DelItem; +import org.onap.dmaap.datarouter.node.delivery.DeliveryQueue; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; diff --git 
a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/LogManagerTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/LogManagerTest.java index cb3c88a4..1875a220 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/LogManagerTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/LogManagerTest.java @@ -30,13 +30,13 @@ import java.io.IOException; import java.util.Timer; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.reflect.FieldUtils; -import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; -import org.onap.dmaap.datarouter.node.LogManager.Uploader; +import org.onap.dmaap.datarouter.node.log.LogManager; +import org.onap.dmaap.datarouter.node.log.LogManager.Uploader; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeAafPropsUtilsTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeAafPropsUtilsTest.java deleted file mode 100644 index fbb93324..00000000 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeAafPropsUtilsTest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * ============LICENSE_START======================================================= - * Copyright (C) 2019 Nordix Foundation. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * SPDX-License-Identifier: Apache-2.0 - * ============LICENSE_END========================================================= - */ -package org.onap.dmaap.datarouter.node; - -import java.io.File; -import java.io.IOException; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class NodeAafPropsUtilsTest { - - private NodeAafPropsUtils nodeAafPropsUtils; - - @Before - public void setUp() throws IOException { - nodeAafPropsUtils = new NodeAafPropsUtils(new File("src/test/resources/aaf/org.onap.dmaap-dr.props")); - } - - @Test - public void Veirfy_Aaf_Pass_Decryp_Successful() { - Assert.assertEquals("w7(O#.QV#kQ;L(8fsaoT7cY&", nodeAafPropsUtils.getDecryptedPass("cadi_keystore_password")); - } -} diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigManagerTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigManagerTest.java index 046a56e2..10732dfa 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigManagerTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigManagerTest.java @@ -45,6 +45,7 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.Mock; +import org.onap.dmaap.datarouter.node.config.NodeConfig; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; @@ -95,10 +96,8 @@ public 
class NodeConfigManagerTest { @Test public void Verify_NodeConfigMan_Getters_Secure() { NodeConfigManager nodeConfigManager = NodeConfigManager.getInstance(); - Assert.assertEquals("legacy", nodeConfigManager.getAafInstance()); Assert.assertEquals("src/test/resources/spool/f", nodeConfigManager.getSpoolDir()); Assert.assertEquals("src/test/resources/spool", nodeConfigManager.getSpoolBase()); - Assert.assertEquals("PKCS12", nodeConfigManager.getKSType()); Assert.assertEquals(8080, nodeConfigManager.getHttpPort()); Assert.assertEquals(8443, nodeConfigManager.getHttpsPort()); Assert.assertEquals(443, nodeConfigManager.getExtHttpsPort()); @@ -108,12 +107,8 @@ public class NodeConfigManagerTest { Assert.assertEquals(".log", nodeConfigManager.getEventLogSuffix()); Assert.assertEquals("src/test/resources/logs", nodeConfigManager.getLogDir()); Assert.assertEquals((86400000L * 30), nodeConfigManager.getLogRetention()); - Assert.assertEquals(new String[] {"TLSv1.1", "TLSv1.2"}, nodeConfigManager.getEnabledprotocols()); - Assert.assertEquals("org.onap.dmaap-dr.feed", nodeConfigManager.getAafType()); - Assert.assertEquals("publish", nodeConfigManager.getAafAction()); - Assert.assertTrue(nodeConfigManager.getCadiEnabled()); Assert.assertFalse(nodeConfigManager.isShutdown()); - Assert.assertTrue(nodeConfigManager.isTlsEnabled()); + Assert.assertFalse(nodeConfigManager.isTlsEnabled()); Assert.assertTrue(nodeConfigManager.isConfigured()); Assert.assertNotNull(nodeConfigManager.getPublishId()); Assert.assertNotNull(nodeConfigManager.getAllDests()); @@ -126,9 +121,6 @@ public class NodeConfigManagerTest { Assert.assertEquals(60000, nodeConfigManager.getFairTimeLimit()); Assert.assertNotNull(nodeConfigManager.getTargets("1")); Assert.assertEquals("src/test/resources/spool/f", nodeConfigManager.getSpoolDir()); - Assert.assertEquals("src/test/resources/aaf/org.onap.dmaap-dr.p12", nodeConfigManager.getKSFile()); - Assert.assertEquals("jks", nodeConfigManager.getTstype()); - 
Assert.assertEquals("src/test/resources/aaf/org.onap.dmaap-dr.trust.jks", nodeConfigManager.getTsfile()); Assert.assertEquals(40, nodeConfigManager.getDeliveryThreads()); Assert.assertEquals("30", nodeConfigManager.getEventLogInterval()); Assert.assertFalse(nodeConfigManager.isFollowRedirects()); @@ -137,7 +129,6 @@ public class NodeConfigManagerTest { Assert.assertEquals("Basic ZG1hYXAtZHItbm9kZTp2OStFanZpWVBXSURrazVxRlF5ZkoxSC9LdHBuYWo4K0NVTXlNL0lRRUp2UGdjOUxpU2s5ZnpKTjdFazl3SzZkaG11S1E4S3RtcC9kelpBU3BRUGZCdz09", nodeConfigManager.getMyAuth()); Assert.assertEquals(0.05, nodeConfigManager.getFreeDiskStart(), 0.0); Assert.assertEquals(0.2, nodeConfigManager.getFreeDiskStop(), 0.0); - Assert.assertEquals("org.onap.dmaap-dr.feed|legacy|publish", nodeConfigManager.getPermission("legacy")); } /** diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java index 6804ebfe..1272f3c2 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java @@ -31,12 +31,14 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; +import org.onap.dmaap.datarouter.node.config.NodeConfig; +import org.onap.dmaap.datarouter.node.config.ProvData; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) -@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.node.ProvData"}) +@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.node.config.ProvData"}) @PowerMockIgnore({"com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*"}) public class NodeConfigTest { @@ -79,7 +81,6 @@ public class NodeConfigTest { 
endpointAddrs.put("172.0.0.1"); auth.put("endpoint_addrs", endpointAddrs); feed.put("authorization", auth); - feed.put("aaf_instance", "legacy"); feeds.put(feed); provData.put("feeds", feeds); } diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServerTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServerTest.java index af43e5d8..8d0b2a2d 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServerTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServerTest.java @@ -27,12 +27,15 @@ import static org.mockito.Mockito.when; import java.io.File; import java.io.IOException; +import java.util.Properties; import org.apache.commons.lang3.reflect.FieldUtils; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.onap.dmaap.datarouter.node.delivery.Delivery; +import org.onap.dmaap.datarouter.node.utils.NodeTlsManager; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor; @@ -44,9 +47,11 @@ import org.powermock.modules.junit4.PowerMockRunner; public class NodeServerTest { private final NodeConfigManager config = mock(NodeConfigManager.class); + private NodeTlsManager nodeTlsManager; @Before public void setUp() throws Exception { setUpConfig(); + setUpNodeTlsMan(); setUpNodeMainDelivery(); createFilesAndDirectories(); } @@ -61,7 +66,7 @@ public class NodeServerTest { Assert.assertNotNull(NodeServer.getServerInstance(config)); } - private void setUpConfig() throws IllegalAccessException { + private void setUpConfig() { PowerMockito.mockStatic(NodeConfigManager.class); when(config.isShutdown()).thenReturn(false); when(config.isConfigured()).thenReturn(true); @@ -74,15 +79,16 @@ public class NodeServerTest { 
when(config.isDeletePermitted("1")).thenReturn(true); when(config.getAllDests()).thenReturn(new DestInfo[0]); when(config.isTlsEnabled()).thenReturn(true); - when(config.getKSType()).thenReturn("PKCS12"); - when(config.getKSFile()).thenReturn("src/test/resources/aaf/org.onap.dmaap-dr.p12"); - when(config.getKSPass()).thenReturn("tVac2#@Stx%tIOE^x[c&2fgZ"); - when(config.getTstype()).thenReturn("jks"); - when(config.getTsfile()).thenReturn("src/test/resources/aaf/org.onap.dmaap-dr.trust.jks"); - when(config.getTspass()).thenReturn("XHX$2Vl?Lk*2CB.i1+ZFAhZd"); PowerMockito.when(NodeConfigManager.getInstance()).thenReturn(config); } + private void setUpNodeTlsMan() throws IOException { + Properties nodeProps = new Properties(); + nodeProps.load(NodeTlsManagerTest.class.getClassLoader().getResourceAsStream("node_test.properties")); + nodeTlsManager = new NodeTlsManager(nodeProps); + PowerMockito.when(NodeConfigManager.getNodeTlsManager()).thenReturn(nodeTlsManager); + } + private void setUpNodeMainDelivery() throws IllegalAccessException{ Delivery delivery = mock(Delivery.class); doNothing().when(delivery).resetQueue(anyObject()); diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java index 0dcc0a19..b48db880 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java @@ -24,7 +24,6 @@ package org.onap.dmaap.datarouter.node; import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyObject; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.anyString; @@ -50,10 +49,8 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.Test; import 
org.junit.runner.RunWith; -import org.mockito.BDDMockito; import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.internal.matchers.Any; +import org.onap.dmaap.datarouter.node.delivery.Delivery; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; @@ -238,16 +235,6 @@ public class NodeServletTest { verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), anyString()); } - @Test - public void Given_Request_Is_HTTP_PUT_On_Publish_On_AAF_Feed_And_Cadi_Enabled_And_No_Permissions_Then_Forbidden_Response_Is_Generated() throws Exception { - when(config.getCadiEnabled()).thenReturn(true); - when(config.getAafInstance("1")).thenReturn("*"); - when(request.getPathInfo()).thenReturn("/publish/1/fileName"); - setHeadersForValidRequest(true); - nodeServlet.doPut(request, response); - verify(response).sendError(eq(HttpServletResponse.SC_FORBIDDEN), anyString()); - verifyEnteringExitCalled(listAppender); - } @Test public void Given_Request_Is_HTTP_DELETE_On_Publish_With_Meta_Data_Malformed_Then_Bad_Request_Response_Is_Generated() throws Exception { diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeTlsManagerTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeTlsManagerTest.java new file mode 100644 index 00000000..f259d926 --- /dev/null +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeTlsManagerTest.java @@ -0,0 +1,51 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2019 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ +package org.onap.dmaap.datarouter.node; + +import java.io.IOException; +import java.util.Properties; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.onap.dmaap.datarouter.node.utils.NodeTlsManager; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.modules.junit4.PowerMockRunner; + +@RunWith(PowerMockRunner.class) +@PowerMockIgnore({"java.net.ssl", "javax.security.auth.x500.X500Principal", "com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"}) +public class NodeTlsManagerTest { + + private static NodeTlsManager nodeTlsManager; + + @BeforeClass + public static void setUpClass() throws IOException { + Properties nodeProps = new Properties(); + nodeProps.load(NodeTlsManagerTest.class.getClassLoader().getResourceAsStream("node_test.properties")); + nodeTlsManager = new NodeTlsManager(nodeProps); + } + + @Test + public void Given_Get_CanonicalName_Called_Valid_CN_Returned_From_JKS() { + String canonicalName = nodeTlsManager.getMyNameFromCertificate(); + Assert.assertEquals("dmaap-dr-node", canonicalName); + } + +} diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java index 40cb11fb..9469ce0f 100644 --- 
a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java @@ -29,12 +29,13 @@ import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; +import org.onap.dmaap.datarouter.node.utils.NodeUtils; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.MDC; @RunWith(PowerMockRunner.class) -@PowerMockIgnore({"java.net.ssl", "javax.security.auth.x500.X500Principal", "com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"}) +@PowerMockIgnore({"java.net.ssl", "com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "javax.management.*"}) public class NodeUtilsTest { @Mock @@ -76,16 +77,4 @@ public class NodeUtilsTest { Assert.assertEquals("123", MDC.get("RequestId")); Assert.assertEquals("456", MDC.get("InvocationId")); } - - @Test - public void Given_Get_CanonicalName_Called_Valid_CN_Returned_From_JKS() { - String canonicalName = NodeUtils.getCanonicalName("jks", "src/test/resources/org.onap.dmaap-dr-test-cert.jks", "WGxd2P6MDo*Bi4+UdzWs{?$8"); - Assert.assertEquals("dmaap-dr-node", canonicalName); - } - - @Test - public void Given_Get_CanonicalName_Called_Valid_CN_Returned_From_P12() { - String canonicalName = NodeUtils.getCanonicalName("PKCS12", "src/test/resources/aaf/org.onap.dmaap-dr.p12", "w7(O#.QV#kQ;L(8fsaoT7cY&"); - Assert.assertEquals("dmaap-dr-node", canonicalName); - } } diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/PathFinderTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/PathFinderTest.java index 25edd0c0..2bf320b3 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/PathFinderTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/PathFinderTest.java @@ -26,6 +26,8 @@ import static 
org.junit.Assert.assertThat; import org.junit.Test; import org.junit.runner.RunWith; +import org.onap.dmaap.datarouter.node.config.NodeConfig; +import org.onap.dmaap.datarouter.node.config.PathFinder; import org.powermock.modules.junit4.PowerMockRunner; @RunWith(PowerMockRunner.class) diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java index 1a4564ab..1747fc12 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java @@ -29,6 +29,7 @@ import java.io.Reader; import java.nio.charset.StandardCharsets; import org.junit.Test; import org.junit.runner.RunWith; +import org.onap.dmaap.datarouter.node.config.ProvData; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.modules.junit4.PowerMockRunner; diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java index 28dcba9a..5cd46e88 100644 --- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java +++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java @@ -29,6 +29,7 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.onap.dmaap.datarouter.node.log.StatusLog; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; diff --git a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.cred.props b/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.cred.props deleted file mode 100644 index 452a0112..00000000 --- a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.cred.props +++ 
/dev/null @@ -1,17 +0,0 @@ -############################################################ -# Properties Generated by AT&T Certificate Manager -# by root -# on 2022-03-22T12:31:16.865+0000 -# @copyright 2019, AT&T -############################################################ -Challenge=enc:2dZO7B2UVelU01IezilQ9hUYptjIvurC6JWO9vJHOiiqg9qUna3AdEMxzBpXcgIO -cadi_alias=dmaap-dr-node@dmaap-dr.onap.org -cadi_key_password=enc:ow380FBQU5xvQEDzTiXdfx-BD_0dEWbRCXjnUPxVFatk_7KaejgiLCVew6C6x8_9 -cadi_keyfile=src/test/resources/aaf/org.onap.dmaap-dr.keyfile -cadi_keystore=src/test/resources/aaf/org.onap.dmaap-dr.p12 -cadi_keystore_password=enc:Urky1AuqpokGwoSl72ypmhVONokY0H1Gy28S9P8jfvK1-d7W8SODXf87K37EubDo -cadi_keystore_password_jks=enc:lwXWmxaVAHy207uNeiHPYLho3qezj4xu7_iH4myTitXuj_bo9esv0e1L19HluXlM -cadi_keystore_password_p12=enc:Urky1AuqpokGwoSl72ypmhVONokY0H1Gy28S9P8jfvK1-d7W8SODXf87K37EubDo -cadi_truststore=src/test/resources/aaf/org.onap.dmaap-dr.trust.jks -cadi_truststore_password=enc:stAsWWKk5FOcWxyWsNqyQlSdkwWSxcy3Jed6RW_r4jyZosqV8kmSCJLSHNLyWyTH -cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US diff --git a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.keyfile b/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.keyfile deleted file mode 100644 index 987cae1f..00000000 --- a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.keyfile +++ /dev/null @@ -1,27 +0,0 @@ -uXr3OodaCvEvTDHfkpgh-_5UdpEkBowSRpKF18WNxMG0A9Y9k2REr-d075mNHshr5gOUYAlykgaX -ImvY-oFtG71N-Q0uqYnYuU7eX7zgcOOFNpeT4q2db3wkiScaZgdv2UnfDOVX2Aq53GzyYeKcGB1W -u0jKX8ryHVC0uvj69y1boiJBxQY1HwAqsh9q39Ut1XxyB42D7xDM7sa9G1I6YosQlthXPMt7Q_i0 -0CUHrozT6Wtnxfb8-qR6rM5PNmAxp8r5Jb4cnSbxJGEWpx1DkZPPZNvLjE9bBfinVftzrpIlqHd3 -2CCp3k6xrqui2c_0HaNILtKHX5VVKttRf9zoADk_l5_MowEJgh1fgu8m0r-iEB6oH18AWZ_PCtSr -A0cqEDGbP-h7e8w-Dwb0yS_oSfeDISarZ1Q46CQFcDz7f4bVB3AW67YGM4SQBaFGivWS_Lz_juOx 
-fhyo55HW_wL_92xRvAyXD1Eg64Lw0Cx0f9rn4uM7NkKaSCP8MmIvs84egCvDzCiWkT6SGzUTeZ7M -qI0ow2tJPFUuvhozcYxJW9sFzTqTFL7iLtrO5nHmo7YUVf2WU8ByGOfC-ylZPjw0fiKpe-7MXVSz -MIOxl3pgWSYt6kImBtZCateif6TgF_A-91yAOoQLm72baJKhOo-F-aJ4k6ToT4bZ-9-Hjs5Wk9sO -4thYzDcMaGrWsssnR9QMBjl9x3uw5bfOYGduyyRCWC_TYNorrW6Bvh7gFZAWk_f6cK-pPahRo_3O -erNlUHx6xaektjqUsmp91LzjCEWbmt5nDGw16VUSPhrtkca2ReWDL0vTnpjvPuRI7mWHdfyoVYMw -KUDkRgn61QvELC0Rl0C12polUEjqSxCRvGzL8QiOrU_Z1hXhVyp2gVIYvDKyuyguvJlniEBQ2qbM -3Y5savSaANj-b2-_J0ofhvYFGz1k_bjcCE6Wx2RSWk1nSJFJ0gFjp1Ky2cyKhhVMXCiWcbbA3vC8 -E1cE9Ixn4z5WYyHOAFyzrsiiapdn5MhB_kI-ObR2HuRy_GRJHMVpxqX15Bv3OXxWy51bXeYeRfit -SVCoyfXMXbx12eVG4tvf3WyYg9RqK_QN57VAmnffq09MAXuzFB1NuxSeJeBHGtBW6XIvb6E32K6I -rL7vfxGHhD7ZHqQzzie5I42HufANtBb1KNKHikeeACxxlpjOIHjjEZ6yffhoyhNbSHx4TNs7UYOf -FUKs0TCO9ffSJ8JT67X-baqlnSYFkdqR6QTGhltZq1ssPYq6Y6NGN1iEHJMfapev07AQgp6mBBxJ -dEwDZMAwlmb1yVkU1gfzcrGdAUeAAuM6LowPHUKYLJJvvaFLQJKbwwLvjnm_Tf9UR5QJ8a3JKiLt -F0Ykc1AeO-fyiNPUVRnkc3--Gs0bLUpLe0_medGOZ_LLk45GarEzBCGajgF_joz8fsndKtDrEnB0 -z-8iiQ4bTZb_ALjKnnhcdKjoJNhZ-e3AsmadYO7ve2K3ApwOelWWh24iZ-ZSUbnlw-OQMu4RvI9h -9KOTbu4gWokep3O2uT8-cVN-ax1FYRpmexHgIxZy-tmYjDdJTJQv3dMp5UngUs6xmIy5nyIBr51B -YORSQmXRvbdvYeY7jOEQUGfXfS2yYtzLfJ07f8ODQJ24APDKsVEGWVBcBMqp53o9W7cPJ-4vnMLU -mVYCKqVrT2bThRWb38OtaaJuA4A8GboDDe5Q0IZao0v3gqwgfCzJIjQx97AEHzEbLzEBxio_z8a0 -AYLi-_f7kMtgrMsj61gcRVUan5NVqnL7oVwFn8FuqeTRzMMbgJHUHfFzrgLxpRwnscGRzygZf39Q -JqqOLGriWe_IOhXQ7aIp3uyBF-brJnrXcQIp3PF0fbVTGK_VyKDB6J0BjJyjmyL0TA37hDr8usna -TgMtjF1qq9kOrJr2582n8TW-ogpyqm1ft7R18VT9_1JjtnCcTCudW-oJX87jv3UTJdaEkBMhc1Pe -euLti-qSG3Q5QeO5AxM8mP_9YbLHB9YQboRWnbfC9oIFRmVgGAcdbHNFn0hfILBOTIYStQL4 \ No newline at end of file diff --git a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.location.props b/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.location.props deleted file mode 100644 index c30adee4..00000000 --- a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.location.props +++ /dev/null @@ -1,8 +0,0 @@ 
-############################################################ -# Properties Generated by AT&T Certificate Manager -# by root -# on 2022-03-22T12:31:14.170+0000 -# @copyright 2019, AT&T -############################################################ -cadi_latitude=0.0 -cadi_longitude=0.0 diff --git a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.p12 b/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.p12 deleted file mode 100644 index 4238c868..00000000 Binary files a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.p12 and /dev/null differ diff --git a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.props b/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.props deleted file mode 100644 index c408b9ca..00000000 --- a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.props +++ /dev/null @@ -1,24 +0,0 @@ -############################################################ -# Properties Generated by AT&T Certificate Manager -# by root -# on 2022-03-22T12:31:14.169+0000 -# @copyright 2019, AT&T -############################################################ -aaf_env=DEV -aaf_id=dmaap-dr-node@dmaap-dr.onap.org -aaf_locate_url=https://aaf-locate.onap:8095 -aaf_locator_app_ns=org.osaaf.aaf -aaf_locator_container=oom -aaf_locator_container_ns=onap -aaf_locator_fqdn=dmaap-dr-node -aaf_locator_public_fqdn=dmaap-dr.onap.org -aaf_oauth2_introspect_url=https://AAF_LOCATE_URL/%CNS.%AAF_NS.introspect:2.1/introspect -aaf_oauth2_token_url=https://AAF_LOCATE_URL/%CNS.%AAF_NS.token:2.1/token -aaf_url=https://AAF_LOCATE_URL/%CNS.%AAF_NS.service:2.1 -aaf_url_cm=https://AAF_LOCATE_URL/%CNS.%AAF_NS.cm:2.1 -aaf_url_fs=https://AAF_LOCATE_URL/%CNS.%AAF_NS.fs:2.1 -aaf_url_gui=https://AAF_LOCATE_URL/%CNS.%AAF_NS.gui:2.1 -aaf_url_hello=https://aaf-locate.onap:8095/locate/onap.org.osaaf.aaf.hello:2.1 -aaf_url_oauth=https://AAF_LOCATE_URL/%CNS.%AAF_NS.oauth:2.1 
-cadi_prop_files=src/test/resources/aaf/org.onap.dmaap-dr.location.props:src/test/resources/aaf/org.onap.dmaap-dr.cred.props -cadi_protocols=TLSv1.1,TLSv1.2 diff --git a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.trust.jks b/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.trust.jks deleted file mode 100644 index e09e2e61..00000000 Binary files a/datarouter-node/src/test/resources/aaf/org.onap.dmaap-dr.trust.jks and /dev/null differ diff --git a/datarouter-node/src/test/resources/certs/org.onap.dmaap-dr-node.p12 b/datarouter-node/src/test/resources/certs/org.onap.dmaap-dr-node.p12 new file mode 100644 index 00000000..3793a9d4 Binary files /dev/null and b/datarouter-node/src/test/resources/certs/org.onap.dmaap-dr-node.p12 differ diff --git a/datarouter-node/src/test/resources/certs/truststore.jks b/datarouter-node/src/test/resources/certs/truststore.jks new file mode 100644 index 00000000..91547c60 Binary files /dev/null and b/datarouter-node/src/test/resources/certs/truststore.jks differ diff --git a/datarouter-node/src/test/resources/node_test.properties b/datarouter-node/src/test/resources/node_test.properties index 407d37c6..7560bd78 100644 --- a/datarouter-node/src/test/resources/node_test.properties +++ b/datarouter-node/src/test/resources/node_test.properties @@ -56,36 +56,35 @@ SpoolDir = src/test/resources/spool # The path to the redirection data file RedirectionFile = src/test/redirections.dat # +# https security required for publish request +TlsEnabled = false +# +# Enabled TLS protocols +NodeHttpsProtocols = TLSv1.1|TLSv1.2 +# # The type of keystore for https KeyStoreType = PKCS12 # +# The path to your KeyStoreFile +KeyStorePath = src/test/resources/certs/org.onap.dmaap-dr-node.p12 +# +# The key store password +KeyStorePass = secret +# +# The key manager password +KeyManagerPass = secret +# # The type of truststore for https TrustStoreType = jks # +# The path to your trust store +TrustStorePath = src/test/certs/truststore.jks +# +# 
The trust store password +TrustStorePass = secret +# # The path to the file used to trigger an orderly shutdown QuiesceFile = etc/SHUTDOWN # # The key used to generate passwords for node to node transfers NodeAuthKey = Node123! -# -# DR_NODE DEFAULT ENABLED TLS PROTOCOLS -NodeHttpsProtocols = TLSv1.1|TLSv1.2 -# -# AAF CADI enabled flag -CadiEnabled = true -# -# AAF type to generate permission string -AAFType = org.onap.dmaap-dr.feed -# -# AAF default instance to generate permission string - default should be legacy -AAFInstance = legacy -# -# AAF action to generate permission string - default should be publish -AAFAction = publish -# -# AAF Props file path -AAFPropsFilePath = src/test/resources/aaf/org.onap.dmaap-dr.props - -# https security required for publish request -TlsEnabled = true - diff --git a/datarouter-node/src/test/resources/org.onap.dmaap-dr-test-cert.jks b/datarouter-node/src/test/resources/org.onap.dmaap-dr-test-cert.jks deleted file mode 100644 index 2320dc9f..00000000 Binary files a/datarouter-node/src/test/resources/org.onap.dmaap-dr-test-cert.jks and /dev/null differ diff --git a/datarouter-node/src/test/resources/prov_data.json b/datarouter-node/src/test/resources/prov_data.json index cf455c71..8b9985c3 100644 --- a/datarouter-node/src/test/resources/prov_data.json +++ b/datarouter-node/src/test/resources/prov_data.json @@ -22,7 +22,6 @@ "feedid": 1, "name": "Default PM Feed", "business_description": "Default Feed", - "aaf_instance": "legacy", "publisher": "dradmin", "links": { "subscribe": "https://dmaap-dr-prov/subscribe/1", @@ -60,7 +59,6 @@ "feedid": 1, "follow_redirect": false, "decompress": true, - "aaf_instance": "legacy", "links": { "feed": "https://dmaap-dr-prov/feed/1", "log": "https://dmaap-dr-prov/sublog/1", -- cgit 1.2.3-korg