-rw-r--r--  INFO.yaml | 20
-rw-r--r--  ci_scripts/onap-style-java.xml | 225
-rwxr-xr-x  ci_scripts/pre-commit.sh | 56
-rw-r--r--  ci_scripts/suppressedFilesForStyleCheck.xml | 27
-rwxr-xr-x  datarouter-node/pom.xml | 13
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java | 58
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java | 202
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java | 124
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java | 330
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java | 27
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java | 150
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfoBuilder.java | 149
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java | 39
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java | 146
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java | 1017
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java | 311
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java | 172
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java | 282
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java | 151
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java | 143
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java | 66
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java | 489
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java | 8
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RateLimitedOperation.java | 51
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RedirManager.java | 77
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java | 273
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java | 15
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Target.java | 13
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java | 62
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/AuditFilter.java | 38
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/EELFFilter.java | 43
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/JettyFilter.java | 37
-rw-r--r--  datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/MetricsFilter.java | 42
-rw-r--r--  datarouter-node/src/main/resources/docker/Dockerfile | 2
-rw-r--r--  datarouter-node/src/main/resources/logback.xml | 304
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java | 47
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java | 186
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTaskTest.java | 133
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java | 162
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DestInfoTest.java | 77
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/LogManagerTest.java | 107
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java | 206
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java | 2
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java | 42
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/PathFinderTest.java | 75
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java | 153
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/RedirManagerTest.java | 73
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java | 6
-rw-r--r--  datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/TaskListTest.java | 44
-rw-r--r--  datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node | 1
-rw-r--r--  datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.M | 8
-rw-r--r--  datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.gz | Bin 0 -> 150 bytes
-rw-r--r--  datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.gz.M | 8
-rw-r--r--  datarouter-node/src/test/resources/org.onap.dmaap-dr-test-cert.jks | Bin 0 -> 3647 bytes
-rw-r--r--  datarouter-node/src/test/resources/redir_file | 2
-rwxr-xr-x  datarouter-prov/pom.xml | 8
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java | 26
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java | 8
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java | 11
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java | 88
-rwxr-xr-x  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java | 394
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/DRFeedsServlet.java | 30
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/FeedServlet.java | 40
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/GroupServlet.java | 90
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java | 80
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java | 85
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java | 10
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java | 20
-rwxr-xr-x  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java | 20
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java | 3
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/RouteServlet.java | 59
-rwxr-xr-x  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java | 143
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscribeServlet.java | 26
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java | 66
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java | 328
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/DeliveryExtraRecord.java | 7
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java | 26
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java | 43
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/FeedEndpointID.java | 4
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java | 89
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java | 64
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java | 2
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java | 29
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java | 22
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/PubFailRecord.java | 10
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/SubDelivery.java | 8
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java | 36
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java | 10
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRProvCadiFilter.java | 4
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java | 10
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/HttpServletUtils.java | 13
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/JSONUtilities.java | 5
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java | 228
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java | 999
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java | 12
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java | 4
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ThrottleFilter.java | 4
-rw-r--r--  datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/URLUtilities.java | 5
-rw-r--r--  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthTest.java | 134
-rwxr-xr-x  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/BaseServletTest.java | 90
-rw-r--r--  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java | 3
-rwxr-xr-x  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/FeedServletTest.java | 13
-rwxr-xr-x  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java | 44
-rwxr-xr-x  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java | 15
-rwxr-xr-x  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java | 203
-rw-r--r--  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java | 52
-rw-r--r--  datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoaderTest.java | 91
-rwxr-xr-x  datarouter-prov/src/test/resources/create.sql | 3
-rwxr-xr-x  datarouter-prov/src/test/resources/h2Database.properties | 11
-rw-r--r--  datarouter-prov/src/test/resources/prov_data.json | 129
-rw-r--r--  datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SampleSubscriberServlet.java | 2
-rw-r--r--  datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SubscriberProps.java | 2
-rw-r--r--  docs/release-notes.rst | 2
-rwxr-xr-x  pom.xml | 26
114 files changed, 5989 insertions, 4494 deletions
diff --git a/INFO.yaml b/INFO.yaml
index 419d763b..3b16c126 100644
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -3,9 +3,9 @@ project: 'dmaap-datarouter'
project_creation_date: '2017-06-30'
lifecycle_state: 'Incubation'
project_lead: &onap_releng_ptl
- name: 'Ram Koya'
- email: 'rk541m@att.com'
- id: 'rampi_k'
+ name: 'Mandar Sawant'
+ email: 'ms5838@att.com'
+ id: 'sawantmandar'
company: 'ATT'
timezone: 'America/Dallas'
primary_contact: *onap_releng_ptl
@@ -45,11 +45,6 @@ committers:
company: 'ATT'
id: 'sawantmandar'
timezone: 'America/Dallas'
- - name: 'Varun Gudisena'
- email: 'vg411h@att.com'
- company: 'ATT'
- id: 'vg411h'
- timezone: 'America/Dallas'
- name: 'Bhanu Ramesh'
email: 'bg6954@att.com'
company: 'ATT'
@@ -69,7 +64,13 @@ committers:
email: 'conor.ward@ericsson.com'
company: 'ericsson'
id: 'econwar'
- timezone: 'America/Dallas'
+ timezone: 'Europe/Dublin'
+ - name: 'Fiachra Corcoran'
+ email: 'fiachra.corcoran@est.tech'
+ company: 'ericsson'
+ id: 'efiacor'
+ timezone: 'Europe/Dublin'
+
tsc:
approval: 'https://lists.onap.org/pipermail/onap-tsc'
changes:
@@ -78,3 +79,4 @@ tsc:
name: 'Xinhui Li'
name: 'Jing Wang'
name: 'Ramdas Sawant'
+ name: 'Varun Gudisena'
diff --git a/ci_scripts/onap-style-java.xml b/ci_scripts/onap-style-java.xml
new file mode 100644
index 00000000..502ea7ac
--- /dev/null
+++ b/ci_scripts/onap-style-java.xml
@@ -0,0 +1,225 @@
+<?xml version="1.0"?>
+<!--
+ ============LICENSE_START=======================================================
+ Copyright (C) 2019 Nordix Foundation.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ SPDX-License-Identifier: Apache-2.0
+ ============LICENSE_END=========================================================
+-->
+<!DOCTYPE module PUBLIC
+ "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
+ "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
+
+<!--
+
+ Checkstyle configuration that checks the Google coding conventions from:
+
+ - Google Java Style
+ https://google-styleguide.googlecode.com/svn-history/r130/trunk/javaguide.html
+
+ Checkstyle is very configurable. Be sure to read the documentation at
+ http://checkstyle.sf.net (or in your downloaded distribution).
+
+ Most Checks are configurable, be sure to consult the documentation.
+
+ To completely disable a check, just comment it out or delete it from the file.
+
+ Authors: Max Vetrenko, Ruslan Diachenko, Roman Ivanov.
+
+ -->
+
+<module name = "Checker">
+ <property name="charset" value="UTF-8"/>
+
+ <property name="severity" value="warning"/>
+
+ <property name="fileExtensions" value="java, properties, xml"/>
+ <!-- Checks for whitespace -->
+ <!-- See http://checkstyle.sf.net/config_whitespace.html -->
+ <module name="FileTabCharacter">
+ <property name="eachLine" value="true"/>
+ </module>
+ <module name="SuppressionFilter">
+ <property name="file" value="ci_scripts/suppressedFilesForStyleCheck.xml" />
+ </module>
+ <module name="TreeWalker">
+ <module name="OuterTypeFilename"/>
+ <module name="IllegalTokenText">
+ <property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
+ <property name="format" value="\\u00(08|09|0(a|A)|0(c|C)|0(d|D)|22|27|5(C|c))|\\(0(10|11|12|14|15|42|47)|134)"/>
+ <property name="message" value="Avoid using corresponding octal or Unicode escape."/>
+ </module>
+ <module name="AvoidEscapedUnicodeCharacters">
+ <property name="allowEscapesForControlCharacters" value="true"/>
+ <property name="allowByTailComment" value="true"/>
+ <property name="allowNonPrintableEscapes" value="true"/>
+ </module>
+ <module name="LineLength">
+ <property name="max" value="120"/>
+ <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
+ </module>
+ <module name="AvoidStarImport"/>
+ <module name="OneTopLevelClass"/>
+ <module name="NoLineWrap"/>
+ <module name="EmptyBlock">
+ <property name="option" value="TEXT"/>
+ <property name="tokens" value="LITERAL_TRY, LITERAL_FINALLY, LITERAL_IF, LITERAL_ELSE, LITERAL_SWITCH"/>
+ </module>
+ <module name="NeedBraces"/>
+ <module name="LeftCurly">
+ </module>
+ <module name="RightCurly">
+ <property name="option" value="alone"/>
+ <property name="tokens" value="CLASS_DEF, METHOD_DEF, CTOR_DEF, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO, STATIC_INIT, INSTANCE_INIT"/>
+ </module>
+ <module name="WhitespaceAround">
+ <property name="allowEmptyConstructors" value="true"/>
+ <property name="allowEmptyMethods" value="true"/>
+ <property name="allowEmptyTypes" value="true"/>
+ <property name="allowEmptyLoops" value="true"/>
+ <message key="ws.notFollowed"
+ value="WhitespaceAround: ''{0}'' is not followed by whitespace. Empty blocks may only be represented as '{}' when not part of a multi-block statement (4.1.3)"/>
+ <message key="ws.notPreceded"
+ value="WhitespaceAround: ''{0}'' is not preceded with whitespace."/>
+ </module>
+ <module name="OneStatementPerLine"/>
+ <module name="MultipleVariableDeclarations"/>
+ <module name="ArrayTypeStyle"/>
+ <module name="MissingSwitchDefault"/>
+ <module name="FallThrough"/>
+ <module name="UpperEll"/>
+ <module name="ModifierOrder"/>
+ <module name="EmptyLineSeparator">
+ <property name="allowNoEmptyLineBetweenFields" value="true"/>
+ </module>
+ <module name="SeparatorWrap">
+ <property name="tokens" value="DOT"/>
+ <property name="option" value="nl"/>
+ </module>
+ <module name="SeparatorWrap">
+ <property name="tokens" value="COMMA"/>
+ <property name="option" value="EOL"/>
+ </module>
+ <module name="PackageName">
+ <property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/>
+ <message key="name.invalidPattern"
+ value="Package name ''{0}'' must match pattern ''{1}''."/>
+ </module>
+ <module name="TypeName">
+ <message key="name.invalidPattern"
+ value="Type name ''{0}'' must match pattern ''{1}''."/>
+ </module>
+ <module name="MemberName">
+ <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9]*$"/>
+ <message key="name.invalidPattern"
+ value="Member name ''{0}'' must match pattern ''{1}''."/>
+ </module>
+ <module name="ParameterName">
+ <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9]*$"/>
+ <message key="name.invalidPattern"
+ value="Parameter name ''{0}'' must match pattern ''{1}''."/>
+ </module>
+ <module name="LocalVariableName">
+ <property name="tokens" value="VARIABLE_DEF"/>
+ <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9]*$"/>
+ <property name="allowOneCharVarInForLoop" value="true"/>
+ <message key="name.invalidPattern"
+ value="Local variable name ''{0}'' must match pattern ''{1}''."/>
+ </module>
+ <module name="ClassTypeParameterName">
+ <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/>
+ <message key="name.invalidPattern"
+ value="Class type name ''{0}'' must match pattern ''{1}''."/>
+ </module>
+ <module name="MethodTypeParameterName">
+ <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/>
+ <message key="name.invalidPattern"
+ value="Method type name ''{0}'' must match pattern ''{1}''."/>
+ </module>
+ <module name="NoFinalizer"/>
+ <module name="GenericWhitespace">
+ <message key="ws.followed"
+ value="GenericWhitespace ''{0}'' is followed by whitespace."/>
+ <message key="ws.preceded"
+ value="GenericWhitespace ''{0}'' is preceded with whitespace."/>
+ <message key="ws.illegalFollow"
+ value="GenericWhitespace ''{0}'' should followed by whitespace."/>
+ <message key="ws.notPreceded"
+ value="GenericWhitespace ''{0}'' is not preceded with whitespace."/>
+ </module>
+ <module name="Indentation">
+ <property name="basicOffset" value="4"/>
+ <property name="braceAdjustment" value="0"/>
+ <property name="caseIndent" value="4"/>
+ <property name="throwsIndent" value="4"/>
+ <property name="lineWrappingIndentation" value="4"/>
+ <property name="arrayInitIndent" value="4"/>
+ </module>
+ <module name="AbbreviationAsWordInName">
+ <property name="ignoreFinal" value="false"/>
+ <property name="allowedAbbreviationLength" value="20"/>
+ </module>
+ <module name="OverloadMethodsDeclarationOrder"/>
+ <module name="VariableDeclarationUsageDistance"/>
+ <module name="CustomImportOrder">
+ <property name="sortImportsInGroupAlphabetically" value="true"/>
+ <property name="separateLineBetweenGroups" value="true"/>
+ <property name="customImportOrderRules" value="STATIC###THIRD_PARTY_PACKAGE"/>
+ </module>
+ <module name="MethodParamPad"/>
+ <module name="OperatorWrap">
+ <property name="option" value="NL"/>
+ <property name="tokens" value="BAND, BOR, BSR, BXOR, DIV, EQUAL, GE, GT, LAND, LE, LITERAL_INSTANCEOF, LOR, LT, MINUS, MOD, NOT_EQUAL, PLUS, QUESTION, SL, SR, STAR "/>
+ </module>
+ <module name="AnnotationLocation">
+ <property name="tokens" value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF"/>
+ </module>
+ <module name="AnnotationLocation">
+ <property name="tokens" value="VARIABLE_DEF"/>
+ <property name="allowSamelineMultipleAnnotations" value="true"/>
+ </module>
+ <module name="NonEmptyAtclauseDescription"/>
+ <module name="JavadocTagContinuationIndentation"/>
+ <module name="SummaryJavadocCheck">
+ <property name="forbiddenSummaryFragments" value="^@return the *|^This method returns |^A [{]@code [a-zA-Z0-9]+[}]( is a )"/>
+ </module>
+ <module name="JavadocParagraph"/>
+ <module name="AtclauseOrder">
+ <property name="tagOrder" value="@param, @return, @throws, @deprecated"/>
+ <property name="target" value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF, VARIABLE_DEF"/>
+ </module>
+ <module name="JavadocMethod">
+ <property name="scope" value="public"/>
+ <property name="allowMissingParamTags" value="true"/>
+ <property name="allowMissingThrowsTags" value="true"/>
+ <property name="allowMissingReturnTag" value="true"/>
+ <property name="minLineCount" value="2"/>
+ <property name="allowedAnnotations" value="Override, Test"/>
+ <property name="allowThrowsTagsForSubclasses" value="true"/>
+ </module>
+ <module name="MethodName">
+ <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9_]*$"/>
+ <message key="name.invalidPattern"
+ value="Method name ''{0}'' must match pattern ''{1}''."/>
+ </module>
+ <module name="SingleLineJavadoc">
+ <property name="ignoreInlineTags" value="false"/>
+ </module>
+ <module name="EmptyCatchBlock">
+ <property name="exceptionVariableName" value="expected"/>
+ </module>
+ <module name="CommentsIndentation"/>
+ </module>
+</module>
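
For reference, a small hypothetical Java fragment (not taken from this change) that satisfies a few of the conventions the new onap-style-java.xml enforces: a leading group of static imports, alphabetized third-party imports in a separate group, four-space indentation, and binary operators wrapped onto the continuation line.

import static java.lang.Math.max;

import java.util.ArrayList;
import java.util.List;

public class StyleExample {

    private final List<String> spoolDirs = new ArrayList<>();

    /**
     * Returns the combined length of both arguments, or the spool count if that is larger.
     */
    public int longerLength(String first, String second) {
        // OperatorWrap is configured with option NL, so the '+' starts the wrapped line
        int combined = first.length()
            + second.length();
        return max(combined, spoolDirs.size());
    }
}
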
diff --git a/ci_scripts/pre-commit.sh b/ci_scripts/pre-commit.sh
new file mode 100755
index 00000000..636f8ff3
--- /dev/null
+++ b/ci_scripts/pre-commit.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+#
+# ============LICENSE_START=======================================================
+# Copyright (C) 2019 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+#
+# Pre-commit hook for running checkstyle on changed Java sources
+#
+# To use this you need:
+# 1. Checkstyle's jar file downloaded *version is important checkstyle-8.13-all.jar
+# 2. To configure git:
+# * git config --add checkstyle.jar <location_of_jar>
+# 3. Copy this file to your .git/hooks directory as pre-commit
+#
+# Now, when you commit, you will be disallowed from doing so
+# until you pass your checkstyle checks.
+
+changed_files=" "
+for file in $(git diff --cached --name-status | grep -E '\.(java)$' | grep -vE '^D' | awk '{print $2}')
+do
+ changed_files+="$file "
+done
+
+printf "Using checkstyle sheet "
+checkstlye_jar_command='git config --get checkstyle.jar'
+
+if ! ($checkstlye_jar_command)
+then
+ printf "You must configure checkstyle in your git config"
+ exit 1
+fi
+
+checkstyle_warnings=$(java -jar $($checkstlye_jar_command) -c ci_scripts/onap-style-java.xml $changed_files | grep WARN)
+if [ $? == 0 ]
+then
+ printf "\nWarnings found\n\n"
+ echo "$checkstyle_warnings"
+ printf "\n###############################################################\n\nFix warnings before committing\n\n"
+ exit 1
+else
+ printf "\nCode checkstyle passed.\n"
+fi
diff --git a/ci_scripts/suppressedFilesForStyleCheck.xml b/ci_scripts/suppressedFilesForStyleCheck.xml
new file mode 100644
index 00000000..ed56442a
--- /dev/null
+++ b/ci_scripts/suppressedFilesForStyleCheck.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<!--
+ ============LICENSE_START=======================================================
+ Copyright (C) 2019 Nordix Foundation.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ SPDX-License-Identifier: Apache-2.0
+ ============LICENSE_END=========================================================
+-->
+<!DOCTYPE suppressions PUBLIC
+ "-//Puppy Crawl//DTD Suppressions 1.1//EN"
+ "http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
+
+<suppressions>
+ <suppress files=".*Test\.java" checks="[a-zA-Z0-9]*"/>
+</suppressions>
\ No newline at end of file
diff --git a/datarouter-node/pom.xml b/datarouter-node/pom.xml
index 06aa3fcf..42c1c9a4 100755
--- a/datarouter-node/pom.xml
+++ b/datarouter-node/pom.xml
@@ -53,6 +53,10 @@
<artifactId>commons-codec</artifactId>
</dependency>
<dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+ <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
@@ -128,6 +132,11 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.awaitility</groupId>
+ <artifactId>awaitility</artifactId>
+ <version>3.1.6</version>
+ </dependency>
</dependencies>
<profiles>
<profile>
@@ -361,10 +370,6 @@
<artifactId>cobertura-maven-plugin</artifactId>
</plugin>
<plugin>
- <groupId>org.sonatype.plugins</groupId>
- <artifactId>nexus-staging-maven-plugin</artifactId>
- </plugin>
- <plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
</plugin>
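
The pom changes above add commons-io and Awaitility 3.1.6 as datarouter-node dependencies. As a rough illustration of the polling-assertion style Awaitility enables in tests, here is a minimal sketch; the test class and flag are invented for this example, not part of the change.

import static org.awaitility.Awaitility.await;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Test;

public class AwaitilityUsageSketch {

    @Test
    public void waitsForAsynchronousWork() {
        AtomicBoolean delivered = new AtomicBoolean(false);
        new Thread(() -> delivered.set(true)).start();
        // Poll the condition until it holds or 10 seconds elapse, instead of sleeping
        await().atMost(10, TimeUnit.SECONDS).until(delivered::get);
    }
}
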
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java
index 30ad1618..245dbccd 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilter.java
@@ -17,23 +17,24 @@
* SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+
package org.onap.dmaap.datarouter.node;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import org.onap.aaf.cadi.PropAccess;
-import org.onap.aaf.cadi.filter.CadiFilter;
-
+import java.io.IOException;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
+import org.onap.aaf.cadi.PropAccess;
+import org.onap.aaf.cadi.filter.CadiFilter;
public class DRNodeCadiFilter extends CadiFilter {
+
private static EELFLogger logger = EELFManager.getInstance().getLogger(NodeServlet.class);
DRNodeCadiFilter(boolean init, PropAccess access) throws ServletException {
@@ -41,23 +42,16 @@ public class DRNodeCadiFilter extends CadiFilter {
}
@Override
- public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+ public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+ throws IOException, ServletException {
HttpServletRequest httpRequest = (HttpServletRequest) request;
String path = httpRequest.getPathInfo();
if (!(path.startsWith("/internal"))) {
- if (!(httpRequest.getMethod().equalsIgnoreCase("POST"))) {
- if (httpRequest.getMethod().equalsIgnoreCase("DELETE") && path.startsWith("/delete")) {
+ if (!("POST".equalsIgnoreCase(httpRequest.getMethod()))) {
+ if ("DELETE".equalsIgnoreCase(httpRequest.getMethod()) && path.startsWith("/delete")) {
chain.doFilter(request, response);
} else {
- String feedId = getFeedId(request, response);
- String aafDbInstance = NodeConfigManager.getInstance().getAafInstance(feedId);
- if (aafDbInstance != null && !aafDbInstance.equals("") && !aafDbInstance.equalsIgnoreCase("legacy")) {
- logger.info("DRNodeCadiFilter - doFilter: FeedId - " + feedId + ":" + "AAF Instance -" + aafDbInstance);
- super.doFilter(request, response, chain);
- } else {
- logger.info("DRNodeCadiFilter - doFilter: FeedId - " + feedId + ":" + "Legacy Feed");
- chain.doFilter(request, response);
- }
+ doFilterWithFeedId(request, response, chain);
}
}
} else {
@@ -72,9 +66,10 @@ public class DRNodeCadiFilter extends CadiFilter {
if (fileid == null) {
logger.error("NODE0105 Rejecting bad URI for PUT " + req.getPathInfo() + " from " + req.getRemoteAddr());
try {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
+ resp.sendError(HttpServletResponse.SC_NOT_FOUND,
+ "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
} catch (IOException e) {
- logger.error("NODE0541 DRNodeCadiFilter.getFeedId: ", e.getMessage());
+ logger.error("NODE0541 DRNodeCadiFilter.getFeedId: ", e);
}
return null;
}
@@ -82,19 +77,34 @@ public class DRNodeCadiFilter extends CadiFilter {
if (fileid.startsWith("/publish/")) {
fileid = fileid.substring(9);
- int i = fileid.indexOf('/');
- if (i == -1 || i == fileid.length() - 1) {
- logger.error("NODE0105 Rejecting bad URI for PUT (publish) of " + req.getPathInfo() + " from " + req.getRemoteAddr());
+ int index = fileid.indexOf('/');
+ if (index == -1 || index == fileid.length() - 1) {
+ logger.error("NODE0105 Rejecting bad URI for PUT (publish) of " + req.getPathInfo() + " from " + req
+ .getRemoteAddr());
try {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid request URI. Expecting <feed-publishing-url>/<fileid>. Possible missing fileid.");
+ resp.sendError(HttpServletResponse.SC_NOT_FOUND,
+ "Invalid request URI. Expecting <feed-publishing-url>/<fileid>. "
+ + "Possible missing fileid.");
} catch (IOException e) {
- logger.error("NODE0542 DRNodeCadiFilter.getFeedId: ", e.getMessage());
+ logger.error("NODE0542 DRNodeCadiFilter.getFeedId: ", e);
}
return null;
}
- feedid = fileid.substring(0, i);
+ feedid = fileid.substring(0, index);
}
return feedid;
}
+ private void doFilterWithFeedId(ServletRequest request, ServletResponse response, FilterChain chain)
+ throws IOException, ServletException {
+ String feedId = getFeedId(request, response);
+ String aafDbInstance = NodeConfigManager.getInstance().getAafInstance(feedId);
+ if (aafDbInstance != null && !"".equals(aafDbInstance) && !"legacy".equalsIgnoreCase(aafDbInstance)) {
+ logger.info("DRNodeCadiFilter - doFilter: FeedId - " + feedId + ":" + "AAF Instance -" + aafDbInstance);
+ super.doFilter(request, response, chain);
+ } else {
+ logger.info("DRNodeCadiFilter - doFilter: FeedId - " + feedId + ":" + "Legacy Feed");
+ chain.doFilter(request, response);
+ }
+ }
}
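
A recurring pattern in this file is flipping string comparisons so the literal comes first, for example "POST".equalsIgnoreCase(httpRequest.getMethod()) and "legacy".equalsIgnoreCase(aafDbInstance). A minimal standalone sketch (values invented) of why literal-first comparison is preferred:

public class ConstantFirstComparison {

    public static void main(String[] args) {
        String method = null; // e.g. a value that was never set

        // Literal-first comparison is null-safe: equalsIgnoreCase simply returns false
        System.out.println("POST".equalsIgnoreCase(method));

        // Variable-first comparison throws when the variable is null
        try {
            System.out.println(method.equalsIgnoreCase("POST"));
        } catch (NullPointerException npe) {
            System.out.println("variable-first comparison threw NullPointerException");
        }
    }
}
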
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java
index 501e489c..150d2aa2 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Delivery.java
@@ -23,67 +23,37 @@
package org.onap.dmaap.datarouter.node;
-import java.util.*;
-import java.io.*;
-
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Objects;
/**
* Main control point for delivering files to destinations.
- * <p>
- * The Delivery class manages assignment of delivery threads to delivery
- * queues and creation and destruction of delivery queues as
- * configuration changes. DeliveryQueues are assigned threads based on a
- * modified round-robin approach giving priority to queues with more work
- * as measured by both bytes to deliver and files to deliver and lower
- * priority to queues that already have delivery threads working.
- * A delivery thread continues to work for a delivery queue as long as
- * that queue has more files to deliver.
+ *
+ * <p>The Delivery class manages assignment of delivery threads to delivery queues and creation and destruction of
+ * delivery queues as configuration changes. DeliveryQueues are assigned threads based on a modified round-robin
+ * approach giving priority to queues with more work as measured by both bytes to deliver and files to deliver and lower
+ * priority to queues that already have delivery threads working. A delivery thread continues to work for a delivery
+ * queue as long as that queue has more files to deliver.
*/
public class Delivery {
- private static EELFLogger logger = EELFManager.getInstance().getLogger(Delivery.class);
-
- private static class DelItem implements Comparable<DelItem> {
- private String pubid;
- private String spool;
-
- public int compareTo(DelItem x) {
- int i = pubid.compareTo(x.pubid);
- if (i == 0) {
- i = spool.compareTo(x.spool);
- }
- return (i);
- }
-
- public String getPublishId() {
- return (pubid);
- }
-
- public String getSpool() {
- return (spool);
- }
-
- public DelItem(String pubid, String spool) {
- this.pubid = pubid;
- this.spool = spool;
- }
- }
+ private static final String TOTAL = " total=";
+ private static final String YELLOW = " yellow=";
+ private static EELFLogger logger = EELFManager.getInstance().getLogger(Delivery.class);
private double fdstart;
private double fdstop;
private int threads;
private int curthreads;
private NodeConfigManager config;
- private Hashtable<String, DeliveryQueue> dqs = new Hashtable<String, DeliveryQueue>();
+ private HashMap<String, DeliveryQueue> dqs = new HashMap<>();
private DeliveryQueue[] queues = new DeliveryQueue[0];
private int qpos = 0;
private long nextcheck;
- private Runnable cmon = new Runnable() {
- public void run() {
- checkconfig();
- }
- };
/**
* Constructs a new Delivery system using the specified configuration manager.
@@ -92,10 +62,37 @@ public class Delivery {
*/
public Delivery(NodeConfigManager config) {
this.config = config;
+ Runnable cmon = this::checkconfig;
config.registerConfigTask(cmon);
checkconfig();
}
+ /**
+ * Reset the retry timer for a delivery queue.
+ */
+ public synchronized void resetQueue(String spool) {
+ if (spool != null) {
+ DeliveryQueue dq = dqs.get(spool);
+ if (dq != null) {
+ dq.resetQueue();
+ }
+ }
+ }
+
+ /**
+ * Mark the task in spool a success.
+ */
+ public synchronized boolean markTaskSuccess(String spool, String pubId) {
+ boolean succeeded = false;
+ if (spool != null) {
+ DeliveryQueue dq = dqs.get(spool);
+ if (dq != null) {
+ succeeded = dq.markTaskSuccess(pubId);
+ }
+ }
+ return succeeded;
+ }
+
private void cleardir(String dir) {
if (dqs.get(dir) != null) {
return;
@@ -113,12 +110,11 @@ public class Delivery {
File spoolfile = new File(config.getSpoolBase());
long tspace = spoolfile.getTotalSpace();
long start = (long) (tspace * fdstart);
- long stop = (long) (tspace * fdstop);
long cur = spoolfile.getUsableSpace();
if (cur >= start) {
return;
}
- Vector<DelItem> cv = new Vector<DelItem>();
+ ArrayList<DelItem> cv = new ArrayList<>();
for (String sdir : dqs.keySet()) {
for (String meta : (new File(sdir)).list()) {
if (!meta.endsWith(".M") || meta.charAt(0) == '.') {
@@ -129,27 +125,21 @@ public class Delivery {
}
DelItem[] items = cv.toArray(new DelItem[cv.size()]);
Arrays.sort(items);
- logger.info("NODE0501 Free disk space below red threshold. current=" + cur + " red=" + start + " total=" + tspace);
- for (DelItem item : items) {
- long amount = dqs.get(item.getSpool()).cancelTask(item.getPublishId());
- logger.info("NODE0502 Attempting to discard " + item.getSpool() + "/" + item.getPublishId() + " to free up disk");
- if (amount > 0) {
- cur += amount;
- if (cur >= stop) {
- cur = spoolfile.getUsableSpace();
- }
- if (cur >= stop) {
- logger.info("NODE0503 Free disk space at or above yellow threshold. current=" + cur + " yellow=" + stop + " total=" + tspace);
- return;
- }
- }
+ long stop = (long) (tspace * fdstop);
+ logger.info(
+ "NODE0501 Free disk space below red threshold. current=" + cur + " red=" + start + TOTAL + tspace);
+ if (determineFreeDiskSpace(spoolfile, tspace, stop, cur, items)) {
+ return;
}
cur = spoolfile.getUsableSpace();
if (cur >= stop) {
- logger.info("NODE0503 Free disk space at or above yellow threshold. current=" + cur + " yellow=" + stop + " total=" + tspace);
+ logger.info("NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop
+ + TOTAL + tspace);
return;
}
- logger.warn("NODE0504 Unable to recover sufficient disk space to reach green status. current=" + cur + " yellow=" + stop + " total=" + tspace);
+ logger.warn(
+ "NODE0504 Unable to recover sufficient disk space to reach green status. current=" + cur + YELLOW
+ + stop + TOTAL + tspace);
}
private void cleardirs() {
@@ -188,7 +178,7 @@ public class Delivery {
DestInfo[] alldis = config.getAllDests();
DeliveryQueue[] nqs = new DeliveryQueue[alldis.length];
qpos = 0;
- Hashtable<String, DeliveryQueue> ndqs = new Hashtable<String, DeliveryQueue>();
+ HashMap<String, DeliveryQueue> ndqs = new HashMap<>();
for (DestInfo di : alldis) {
String spl = di.getSpool();
DeliveryQueue dq = dqs.get(spl);
@@ -205,11 +195,8 @@ public class Delivery {
cleardirs();
while (curthreads < threads) {
curthreads++;
- (new Thread() {
- {
- setName("Delivery Thread");
- }
-
+ (new Thread("del-thread-" + curthreads) {
+ @Override
public void run() {
dodelivery();
}
@@ -246,6 +233,7 @@ public class Delivery {
try {
wait(nextcheck + 500 - now);
} catch (Exception e) {
+ logger.error("InterruptedException", e);
}
now = System.currentTimeMillis();
}
@@ -257,29 +245,69 @@ public class Delivery {
}
}
- /**
- * Reset the retry timer for a delivery queue
- */
- public synchronized void resetQueue(String spool) {
- if (spool != null) {
- DeliveryQueue dq = dqs.get(spool);
- if (dq != null) {
- dq.resetQueue();
+ private boolean determineFreeDiskSpace(File spoolfile, long tspace, long stop, long cur, DelItem[] items) {
+ for (DelItem item : items) {
+ long amount = dqs.get(item.getSpool()).cancelTask(item.getPublishId());
+ logger.info("NODE0502 Attempting to discard " + item.getSpool() + "/" + item.getPublishId()
+ + " to free up disk");
+ if (amount > 0) {
+ cur += amount;
+ if (cur >= stop) {
+ cur = spoolfile.getUsableSpace();
+ }
+ if (cur >= stop) {
+ logger.info(
+ "NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop
+ + TOTAL + tspace);
+ return true;
+ }
}
}
+ return false;
}
- /**
- * Mark the task in spool a success
- */
- public synchronized boolean markTaskSuccess(String spool, String pubId) {
- boolean succeeded = false;
- if (spool != null) {
- DeliveryQueue dq = dqs.get(spool);
- if (dq != null) {
- succeeded = dq.markTaskSuccess(pubId);
+ static class DelItem implements Comparable<DelItem> {
+
+ private String pubid;
+ private String spool;
+
+ public DelItem(String pubid, String spool) {
+ this.pubid = pubid;
+ this.spool = spool;
+ }
+
+ public int compareTo(DelItem other) {
+ int diff = pubid.compareTo(other.pubid);
+ if (diff == 0) {
+ diff = spool.compareTo(other.spool);
}
+ return (diff);
+ }
+
+ public String getPublishId() {
+ return (pubid);
+ }
+
+ public String getSpool() {
+ return (spool);
+ }
+
+ @Override
+ public boolean equals(Object object) {
+ if (this == object) {
+ return true;
+ }
+ if (object == null || getClass() != object.getClass()) {
+ return false;
+ }
+ DelItem delItem = (DelItem) object;
+ return Objects.equals(pubid, delItem.pubid)
+ && Objects.equals(getSpool(), delItem.getSpool());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pubid, getSpool());
}
- return succeeded;
}
}
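
The disk-cleanup logic above sorts DelItem entries before discarding spooled files, and DelItem.compareTo orders by publish ID first, with the spool directory as a tie-breaker. Because publish IDs begin with an epoch-millisecond timestamp, the sort visits the oldest files first. A self-contained sketch of that ordering, using a simplified stand-in for the package-private DelItem (the publish IDs below are invented):

import java.util.Arrays;

public class DelItemOrderingSketch {

    // Simplified stand-in for Delivery.DelItem; publish IDs have the form <epoch-millis>.<node-fqdn>
    static final class Item implements Comparable<Item> {

        private final String pubid;
        private final String spool;

        Item(String pubid, String spool) {
            this.pubid = pubid;
            this.spool = spool;
        }

        @Override
        public int compareTo(Item other) {
            int diff = pubid.compareTo(other.pubid);
            return diff != 0 ? diff : spool.compareTo(other.spool);
        }

        @Override
        public String toString() {
            return pubid + " @ " + spool;
        }
    }

    public static void main(String[] args) {
        Item[] items = {
            new Item("1562160000500.dr-node-1", "/spool/s/1"),
            new Item("1562159999000.dr-node-1", "/spool/s/2"),
            new Item("1562160000500.dr-node-1", "/spool/s/0")
        };
        // Oldest publish ID sorts first; equal IDs fall back to spool-directory order
        Arrays.sort(items);
        System.out.println(Arrays.toString(items));
    }
}
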
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java
index bef8dab2..0ba9ecfd 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryQueue.java
@@ -24,8 +24,11 @@
package org.onap.dmaap.datarouter.node;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.io.*;
import java.util.*;
+import org.jetbrains.annotations.Nullable;
/**
* Mechanism for monitoring and controlling delivery of files to a destination.
@@ -64,6 +67,7 @@ import java.util.*;
* failure timer is active or if no files are found in a directory scan.
*/
public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
+ private static EELFLogger logger = EELFManager.getInstance().getLogger(DeliveryQueue.class);
private DeliveryQueueHelper deliveryQueueHelper;
private DestInfo destinationInfo;
private Hashtable<String, DeliveryTask> working = new Hashtable<>();
@@ -113,6 +117,7 @@ public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
*/
private synchronized void markSuccess(DeliveryTask task) {
working.remove(task.getPublishId());
+ logger.debug(task.getPublishId() + " marked as success.");
task.clean();
failed = false;
failduration = 0;
@@ -122,6 +127,7 @@ public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
* Mark that a delivery task has expired.
*/
private synchronized void markExpired(DeliveryTask task) {
+ logger.debug(task.getPublishId() + " marked as expired.");
task.clean();
}
@@ -130,6 +136,7 @@ public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
*/
private synchronized void markFailNoRetry(DeliveryTask task) {
working.remove(task.getPublishId());
+ logger.debug(task.getPublishId() + " marked as failed permanently");
task.clean();
failed = false;
failduration = 0;
@@ -159,6 +166,7 @@ public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
*/
private synchronized void markRedirect(DeliveryTask task) {
working.remove(task.getPublishId());
+ logger.debug(task.getPublishId() + " marked as redirected.");
retry.put(task.getPublishId(), task);
}
@@ -167,6 +175,7 @@ public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
*/
private synchronized void markFailWithRetry(DeliveryTask task) {
working.remove(task.getPublishId());
+ logger.debug(task.getPublishId() + " marked as temporarily failed.");
retry.put(task.getPublishId(), task);
fdupdate();
}
@@ -202,53 +211,15 @@ public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
todo = new Vector<>();
String[] files = dir.list();
Arrays.sort(files);
- for (String fname : files) {
- if (!fname.endsWith(".M")) {
- continue;
- }
- String fname2 = fname.substring(0, fname.length() - 2);
- long pidtime = 0;
- int dot = fname2.indexOf('.');
- if (dot < 1) {
- continue;
- }
- try {
- pidtime = Long.parseLong(fname2.substring(0, dot));
- } catch (Exception e) {
- }
- if (pidtime < 1000000000000L) {
- continue;
- }
- if (working.get(fname2) != null) {
- continue;
- }
- DeliveryTask dt = retry.get(fname2);
- if (dt == null) {
- dt = new DeliveryTask(this, fname2);
- }
- todo.add(dt);
- }
+ scanForNextTask(files);
retry = new Hashtable<>();
}
- if (todoindex < todo.size()) {
- DeliveryTask dt = todo.get(todoindex);
- if (dt.isCleaned()) {
- todoindex++;
- continue;
- }
- if (destinationInfo.isPrivilegedSubscriber() && dt.getResumeTime() > System.currentTimeMillis()) {
- retry.put(dt.getPublishId(), dt);
- todoindex++;
- continue;
- }
- if (dt.getDate() >= mindate) {
- return (dt);
- }
- todoindex++;
- reportExpiry(dt);
- continue;
+ DeliveryTask dt = getDeliveryTask(mindate);
+ if (dt != null) {
+ return dt;
}
- return (null);
+ return null;
+
}
}
@@ -359,11 +330,12 @@ public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
* files to deliver
*/
public void run() {
- DeliveryTask t;
+ DeliveryTask task;
long endtime = System.currentTimeMillis() + deliveryQueueHelper.getFairTimeLimit();
int filestogo = deliveryQueueHelper.getFairFileLimit();
- while ((t = getNext()) != null) {
- t.run();
+ while ((task = getNext()) != null) {
+ logger.debug("Processing file: " + task.getPublishId());
+ task.run();
if (--filestogo <= 0 || System.currentTimeMillis() > endtime) {
break;
}
@@ -403,4 +375,62 @@ public class DeliveryQueue implements Runnable, DeliveryTaskHelper {
}
return false;
}
+ private void scanForNextTask(String[] files) {
+ for (String fname : files) {
+ String pubId = getPubId(fname);
+ if (pubId == null) {
+ continue;
+ }
+ DeliveryTask dt = retry.get(pubId);
+ if (dt == null) {
+ dt = new DeliveryTask(this, pubId);
+ }
+ todo.add(dt);
+ }
+ }
+
+ @Nullable
+ private DeliveryTask getDeliveryTask(long mindate) {
+ if (todoindex < todo.size()) {
+ DeliveryTask dt = todo.get(todoindex);
+ if (dt.isCleaned()) {
+ todoindex++;
+ }
+ if (destinationInfo.isPrivilegedSubscriber() && dt.getResumeTime() > System.currentTimeMillis()) {
+ retry.put(dt.getPublishId(), dt);
+ todoindex++;
+ }
+ if (dt.getDate() >= mindate) {
+ return (dt);
+ }
+ todoindex++;
+ reportExpiry(dt);
+ }
+ return null;
+ }
+
+ @Nullable
+ private String getPubId(String fname) {
+ if (!fname.endsWith(".M")) {
+ return null;
+ }
+ String fname2 = fname.substring(0, fname.length() - 2);
+ long pidtime = 0;
+ int dot = fname2.indexOf('.');
+ if (dot < 1) {
+ return null;
+ }
+ try {
+ pidtime = Long.parseLong(fname2.substring(0, dot));
+ } catch (Exception e) {
+ logger.error("Exception", e);
+ }
+ if (pidtime < 1000000000000L) {
+ return null;
+ }
+ if (working.get(fname2) != null) {
+ return null;
+ }
+ return fname2;
+ }
}
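
The new getPubId helper above derives a publish ID from a spool metafile name: the name must end in ".M" and begin with a plausible epoch-millisecond timestamp, and entries already being worked are skipped. A compact sketch of that filename-to-publish-ID mapping (file names invented; the working-map check is omitted here):

public class PubIdParsingSketch {

    // Mirrors the suffix and timestamp checks in DeliveryQueue.getPubId()
    static String toPubId(String fname) {
        if (!fname.endsWith(".M")) {
            return null;
        }
        String base = fname.substring(0, fname.length() - 2);
        int dot = base.indexOf('.');
        if (dot < 1) {
            return null;
        }
        long pidtime;
        try {
            pidtime = Long.parseLong(base.substring(0, dot));
        } catch (NumberFormatException nfe) {
            return null;
        }
        return pidtime < 1000000000000L ? null : base;
    }

    public static void main(String[] args) {
        System.out.println(toPubId("1562160000000.test-dr-node.M")); // 1562160000000.test-dr-node
        System.out.println(toPubId("1562160000000.test-dr-node"));   // null: data file, no ".M" suffix
        System.out.println(toPubId("123.test-dr-node.M"));           // null: timestamp prefix too small
    }
}
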
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java
index cca61707..7ed35928 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTask.java
@@ -24,29 +24,40 @@
package org.onap.dmaap.datarouter.node;
-import java.io.*;
-import java.net.*;
-import java.util.*;
-import java.util.zip.GZIPInputStream;
+import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID;
+import static org.onap.dmaap.datarouter.node.NodeUtils.isFiletypeGzip;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.ProtocolException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.UUID;
+import java.util.zip.GZIPInputStream;
+import org.jetbrains.annotations.Nullable;
import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
import org.slf4j.MDC;
-import static com.att.eelf.configuration.Configuration.*;
-import static org.onap.dmaap.datarouter.node.NodeUtils.isFiletypeGzip;
-
/**
* A file to be delivered to a destination.
- * <p>
- * A Delivery task represents a work item for the data router - a file that
- * needs to be delivered and provides mechanisms to get information about
- * the file and its delivery data as well as to attempt delivery.
+ *
+ * <p>A Delivery task represents a work item for the data router - a file that needs to be delivered and provides
+ * mechanisms to get information about the file and its delivery data as well as to attempt delivery.
*/
public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
- private static EELFLogger eelfLogger = EELFManager.getInstance()
- .getLogger(DeliveryTask.class);
+
+ private static final String DECOMPRESSION_STATUS = "Decompression_Status";
+ private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(DeliveryTask.class);
private DeliveryTaskHelper deliveryTaskHelper;
private String pubid;
private DestInfo destInfo;
@@ -69,12 +80,11 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
/**
- * Create a delivery task for a given delivery queue and pub ID
+ * Create a delivery task for a given delivery queue and pub ID.
*
* @param deliveryTaskHelper The delivery task helper for the queue this task is in.
- * @param pubid The publish ID for this file. This is used as
- * the base for the file name in the spool directory and is of
- * the form <milliseconds since 1970>.<fqdn of initial data router node>
+ * @param pubid The publish ID for this file. This is used as the base for the file name in the spool directory and
+ * is of the form (milliseconds since 1970).(fqdn of initial data router node)
*/
DeliveryTask(DeliveryTaskHelper deliveryTaskHelper, String pubid) {
this.deliveryTaskHelper = deliveryTaskHelper;
@@ -84,70 +94,70 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
this.followRedirects = destInfo.isFollowRedirects();
feedid = destInfo.getLogData();
spool = destInfo.getSpool();
- String dfn = spool + "/" + pubid;
+ String dfn = spool + File.separator + pubid;
String mfn = dfn + ".M";
- datafile = new File(spool + "/" + pubid);
+ datafile = new File(spool + File.separator + pubid);
metafile = new File(mfn);
boolean monly = destInfo.isMetaDataOnly();
date = Long.parseLong(pubid.substring(0, pubid.indexOf('.')));
resumeTime = System.currentTimeMillis();
- Vector<String[]> hdrv = new Vector<>();
+ ArrayList<String[]> hdrv = new ArrayList<>();
try (BufferedReader br = new BufferedReader(new FileReader(metafile))) {
- String s = br.readLine();
- int i = s.indexOf('\t');
- method = s.substring(0, i);
+ String line = br.readLine();
+ int index = line.indexOf('\t');
+ method = line.substring(0, index);
NodeUtils.setIpAndFqdnForEelf(method);
if (!"DELETE".equals(method) && !monly) {
length = datafile.length();
}
- fileid = s.substring(i + 1);
- while ((s = br.readLine()) != null) {
- i = s.indexOf('\t');
- String h = s.substring(0, i);
- String v = s.substring(i + 1);
- if ("x-dmaap-dr-routing".equalsIgnoreCase(h)) {
- subid = v.replaceAll("[^ ]*/", "");
+ fileid = line.substring(index + 1);
+ while ((line = br.readLine()) != null) {
+ index = line.indexOf('\t');
+ String header = line.substring(0, index);
+ String headerValue = line.substring(index + 1);
+ if ("x-dmaap-dr-routing".equalsIgnoreCase(header)) {
+ subid = headerValue.replaceAll("[^ ]*/", "");
feedid = deliveryTaskHelper.getFeedId(subid.replaceAll(" .*", ""));
}
- if (length == 0 && h.toLowerCase().startsWith("content-")) {
+ if (length == 0 && header.toLowerCase().startsWith("content-")) {
continue;
}
- if (h.equalsIgnoreCase("content-type")) {
- ctype = v;
+ if ("content-type".equalsIgnoreCase(header)) {
+ ctype = headerValue;
}
- if (h.equalsIgnoreCase("x-onap-requestid")) {
- MDC.put(MDC_KEY_REQUEST_ID, v);
+ if ("x-onap-requestid".equalsIgnoreCase(header)) {
+ MDC.put(MDC_KEY_REQUEST_ID, headerValue);
}
- if (h.equalsIgnoreCase("x-invocationid")) {
- MDC.put("InvocationId", v);
- v = UUID.randomUUID().toString();
- newInvocationId = v;
+ if ("x-invocationid".equalsIgnoreCase(header)) {
+ MDC.put("InvocationId", headerValue);
+ headerValue = UUID.randomUUID().toString();
+ newInvocationId = headerValue;
}
- hdrv.add(new String[]{h, v});
+ hdrv.add(new String[]{header, headerValue});
}
} catch (Exception e) {
- eelfLogger.error("Exception "+ Arrays.toString(e.getStackTrace()), e.getMessage());
+ eelfLogger.error("Exception", e);
}
hdrs = hdrv.toArray(new String[hdrv.size()][]);
url = deliveryTaskHelper.getDestURL(fileid);
}
/**
- * Is the object a DeliveryTask with the same publication ID?
+ * Is the object a DeliveryTask with the same publication ID.
*/
- public boolean equals(Object o) {
- if (!(o instanceof DeliveryTask)) {
+ public boolean equals(Object object) {
+ if (!(object instanceof DeliveryTask)) {
return (false);
}
- return (pubid.equals(((DeliveryTask) o).pubid));
+ return (pubid.equals(((DeliveryTask) object).pubid));
}
/**
* Compare the publication IDs.
*/
- public int compareTo(DeliveryTask o) {
- return (pubid.compareTo(o.pubid));
+ public int compareTo(DeliveryTask other) {
+ return (pubid.compareTo(other.pubid));
}
/**
@@ -165,79 +175,49 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
}
/**
- * Get the publish ID
+ * Get the publish ID.
*/
String getPublishId() {
return (pubid);
}
/**
- * Attempt delivery
+ * Attempt delivery.
*/
public void run() {
attempts++;
try {
destInfo = deliveryTaskHelper.getDestinationInfo();
- boolean expect100 = destInfo.isUsing100();
boolean monly = destInfo.isMetaDataOnly();
length = 0;
if (!"DELETE".equals(method) && !monly) {
length = datafile.length();
}
- if (destInfo.isDecompress() && isFiletypeGzip(datafile) && fileid.endsWith(".gz")){
- fileid = fileid.replace(".gz", "");
- }
+ stripSuffixIfIsDecompress();
url = deliveryTaskHelper.getDestURL(fileid);
- URL u = new URL(url);
- HttpURLConnection uc = (HttpURLConnection) u.openConnection();
- uc.setConnectTimeout(60000);
- uc.setReadTimeout(60000);
- uc.setInstanceFollowRedirects(false);
- uc.setRequestMethod(method);
- uc.setRequestProperty("Content-Length", Long.toString(length));
- uc.setRequestProperty("Authorization", destInfo.getAuth());
- uc.setRequestProperty("X-DMAAP-DR-PUBLISH-ID", pubid);
- for (String[] nv : hdrs) {
- uc.addRequestProperty(nv[0], nv[1]);
- }
- if (length > 0) {
- if (expect100) {
- uc.setRequestProperty("Expect", "100-continue");
- }
- uc.setDoOutput(true);
- if (destInfo.isDecompress()) {
- if (isFiletypeGzip(datafile)) {
- sendDecompressedFile(uc);
- } else {
- uc.setRequestProperty("Decompression_Status", "UNSUPPORTED_FORMAT");
- sendFile(uc);
- }
- } else {
- sendFile(uc);
- }
- }
- int rc = uc.getResponseCode();
- String rmsg = uc.getResponseMessage();
- if (rmsg == null) {
- String h0 = uc.getHeaderField(0);
- if (h0 != null) {
- int i = h0.indexOf(' ');
- int j = h0.indexOf(' ', i + 1);
- if (i != -1 && j != -1) {
- rmsg = h0.substring(j + 1);
- }
- }
- }
+ URL urlObj = new URL(url);
+ HttpURLConnection urlConnection = (HttpURLConnection) urlObj.openConnection();
+ urlConnection.setConnectTimeout(60000);
+ urlConnection.setReadTimeout(60000);
+ urlConnection.setInstanceFollowRedirects(false);
+ urlConnection.setRequestMethod(method);
+ urlConnection.setRequestProperty("Content-Length", Long.toString(length));
+ urlConnection.setRequestProperty("Authorization", destInfo.getAuth());
+ urlConnection.setRequestProperty("X-DMAAP-DR-PUBLISH-ID", pubid);
+ boolean expect100 = destInfo.isUsing100();
+ int rc = deliverFileToSubscriber(expect100, urlConnection);
+ String rmsg = urlConnection.getResponseMessage();
+ rmsg = getResponseMessage(urlConnection, rmsg);
String xpubid = null;
InputStream is;
if (rc >= 200 && rc <= 299) {
- is = uc.getInputStream();
- xpubid = uc.getHeaderField("X-DMAAP-DR-PUBLISH-ID");
+ is = urlConnection.getInputStream();
+ xpubid = urlConnection.getHeaderField("X-DMAAP-DR-PUBLISH-ID");
} else {
if (rc >= 300 && rc <= 399) {
- rmsg = uc.getHeaderField("Location");
+ rmsg = urlConnection.getHeaderField("Location");
}
- is = uc.getErrorStream();
+ is = urlConnection.getErrorStream();
}
byte[] buf = new byte[4096];
if (is != null) {
@@ -247,23 +227,22 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
}
deliveryTaskHelper.reportStatus(this, rc, xpubid, rmsg);
} catch (Exception e) {
- eelfLogger.error("Exception "+ Arrays.toString(e.getStackTrace()),e);
+ eelfLogger.error("Exception " + Arrays.toString(e.getStackTrace()), e);
deliveryTaskHelper.reportException(this, e);
}
}
/**
- * To send decompressed gzip to the subscribers
+ * To send decompressed gzip to the subscribers.
*
* @param httpURLConnection connection used to make request
- * @throws IOException
*/
private void sendDecompressedFile(HttpURLConnection httpURLConnection) throws IOException {
byte[] buffer = new byte[8164];
- httpURLConnection.setRequestProperty("Decompression_Status", "SUCCESS");
+ httpURLConnection.setRequestProperty(DECOMPRESSION_STATUS, "SUCCESS");
OutputStream outputStream = getOutputStream(httpURLConnection);
if (outputStream != null) {
- int bytesRead = 0;
+ int bytesRead;
try (InputStream gzipInputStream = new GZIPInputStream(new FileInputStream(datafile))) {
int bufferLength = buffer.length;
while ((bytesRead = gzipInputStream.read(buffer, 0, bufferLength)) > 0) {
@@ -271,8 +250,8 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
}
outputStream.close();
} catch (IOException e) {
- httpURLConnection.setRequestProperty("Decompression_Status", "FAILURE");
- eelfLogger.info("Could not decompress file");
+ httpURLConnection.setRequestProperty(DECOMPRESSION_STATUS, "FAILURE");
+ eelfLogger.info("Could not decompress file", e);
sendFile(httpURLConnection);
}
@@ -283,44 +262,42 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
* To send any file to the subscriber.
*
* @param httpURLConnection connection used to make request
- * @throws IOException
*/
private void sendFile(HttpURLConnection httpURLConnection) throws IOException {
OutputStream os = getOutputStream(httpURLConnection);
- if (os != null) {
- long sofar = 0;
- try (InputStream is = new FileInputStream(datafile)) {
- byte[] buf = new byte[1024 * 1024];
- while (sofar < length) {
- int i = buf.length;
- if (sofar + i > length) {
- i = (int) (length - sofar);
- }
- i = is.read(buf, 0, i);
- if (i <= 0) {
- throw new IOException("Unexpected problem reading data file " + datafile);
- }
- sofar += i;
- os.write(buf, 0, i);
+ if (os == null) {
+ return;
+ }
+ long sofar = 0;
+ try (InputStream is = new FileInputStream(datafile)) {
+ byte[] buf = new byte[1024 * 1024];
+ while (sofar < length) {
+ int len = buf.length;
+ if (sofar + len > length) {
+ len = (int) (length - sofar);
}
- os.close();
- } catch (IOException ioe) {
- deliveryTaskHelper.reportDeliveryExtra(this, sofar);
- throw ioe;
+ len = is.read(buf, 0, len);
+ if (len <= 0) {
+ throw new IOException("Unexpected problem reading data file " + datafile);
+ }
+ sofar += len;
+ os.write(buf, 0, len);
}
+ os.close();
+ } catch (IOException ioe) {
+ deliveryTaskHelper.reportDeliveryExtra(this, sofar);
+ throw ioe;
}
}
/**
- * Get the outputstream that will be used to send data
+ * Get the outputstream that will be used to send data.
*
* @param httpURLConnection connection used to make request
     * @return An OutputStream that can be used to send your data.
- * @throws IOException
*/
private OutputStream getOutputStream(HttpURLConnection httpURLConnection) throws IOException {
OutputStream outputStream = null;
-
try {
outputStream = httpURLConnection.getOutputStream();
} catch (ProtocolException pe) {
@@ -331,22 +308,74 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
return outputStream;
}
+ private void stripSuffixIfIsDecompress() {
+ if (destInfo.isDecompress() && isFiletypeGzip(datafile) && fileid.endsWith(".gz")) {
+ fileid = fileid.replace(".gz", "");
+ }
+ }
+
+ private int deliverFileToSubscriber(boolean expect100, HttpURLConnection uc) throws IOException {
+ for (String[] nv : hdrs) {
+ uc.addRequestProperty(nv[0], nv[1]);
+ }
+ if (length > 0) {
+ if (expect100) {
+ uc.setRequestProperty("Expect", "100-continue");
+ }
+ uc.setDoOutput(true);
+ if (destInfo.isDecompress()) {
+ if (isFiletypeGzip(datafile)) {
+ sendDecompressedFile(uc);
+ } else {
+ uc.setRequestProperty(DECOMPRESSION_STATUS, "UNSUPPORTED_FORMAT");
+ sendFile(uc);
+ }
+ } else {
+ sendFile(uc);
+ }
+ }
+ return uc.getResponseCode();
+ }
+
+ @Nullable
+ private String getResponseMessage(HttpURLConnection uc, String rmsg) {
+ if (rmsg == null) {
+ String h0 = uc.getHeaderField(0);
+ if (h0 != null) {
+ int indexOfSpace1 = h0.indexOf(' ');
+ int indexOfSpace2 = h0.indexOf(' ', indexOfSpace1 + 1);
+ if (indexOfSpace1 != -1 && indexOfSpace2 != -1) {
+ rmsg = h0.substring(indexOfSpace2 + 1);
+ }
+ }
+ }
+ return rmsg;
+ }
+
/**
- * Remove meta and data files
+ * Remove meta and data files.
*/
void clean() {
- datafile.delete();
- metafile.delete();
+ deleteWithRetry(datafile);
+ deleteWithRetry(metafile);
eelfLogger.info(EelfMsgs.INVOKE, newInvocationId);
eelfLogger.info(EelfMsgs.EXIT);
hdrs = null;
}
- /**
- * Set the resume time for a delivery task.
- */
- void setResumeTime(long resumeTime) {
- this.resumeTime = resumeTime;
+ private void deleteWithRetry(File file) {
+ int maxTries = 3;
+ int tryCount = 1;
+ while (tryCount <= maxTries) {
+ try {
+ Files.deleteIfExists(file.toPath());
+ break;
+ } catch (IOException e) {
+ eelfLogger.error("IOException : Failed to delete file :"
+ + file.getName() + " on attempt " + tryCount, e);
+ }
+ tryCount++;
+ }
}
/**
@@ -357,14 +386,21 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
}
/**
- * Has this delivery task been cleaned?
+ * Set the resume time for a delivery task.
+ */
+ void setResumeTime(long resumeTime) {
+ this.resumeTime = resumeTime;
+ }
+
+ /**
+ * Has this delivery task been cleaned.
*/
boolean isCleaned() {
return (hdrs == null);
}
/**
- * Get length of body
+ * Get length of body.
*/
public long getLength() {
return (length);
@@ -378,58 +414,58 @@ public class DeliveryTask implements Runnable, Comparable<DeliveryTask> {
}
/**
- * Get the most recent delivery attempt URL
+ * Get the most recent delivery attempt URL.
*/
public String getURL() {
return (url);
}
/**
- * Get the content type
+ * Get the content type.
*/
String getCType() {
return (ctype);
}
/**
- * Get the method
+ * Get the method.
*/
String getMethod() {
return (method);
}
/**
- * Get the file ID
+ * Get the file ID.
*/
String getFileId() {
return (fileid);
}
/**
- * Get the number of delivery attempts
+ * Get the number of delivery attempts.
*/
int getAttempts() {
return (attempts);
}
/**
- * Get the (space delimited list of) subscription ID for this delivery task
+ * Get the (space delimited list of) subscription ID for this delivery task.
*/
String getSubId() {
return (subid);
}
/**
- * Get the feed ID for this delivery task
+ * Get the feed ID for this delivery task.
*/
String getFeedId() {
return (feedid);
}
/**
- * Get the followRedirects for this delivery task
+ * Get the followRedirects for this delivery task.
*/
- public boolean getFollowRedirects() {
- return(followRedirects);
+ boolean getFollowRedirects() {
+ return (followRedirects);
}
}
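
For reference, a minimal sketch of the HttpURLConnection setup that the refactored deliver()/deliverFileToSubscriber() path performs; the subscriber URL, publish id, method and Authorization value below are hypothetical placeholders, not values taken from this change (the real ones come from DestInfo and DeliveryTaskHelper):

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class DeliverySketch {
        public static void main(String[] args) throws Exception {
            // Placeholder subscriber endpoint.
            URL url = new URL("https://subscriber.example:8443/delivery/file123");
            HttpURLConnection uc = (HttpURLConnection) url.openConnection();
            uc.setConnectTimeout(60000);
            uc.setReadTimeout(60000);
            uc.setInstanceFollowRedirects(false);
            uc.setRequestMethod("PUT");
            uc.setRequestProperty("Authorization", "Basic placeholder");     // hypothetical
            uc.setRequestProperty("X-DMAAP-DR-PUBLISH-ID", "pub-0001");      // hypothetical
            uc.setRequestProperty("Expect", "100-continue");                 // only when the destination uses 100-continue
            uc.setDoOutput(true);
            try (OutputStream os = uc.getOutputStream()) {
                os.write("payload".getBytes());                              // stands in for sendFile()
            }
            System.out.println(uc.getResponseCode() + " " + uc.getResponseMessage());
        }
    }
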
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java
index d4ac8bd6..b9068f2f 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DeliveryTaskHelper.java
@@ -26,32 +26,33 @@ package org.onap.dmaap.datarouter.node;
/**
* Interface to allow independent testing of the DeliveryTask code.
- * <p>
- * This interface represents all the configuraiton information and
- * feedback mechanisms that a delivery task needs.
+ *
+ * <p>This interface represents all the configuration information and feedback mechanisms that a delivery task needs.
*/
public interface DeliveryTaskHelper {
+
/**
- * Report that a delivery attempt failed due to an exception (like can't connect to remote host)
+ * Report that a delivery attempt failed due to an exception (like can't connect to remote host).
*
- * @param task The task that failed
+ * @param task The task that failed
* @param exception The exception that occurred
*/
void reportException(DeliveryTask task, Exception exception);
/**
- * Report that a delivery attempt completed (successfully or unsuccessfully)
+ * Report that a delivery attempt completed (successfully or unsuccessfully).
*
- * @param task The task that failed
- * @param status The HTTP status
- * @param xpubid The publish ID from the far end (if any)
+ * @param task The task that failed
+ * @param status The HTTP status
+ * @param xpubid The publish ID from the far end (if any)
* @param location The redirection location for a 3XX response
*/
void reportStatus(DeliveryTask task, int status, String xpubid, String location);
/**
- * Report that a delivery attempt either failed while sending data or that an error was returned instead of a 100 Continue.
+ * Report that a delivery attempt either failed while sending data or that an error was returned instead of a 100
+ * Continue.
*
* @param task The task that failed
* @param sent The number of bytes sent or -1 if an error was returned instead of 100 Continue.
@@ -59,14 +60,14 @@ public interface DeliveryTaskHelper {
void reportDeliveryExtra(DeliveryTask task, long sent);
/**
- * Get the destination information for the delivery queue
+ * Get the destination information for the delivery queue.
*
* @return The destination information
*/
DestInfo getDestinationInfo();
/**
- * Given a file ID, get the URL to deliver to
+ * Given a file ID, get the URL to deliver to.
*
* @param fileid The file id
* @return The URL to deliver to
@@ -74,7 +75,7 @@ public interface DeliveryTaskHelper {
String getDestURL(String fileid);
/**
- * Get the feed ID for a subscription
+ * Get the feed ID for a subscription.
*
* @param subid The subscription ID
     * @return The feed ID
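
Since this interface exists to let DeliveryTask be tested independently, a minimal stub sketch is shown below; it assumes the six methods above are the complete interface, and the returned URL and feed id are placeholders:

    package org.onap.dmaap.datarouter.node;

    // Illustrative test stub only; not part of this change.
    class StubDeliveryTaskHelper implements DeliveryTaskHelper {

        private final DestInfo destInfo;

        StubDeliveryTaskHelper(DestInfo destInfo) {
            this.destInfo = destInfo;
        }

        public void reportException(DeliveryTask task, Exception exception) {
            // a real helper would log the failure and schedule a retry
        }

        public void reportStatus(DeliveryTask task, int status, String xpubid, String location) {
            // record the HTTP status of the attempt
        }

        public void reportDeliveryExtra(DeliveryTask task, long sent) {
            // record bytes sent, or -1 when an error replaced the 100 Continue
        }

        public DestInfo getDestinationInfo() {
            return destInfo;
        }

        public String getDestURL(String fileid) {
            return "https://subscriber.example/delivery/" + fileid;   // hypothetical mapping
        }

        public String getFeedId(String subid) {
            return "1";                                               // placeholder feed id
        }
    }
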
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java
index 8890fe96..f5fa6e98 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfo.java
@@ -25,9 +25,10 @@
package org.onap.dmaap.datarouter.node;
/**
- * Information for a delivery destination that doesn't change from message to message
+ * Information for a delivery destination that doesn't change from message to message.
*/
public class DestInfo {
+
private String name;
private String spool;
private String subid;
@@ -40,114 +41,33 @@ public class DestInfo {
private boolean privilegedSubscriber;
private boolean decompress;
private boolean followRedirects;
- private String aafInstance;
-
- public static class DestInfoBuilder {
- private String name;
- private String spool;
- private String subid;
- private String logdata;
- private String url;
- private String authuser;
- private String authentication;
- private boolean metaonly;
- private boolean use100;
- private boolean privilegedSubscriber;
- private boolean followRedirects;
- private boolean decompress;
- private NodeConfig.ProvSubscription subscription;
-
- public DestInfoBuilder setName(String name) {
- this.name = name;
- return this;
- }
-
- public DestInfoBuilder setSpool(String spool) {
- this.spool = spool;
- return this;
- }
-
- public DestInfoBuilder setSubid(String subid) {
- this.subid = subid;
- return this;
- }
-
- public DestInfoBuilder setLogdata(String logdata) {
- this.logdata = logdata;
- return this;
- }
-
- public DestInfoBuilder setUrl(String url) {
- this.url = url;
- return this;
- }
-
- public DestInfoBuilder setAuthuser(String authuser) {
- this.authuser = authuser;
- return this;
- }
-
- public DestInfoBuilder setAuthentication(String authentication) {
- this.authentication = authentication;
- return this;
- }
-
- public DestInfoBuilder setMetaonly(boolean metaonly) {
- this.metaonly = metaonly;
- return this;
- }
-
- public DestInfoBuilder setUse100(boolean use100) {
- this.use100 = use100;
- return this;
- }
-
- public DestInfoBuilder setPrivilegedSubscriber(boolean privilegedSubscriber) {
- this.privilegedSubscriber = privilegedSubscriber;
- return this;
- }
-
- public DestInfoBuilder setFollowRedirects(boolean followRedirects) {
- this.followRedirects = followRedirects;
- return this;
- }
-
- public DestInfoBuilder setDecompress(boolean decompress) {
- this.decompress = decompress;
- return this;
- }
-
- public DestInfoBuilder setSubscription(NodeConfig.ProvSubscription subscription) {
- this.subscription = subscription;
- return this;
- }
-
- public DestInfo createDestInfo() {
- return new DestInfo(this);
- }
- }
+ /**
+ * Create a destination information object.
+ *
+ * @param destInfoBuilder DestInfo Object Builder
+ */
public DestInfo(DestInfoBuilder destInfoBuilder) {
- this.name = destInfoBuilder.name;
- this.spool = destInfoBuilder.spool;
- this.subid = destInfoBuilder.subid;
- this.logdata = destInfoBuilder.logdata;
- this.url = destInfoBuilder.url;
- this.authuser = destInfoBuilder.authuser;
- this.authentication = destInfoBuilder.authentication;
- this.metaonly = destInfoBuilder.metaonly;
- this.use100 = destInfoBuilder.use100;
- this.privilegedSubscriber = destInfoBuilder.privilegedSubscriber;
- this.followRedirects = destInfoBuilder.followRedirects;
- this.decompress = destInfoBuilder.decompress;
+ this.name = destInfoBuilder.getName();
+ this.spool = destInfoBuilder.getSpool();
+ this.subid = destInfoBuilder.getSubid();
+ this.logdata = destInfoBuilder.getLogdata();
+ this.url = destInfoBuilder.getUrl();
+ this.authuser = destInfoBuilder.getAuthuser();
+ this.authentication = destInfoBuilder.getAuthentication();
+ this.metaonly = destInfoBuilder.isMetaonly();
+ this.use100 = destInfoBuilder.isUse100();
+ this.privilegedSubscriber = destInfoBuilder.isPrivilegedSubscriber();
+ this.followRedirects = destInfoBuilder.isFollowRedirects();
+ this.decompress = destInfoBuilder.isDecompress();
}
/**
* Create a destination information object.
*
- * @param name n:fqdn or s:subid
- * @param spool The directory where files are spooled.
- * @param subscription The subscription.
+ * @param name n:fqdn or s:subid
+ * @param spool The directory where files are spooled.
+ * @param subscription The subscription.
*/
public DestInfo(String name, String spool, NodeConfig.ProvSubscription subscription) {
this.name = name;
@@ -164,8 +84,8 @@ public class DestInfo {
this.decompress = subscription.isDecompress();
}
- public boolean equals(Object o) {
- return ((o instanceof DestInfo) && ((DestInfo) o).spool.equals(spool));
+ public boolean equals(Object object) {
+ return ((object instanceof DestInfo) && ((DestInfo) object).spool.equals(spool));
}
public int hashCode() {
@@ -173,7 +93,7 @@ public class DestInfo {
}
/**
- * Get the name of this destination
+ * Get the name of this destination.
*/
public String getName() {
return (name);
@@ -217,7 +137,7 @@ public class DestInfo {
}
/**
- * Get the user for authentication
+ * Get the user for authentication.
*
* @return The name of the user for logging
*/
@@ -226,7 +146,7 @@ public class DestInfo {
}
/**
- * Get the authentication header
+ * Get the authentication header.
*
* @return The string to use to authenticate to the recipient.
*/
@@ -235,7 +155,7 @@ public class DestInfo {
}
/**
- * Is this a metadata only delivery?
+ * Is this a metadata only delivery.
*
* @return True if this is a metadata only delivery
*/
@@ -244,7 +164,7 @@ public class DestInfo {
}
/**
- * Should I send expect 100-continue header?
+ * Should I send expect 100-continue header.
*
* @return True if I should.
*/
@@ -253,23 +173,23 @@ public class DestInfo {
}
/**
- * Should we wait to receive a file processed acknowledgement before deleting file
+ * Should we wait to receive a file processed acknowledgement before deleting file.
*/
public boolean isPrivilegedSubscriber() {
return (privilegedSubscriber);
}
/**
- * Should I follow redirects?
- *
- * @return True if I should.
- */
+ * Should I follow redirects.
+ *
+ * @return True if I should.
+ */
public boolean isFollowRedirects() {
return (followRedirects);
}
/**
- * Should i decompress the file before sending it on
+     * Should I decompress the file before sending it on.
*
* @return True if I should.
*/
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfoBuilder.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfoBuilder.java
new file mode 100644
index 00000000..00c5cd8b
--- /dev/null
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/DestInfoBuilder.java
@@ -0,0 +1,149 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+public class DestInfoBuilder {
+
+ private String destInfoName;
+ private String destInfoSpool;
+ private String destInfoSubId;
+ private String destInfoLogData;
+ private String destInfoUrl;
+ private String destInfoAuthUser;
+ private String destInfoAuthentication;
+ private boolean destInfoMetaOnly;
+ private boolean destInfoUse100;
+ private boolean destInfoPrivilegedSubscriber;
+ private boolean destInfoFollowRedirects;
+ private boolean destInfoDecompress;
+
+ public String getName() {
+ return destInfoName;
+ }
+
+ public DestInfoBuilder setName(String name) {
+ this.destInfoName = name;
+ return this;
+ }
+
+ public String getSpool() {
+ return destInfoSpool;
+ }
+
+ public DestInfoBuilder setSpool(String spool) {
+ this.destInfoSpool = spool;
+ return this;
+ }
+
+ public String getSubid() {
+ return destInfoSubId;
+ }
+
+ public DestInfoBuilder setSubid(String subid) {
+ this.destInfoSubId = subid;
+ return this;
+ }
+
+ String getLogdata() {
+ return destInfoLogData;
+ }
+
+ DestInfoBuilder setLogdata(String logdata) {
+ this.destInfoLogData = logdata;
+ return this;
+ }
+
+ public String getUrl() {
+ return destInfoUrl;
+ }
+
+ public DestInfoBuilder setUrl(String url) {
+ this.destInfoUrl = url;
+ return this;
+ }
+
+ String getAuthuser() {
+ return destInfoAuthUser;
+ }
+
+ DestInfoBuilder setAuthuser(String authuser) {
+ this.destInfoAuthUser = authuser;
+ return this;
+ }
+
+ String getAuthentication() {
+ return destInfoAuthentication;
+ }
+
+ DestInfoBuilder setAuthentication(String authentication) {
+ this.destInfoAuthentication = authentication;
+ return this;
+ }
+
+ boolean isMetaonly() {
+ return destInfoMetaOnly;
+ }
+
+ DestInfoBuilder setMetaonly(boolean metaonly) {
+ this.destInfoMetaOnly = metaonly;
+ return this;
+ }
+
+ boolean isUse100() {
+ return destInfoUse100;
+ }
+
+ DestInfoBuilder setUse100(boolean use100) {
+ this.destInfoUse100 = use100;
+ return this;
+ }
+
+ boolean isPrivilegedSubscriber() {
+ return destInfoPrivilegedSubscriber;
+ }
+
+ DestInfoBuilder setPrivilegedSubscriber(boolean privilegedSubscriber) {
+ this.destInfoPrivilegedSubscriber = privilegedSubscriber;
+ return this;
+ }
+
+ boolean isFollowRedirects() {
+ return destInfoFollowRedirects;
+ }
+
+ DestInfoBuilder setFollowRedirects(boolean followRedirects) {
+ this.destInfoFollowRedirects = followRedirects;
+ return this;
+ }
+
+ boolean isDecompress() {
+ return destInfoDecompress;
+ }
+
+ DestInfoBuilder setDecompress(boolean decompress) {
+ this.destInfoDecompress = decompress;
+ return this;
+ }
+
+ DestInfo createDestInfo() {
+ return new DestInfo(this);
+ }
+} \ No newline at end of file
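
A usage sketch of the new builder, mirroring the pattern used in LogManager.Uploader; all values are placeholders, and the call has to live in the org.onap.dmaap.datarouter.node package because several setters and createDestInfo() are package-private:

    DestInfo nodeDest = new DestInfoBuilder()
            .setName("n:node.example")                                // placeholder
            .setSpool("/opt/app/datartr/spool/n/node.example")        // placeholder
            .setSubid(null)
            .setLogdata(null)
            .setUrl("https://node.example:8443/internal/publish")     // placeholder
            .setAuthuser("node.example")
            .setAuthentication("Basic placeholder")
            .setMetaonly(false)
            .setUse100(true)
            .setPrivilegedSubscriber(false)
            .setFollowRedirects(false)
            .setDecompress(false)
            .createDestInfo();
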
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java
index f7cedd22..49852680 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/IsFrom.java
@@ -26,39 +26,40 @@ package org.onap.dmaap.datarouter.node;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-
import java.io.IOException;
-import java.util.*;
-import java.net.*;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
/**
- * Determine if an IP address is from a machine
+ * Determine if an IP address is from a machine.
*/
public class IsFrom {
+
+ private static EELFLogger logger = EELFManager.getInstance().getLogger(IsFrom.class);
private long nextcheck;
private String[] ips;
private String fqdn;
- private static EELFLogger logger = EELFManager.getInstance().getLogger(IsFrom.class);
/**
- * Configure the JVM DNS cache to have a 10 second TTL. This needs to be called very very early or it won't have any effect.
+ * Create an IsFrom for the specified fully qualified domain name.
*/
- public static void setDNSCache() {
- java.security.Security.setProperty("networkaddress.cache.ttl", "10");
+ public IsFrom(String fqdn) {
+ this.fqdn = fqdn;
}
/**
- * Create an IsFrom for the specified fully qualified domain name.
+ * Configure the JVM DNS cache to have a 10 second TTL. This needs to be called very very early or it won't have
+ * any effect.
*/
- public IsFrom(String fqdn) {
- this.fqdn = fqdn;
+ public static void setDNSCache() {
+ java.security.Security.setProperty("networkaddress.cache.ttl", "10");
}
/**
- * Check if an IP address matches. If it has been more than
- * 10 seconds since DNS was last checked for changes to the
- * IP address(es) of this FQDN, check again. Then check
- * if the specified IP address belongs to the FQDN.
+ * Check if an IP address matches. If it has been more than 10 seconds since DNS was last checked for changes to
+ * the IP address(es) of this FQDN, check again. Then check if the specified IP address belongs to the FQDN.
*/
public synchronized boolean isFrom(String ip) {
long now = System.currentTimeMillis();
@@ -71,7 +72,7 @@ public class IsFrom {
hostAddrArray.add(addr.getHostAddress());
}
} catch (UnknownHostException e) {
- logger.error("IsFrom: UnknownHostEx: " + e.toString(), e.getMessage());
+ logger.error("IsFrom: UnknownHostEx: " + e.toString(), e);
}
ips = hostAddrArray.toArray(new String[0]);
logger.info("IsFrom: DNS ENTRIES FOR FQDN " + fqdn + " : " + Arrays.toString(ips));
@@ -90,15 +91,15 @@ public class IsFrom {
return true;
}
} catch (UnknownHostException e) {
- logger.error("IsFrom: UnknownHostEx: " + e.toString(), e.getMessage());
+ logger.error("IsFrom: UnknownHostEx: " + e.toString(), e);
} catch (IOException e) {
- logger.error("IsFrom: Failed to parse IP : " + ip + " : " + e.toString(), e.getMessage());
+ logger.error("IsFrom: Failed to parse IP : " + ip + " : " + e.toString(), e);
}
return false;
}
/**
- * Return the fully qualified domain name
+ * Return the fully qualified domain name.
*/
public String toString() {
return (fqdn);
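
A minimal sketch of how IsFrom is meant to be used, with a placeholder FQDN and a documentation-range IP address:

    import org.onap.dmaap.datarouter.node.IsFrom;

    public class IsFromSketch {
        public static void main(String[] args) {
            IsFrom.setDNSCache();                            // must run very early, before any DNS lookups
            IsFrom checker = new IsFrom("node.example.org"); // placeholder FQDN
            boolean ok = checker.isFrom("192.0.2.10");       // placeholder IP
            System.out.println(checker + " matches: " + ok);
        }
    }
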
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java
index 78a195b1..cf3b29a5 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/LogManager.java
@@ -20,8 +20,11 @@
* * ECOMP is a trademark and service mark of AT&T Intellectual Property.
* *
******************************************************************************/
+
package org.onap.dmaap.datarouter.node;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
@@ -33,17 +36,20 @@ import java.util.Arrays;
import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.jetbrains.annotations.NotNull;
/**
* Cleanup of old log files.
- * <p>
- * Periodically scan the log directory for log files that are older than the log file retention interval, and delete
+ *
+ * <p>Periodically scan the log directory for log files that are older than the log file retention interval, and delete
+ * them. In a future release, this class will also be responsible for uploading event logs to the log server to
* support the log query APIs.
*/
public class LogManager extends TimerTask {
+ private static final String EXCEPTION = "Exception";
+ private EELFLogger logger = EELFManager.getInstance().getLogger(LogManager.class);
private NodeConfigManager config;
private Matcher isnodelog;
private Matcher iseventlog;
@@ -51,7 +57,58 @@ public class LogManager extends TimerTask {
private String uploaddir;
private String logdir;
- private class Uploader extends Thread implements DeliveryQueueHelper {
+ /**
+     * Construct a log manager.
+ *
+ * <p>The log manager will check for expired log files every 5 minutes at 20 seconds after the 5 minute boundary.
+ * (Actually, the interval is the event log rollover interval, which defaults to 5 minutes).
+ */
+ public LogManager(NodeConfigManager config) {
+ this.config = config;
+ try {
+ isnodelog = Pattern.compile("node\\.log\\.\\d{8}").matcher("");
+ iseventlog = Pattern.compile("events-\\d{12}\\.log").matcher("");
+ } catch (Exception e) {
+ logger.error(EXCEPTION, e);
+ }
+ logdir = config.getLogDir();
+ uploaddir = logdir + "/.spool";
+ (new File(uploaddir)).mkdirs();
+ long now = System.currentTimeMillis();
+ long intvl = StatusLog.parseInterval(config.getEventLogInterval(), 30000);
+ long when = now - now % intvl + intvl + 20000L;
+ config.getTimer().scheduleAtFixedRate(this, when - now, intvl);
+ worker = new Uploader();
+ }
+
+ /**
+ * Trigger check for expired log files and log files to upload.
+ */
+ public void run() {
+ worker.poke();
+ }
+
+ public Uploader getWorker() {
+ return worker;
+ }
+
+ class Uploader extends Thread implements DeliveryQueueHelper {
+
+ private static final String META = "/.meta";
+ private EELFLogger logger = EELFManager.getInstance().getLogger(Uploader.class);
+ private DeliveryQueue dq;
+
+ Uploader() {
+ dq = new DeliveryQueue(this,
+ new DestInfoBuilder().setName("LogUpload").setSpool(uploaddir).setSubid(null).setLogdata(null)
+ .setUrl(null).setAuthuser(config.getMyName()).setAuthentication(config.getMyAuth())
+ .setMetaonly(false).setUse100(false).setPrivilegedSubscriber(false)
+ .setFollowRedirects(false)
+ .setDecompress(false).createDestInfo());
+ setDaemon(true);
+ setName("Log Uploader");
+ start();
+ }
public long getInitFailureTimer() {
return (10000L);
@@ -86,6 +143,7 @@ public class LogManager extends TimerTask {
}
public void handleUnreachable(DestInfo destinationInfo) {
+ throw new UnsupportedOperationException();
}
public boolean handleRedirection(DestInfo destinationInfo, String location, String fileid) {
@@ -100,23 +158,11 @@ public class LogManager extends TimerTask {
return (null);
}
- private DeliveryQueue dq;
-
- public Uploader() {
- dq = new DeliveryQueue(this,
- new DestInfo.DestInfoBuilder().setName("LogUpload").setSpool(uploaddir).setSubid(null).setLogdata(null)
- .setUrl(null).setAuthuser(config.getMyName()).setAuthentication(config.getMyAuth())
- .setMetaonly(false).setUse100(false).setPrivilegedSubscriber(false).setFollowRedirects(false)
- .setDecompress(false).createDestInfo());
- setDaemon(true);
- setName("Log Uploader");
- start();
- }
-
private synchronized void snooze() {
try {
wait(10000);
} catch (Exception e) {
+ logger.error(EXCEPTION, e);
}
}
@@ -124,6 +170,7 @@ public class LogManager extends TimerTask {
notify();
}
+ @Override
public void run() {
while (true) {
scan();
@@ -141,69 +188,48 @@ public class LogManager extends TimerTask {
String curlog = StatusLog.getCurLogFile();
curlog = curlog.substring(curlog.lastIndexOf('/') + 1);
try {
- Writer w = new FileWriter(uploaddir + "/.meta");
- w.write("POST\tlogdata\nContent-Type\ttext/plain\n");
- w.close();
+ Writer writer = new FileWriter(uploaddir + META);
+ writer.write("POST\tlogdata\nContent-Type\ttext/plain\n");
+ writer.close();
BufferedReader br = new BufferedReader(new FileReader(uploaddir + "/.lastqueued"));
lastqueued = br.readLine();
br.close();
} catch (Exception e) {
+ logger.error(EXCEPTION, e);
}
for (String fn : fns) {
if (!isnodelog.reset(fn).matches()) {
if (!iseventlog.reset(fn).matches()) {
continue;
}
- if (lastqueued.compareTo(fn) < 0 && curlog.compareTo(fn) > 0) {
- lastqueued = fn;
- try {
- String pid = config.getPublishId();
- Files.createLink(Paths.get(uploaddir + "/" + pid), Paths.get(logdir + "/" + fn));
- Files.createLink(Paths.get(uploaddir + "/" + pid + ".M"), Paths.get(uploaddir + "/.meta"));
- } catch (Exception e) {
- }
- }
+ lastqueued = setLastQueued(lastqueued, curlog, fn);
}
- File f = new File(dir, fn);
- if (f.lastModified() < threshold) {
- f.delete();
+ File file = new File(dir, fn);
+ if (file.lastModified() < threshold) {
+ file.delete();
}
}
try (Writer w = new FileWriter(uploaddir + "/.lastqueued")) {
- (new File(uploaddir + "/.meta")).delete();
+ (new File(uploaddir + META)).delete();
w.write(lastqueued + "\n");
} catch (Exception e) {
+ logger.error(EXCEPTION, e);
}
}
- }
- /**
- * Construct a log manager
- * <p>
- * The log manager will check for expired log files every 5 minutes at 20 seconds after the 5 minute boundary.
- * (Actually, the interval is the event log rollover interval, which defaults to 5 minutes).
- */
- public LogManager(NodeConfigManager config) {
- this.config = config;
- try {
- isnodelog = Pattern.compile("node\\.log\\.\\d{8}").matcher("");
- iseventlog = Pattern.compile("events-\\d{12}\\.log").matcher("");
- } catch (Exception e) {
+ @NotNull
+ private String setLastQueued(String lastqueued, String curlog, String fn) {
+ if (lastqueued.compareTo(fn) < 0 && curlog.compareTo(fn) > 0) {
+ lastqueued = fn;
+ try {
+ String pid = config.getPublishId();
+ Files.createLink(Paths.get(uploaddir + "/" + pid), Paths.get(logdir + "/" + fn));
+ Files.createLink(Paths.get(uploaddir + "/" + pid + ".M"), Paths.get(uploaddir + META));
+ } catch (Exception e) {
+ logger.error(EXCEPTION, e);
+ }
+ }
+ return lastqueued;
}
- logdir = config.getLogDir();
- uploaddir = logdir + "/.spool";
- (new File(uploaddir)).mkdirs();
- long now = System.currentTimeMillis();
- long intvl = StatusLog.parseInterval(config.getEventLogInterval(), 30000);
- long when = now - now % intvl + intvl + 20000L;
- config.getTimer().scheduleAtFixedRate(this, when - now, intvl);
- worker = new Uploader();
- }
-
- /**
- * Trigger check for expired log files and log files to upload
- */
- public void run() {
- worker.poke();
}
}
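
The constructor's scheduling arithmetic aligns the first expiry check to 20 seconds past the next rollover boundary; a worked sketch of the same calculation, assuming the default 5 minute event log interval:

    // Illustrative only; intvl normally comes from StatusLog.parseInterval(config.getEventLogInterval(), 30000).
    long intvl = 5 * 60 * 1000L;                        // 300000 ms
    long now = System.currentTimeMillis();
    long when = now - now % intvl + intvl + 20000L;     // next 5-minute boundary plus 20 s
    long initialDelay = when - now;                     // first delay passed to Timer.scheduleAtFixedRate
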
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
index d455f2d9..127668ff 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
@@ -26,24 +26,505 @@ package org.onap.dmaap.datarouter.node;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-
import java.io.File;
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashMap;
import java.util.HashSet;
-import java.util.Hashtable;
-import java.util.Vector;
+import org.jetbrains.annotations.NotNull;
/**
* Processed configuration for this node.
- * <p>
- * The NodeConfig represents a processed configuration from the Data Router provisioning server. Each time
+ *
+ * <p>The NodeConfig represents a processed configuration from the Data Router provisioning server. Each time
* configuration data is received from the provisioning server, a new NodeConfig is created and the previous one
* discarded.
*/
public class NodeConfig {
+
+ private static final String PUBLISHER_NOT_PERMITTED = "Publisher not permitted for this feed";
private static EELFLogger logger = EELFManager.getInstance().getLogger(NodeConfig.class);
+ private HashMap<String, String> params = new HashMap<>();
+ private HashMap<String, Feed> feeds = new HashMap<>();
+ private HashMap<String, DestInfo> nodeinfo = new HashMap<>();
+ private HashMap<String, DestInfo> subinfo = new HashMap<>();
+ private HashMap<String, IsFrom> nodes = new HashMap<>();
+ private HashMap<String, ProvSubscription> provSubscriptions = new HashMap<>();
+ private String myname;
+ private String myauth;
+ private DestInfo[] alldests;
+ private int rrcntr;
+
+ /**
+ * Process the raw provisioning data to configure this node.
+ *
+ * @param pd The parsed provisioning data
+ * @param myname My name as seen by external systems
+ * @param spooldir The directory where temporary files live
+ * @param port The port number for URLs
+ * @param nodeauthkey The keying string used to generate node authentication credentials
+ */
+ public NodeConfig(ProvData pd, String myname, String spooldir, int port, String nodeauthkey) {
+ this.myname = myname;
+ for (ProvParam p : pd.getParams()) {
+ params.put(p.getName(), p.getValue());
+ }
+ ArrayList<DestInfo> destInfos = addDestInfoToNodeConfig(pd, myname, spooldir, port, nodeauthkey);
+ PathFinder pf = new PathFinder(myname, nodeinfo.keySet().toArray(new String[0]), pd.getHops());
+ HashMap<String, ArrayList<Redirection>> rdtab = addSubRedirInfoToNodeConfig(pd);
+ HashMap<String, HashMap<String, String>> pfutab = addFeedUsersToNodeConfig(pd);
+ HashMap<String, String> egrtab = addEgressRoutesToNodeConfig(pd, myname);
+ HashMap<String, ArrayList<SubnetMatcher>> pfstab = addFeedSubnetToNodeConfig(pd);
+ HashSet<String> allfeeds = addFeedsToNodeConfig(pd);
+ HashMap<String, StringBuilder> feedTargets = addSubsToNodeConfig(pd, spooldir, destInfos, pf, egrtab, allfeeds);
+ alldests = destInfos.toArray(new DestInfo[0]);
+ addFeedTargetsToNodeConfig(pd, rdtab, pfutab, pfstab, feedTargets);
+ }
+
+ @NotNull
+ private ArrayList<DestInfo> addDestInfoToNodeConfig(ProvData pd, String myname, String spooldir, int port,
+ String nodeauthkey) {
+ ArrayList<DestInfo> destInfos = new ArrayList<>();
+ myauth = NodeUtils.getNodeAuthHdr(myname, nodeauthkey);
+ for (ProvNode pn : pd.getNodes()) {
+ String commonName = pn.getCName();
+ if (nodeinfo.get(commonName) != null) {
+ continue;
+ }
+ DestInfo di = new DestInfoBuilder().setName("n:" + commonName).setSpool(spooldir + "/n/" + commonName)
+ .setSubid(null)
+ .setLogdata("n2n-" + commonName).setUrl("https://" + commonName + ":" + port + "/internal/publish")
+ .setAuthuser(commonName).setAuthentication(myauth).setMetaonly(false).setUse100(true)
+ .setPrivilegedSubscriber(false).setFollowRedirects(false).setDecompress(false).createDestInfo();
+ (new File(di.getSpool())).mkdirs();
+ String auth = NodeUtils.getNodeAuthHdr(commonName, nodeauthkey);
+ destInfos.add(di);
+ nodeinfo.put(commonName, di);
+ nodes.put(auth, new IsFrom(commonName));
+ }
+ return destInfos;
+ }
+
+ @NotNull
+ private HashMap<String, ArrayList<Redirection>> addSubRedirInfoToNodeConfig(ProvData pd) {
+ HashMap<String, ArrayList<Redirection>> rdtab = new HashMap<>();
+ for (ProvForceIngress pfi : pd.getForceIngress()) {
+ ArrayList<Redirection> redirections = rdtab.get(pfi.getFeedId());
+ if (redirections == null) {
+ redirections = new ArrayList<>();
+ rdtab.put(pfi.getFeedId(), redirections);
+ }
+ Redirection redirection = new Redirection();
+ if (pfi.getSubnet() != null) {
+ redirection.snm = new SubnetMatcher(pfi.getSubnet());
+ }
+ redirection.user = pfi.getUser();
+ redirection.nodes = pfi.getNodes();
+ redirections.add(redirection);
+ }
+ return rdtab;
+ }
+
+ @NotNull
+ private HashMap<String, HashMap<String, String>> addFeedUsersToNodeConfig(ProvData pd) {
+ HashMap<String, HashMap<String, String>> pfutab = new HashMap<>();
+ for (ProvFeedUser pfu : pd.getFeedUsers()) {
+ HashMap<String, String> userInfo = pfutab.get(pfu.getFeedId());
+ if (userInfo == null) {
+ userInfo = new HashMap<>();
+ pfutab.put(pfu.getFeedId(), userInfo);
+ }
+ userInfo.put(pfu.getCredentials(), pfu.getUser());
+ }
+ return pfutab;
+ }
+
+ @NotNull
+ private HashMap<String, String> addEgressRoutesToNodeConfig(ProvData pd, String myname) {
+ HashMap<String, String> egrtab = new HashMap<>();
+ for (ProvForceEgress pfe : pd.getForceEgress()) {
+ if (pfe.getNode().equals(myname) || nodeinfo.get(pfe.getNode()) == null) {
+ continue;
+ }
+ egrtab.put(pfe.getSubId(), pfe.getNode());
+ }
+ return egrtab;
+ }
+
+ @NotNull
+ private HashMap<String, ArrayList<SubnetMatcher>> addFeedSubnetToNodeConfig(ProvData pd) {
+ HashMap<String, ArrayList<SubnetMatcher>> pfstab = new HashMap<>();
+ for (ProvFeedSubnet pfs : pd.getFeedSubnets()) {
+ ArrayList<SubnetMatcher> subnetMatchers = pfstab.get(pfs.getFeedId());
+ if (subnetMatchers == null) {
+ subnetMatchers = new ArrayList<>();
+ pfstab.put(pfs.getFeedId(), subnetMatchers);
+ }
+ subnetMatchers.add(new SubnetMatcher(pfs.getCidr()));
+ }
+ return pfstab;
+ }
+
+ @NotNull
+ private HashSet<String> addFeedsToNodeConfig(ProvData pd) {
+ HashSet<String> allfeeds = new HashSet<>();
+ for (ProvFeed pfx : pd.getFeeds()) {
+ if (pfx.getStatus() == null) {
+ allfeeds.add(pfx.getId());
+ }
+ }
+ return allfeeds;
+ }
+
+ @NotNull
+ private HashMap<String, StringBuilder> addSubsToNodeConfig(ProvData pd, String spooldir,
+ ArrayList<DestInfo> destInfos, PathFinder pf, HashMap<String, String> egrtab, HashSet<String> allfeeds) {
+ HashMap<String, StringBuilder> feedTargets = new HashMap<>();
+ for (ProvSubscription provSubscription : pd.getSubscriptions()) {
+ String subId = provSubscription.getSubId();
+ String feedId = provSubscription.getFeedId();
+ if (isFeedOrSubKnown(allfeeds, subId, feedId)) {
+ continue;
+ }
+ int sididx = 999;
+ try {
+ sididx = Integer.parseInt(subId);
+ sididx -= sididx % 100;
+ } catch (Exception e) {
+ logger.error("NODE0517 Exception NodeConfig: " + e);
+ }
+ String subscriptionDirectory = sididx + "/" + subId;
+ DestInfo destinationInfo = new DestInfo("s:" + subId,
+ spooldir + "/s/" + subscriptionDirectory, provSubscription);
+ (new File(destinationInfo.getSpool())).mkdirs();
+ destInfos.add(destinationInfo);
+ provSubscriptions.put(subId, provSubscription);
+ subinfo.put(subId, destinationInfo);
+ String egr = egrtab.get(subId);
+ if (egr != null) {
+ subId = pf.getPath(egr) + subId;
+ }
+ StringBuilder sb = feedTargets.get(feedId);
+ if (sb == null) {
+ sb = new StringBuilder();
+ feedTargets.put(feedId, sb);
+ }
+ sb.append(' ').append(subId);
+ }
+ return feedTargets;
+ }
+
+ private void addFeedTargetsToNodeConfig(ProvData pd, HashMap<String, ArrayList<Redirection>> rdtab,
+ HashMap<String, HashMap<String, String>> pfutab, HashMap<String, ArrayList<SubnetMatcher>> pfstab,
+ HashMap<String, StringBuilder> feedTargets) {
+ for (ProvFeed pfx : pd.getFeeds()) {
+ String fid = pfx.getId();
+ Feed feed = feeds.get(fid);
+ if (feed != null) {
+ continue;
+ }
+ feed = new Feed();
+ feeds.put(fid, feed);
+ feed.createdDate = pfx.getCreatedDate();
+ feed.loginfo = pfx.getLogData();
+ feed.status = pfx.getStatus();
+ /*
+ * AAF changes: TDP EPIC US# 307413
+ * Passing aafInstance from ProvFeed to identify legacy/AAF feeds
+ */
+ feed.aafInstance = pfx.getAafInstance();
+ ArrayList<SubnetMatcher> v1 = pfstab.get(fid);
+ if (v1 == null) {
+ feed.subnets = new SubnetMatcher[0];
+ } else {
+ feed.subnets = v1.toArray(new SubnetMatcher[0]);
+ }
+ HashMap<String, String> h1 = pfutab.get(fid);
+ if (h1 == null) {
+                h1 = new HashMap<>();
+ }
+ feed.authusers = h1;
+ ArrayList<Redirection> v2 = rdtab.get(fid);
+ if (v2 == null) {
+ feed.redirections = new Redirection[0];
+ } else {
+ feed.redirections = v2.toArray(new Redirection[0]);
+ }
+ StringBuilder sb = feedTargets.get(fid);
+ if (sb == null) {
+ feed.targets = new Target[0];
+ } else {
+ feed.targets = parseRouting(sb.toString());
+ }
+ }
+ }
+
+ /**
+ * Parse a target string into an array of targets.
+ *
+ * @param routing Target string
+ * @return Array of targets.
+ */
+ public Target[] parseRouting(String routing) {
+ routing = routing.trim();
+ if ("".equals(routing)) {
+ return (new Target[0]);
+ }
+ String[] routingTable = routing.split("\\s+");
+ HashMap<String, Target> tmap = new HashMap<>();
+ HashSet<String> subset = new HashSet<>();
+ ArrayList<Target> targets = new ArrayList<>();
+ for (int i = 0; i < routingTable.length; i++) {
+ String target = routingTable[i];
+ int index = target.indexOf('/');
+ if (index == -1) {
+ addTarget(subset, targets, target);
+ } else {
+ addTargetWithRouting(tmap, targets, target, index);
+ }
+ }
+ return (targets.toArray(new Target[0]));
+ }
+
+ /**
+ * Check whether this is a valid node-to-node transfer.
+ *
+ * @param credentials Credentials offered by the supposed node
+ * @param ip IP address the request came from
+ */
+ public boolean isAnotherNode(String credentials, String ip) {
+ IsFrom node = nodes.get(credentials);
+ return (node != null && node.isFrom(ip));
+ }
+
+ /**
+ * Check whether publication is allowed.
+ *
+ * @param feedid The ID of the feed being requested.
+ * @param credentials The offered credentials
+ * @param ip The requesting IP address
+ */
+ public String isPublishPermitted(String feedid, String credentials, String ip) {
+ Feed feed = feeds.get(feedid);
+ String nf = "Feed does not exist";
+ if (feed != null) {
+ nf = feed.status;
+ }
+ if (nf != null) {
+ return (nf);
+ }
+ String user = feed.authusers.get(credentials);
+ if (user == null) {
+ return (PUBLISHER_NOT_PERMITTED);
+ }
+ if (feed.subnets.length == 0) {
+ return (null);
+ }
+ byte[] addr = NodeUtils.getInetAddress(ip);
+ for (SubnetMatcher snm : feed.subnets) {
+ if (snm.matches(addr)) {
+ return (null);
+ }
+ }
+ return (PUBLISHER_NOT_PERMITTED);
+ }
+
+ /**
+ * Check whether publication is allowed for AAF Feed.
+ *
+ * @param feedid The ID of the feed being requested.
+ * @param ip The requesting IP address
+ */
+ public String isPublishPermitted(String feedid, String ip) {
+ Feed feed = feeds.get(feedid);
+ String nf = "Feed does not exist";
+ if (feed != null) {
+ nf = feed.status;
+ }
+ if (nf != null) {
+ return nf;
+ }
+ if (feed.subnets.length == 0) {
+ return null;
+ }
+ byte[] addr = NodeUtils.getInetAddress(ip);
+ for (SubnetMatcher snm : feed.subnets) {
+ if (snm.matches(addr)) {
+ return null;
+ }
+ }
+ return PUBLISHER_NOT_PERMITTED;
+ }
+
/**
- * Raw configuration entry for a data router node
+ * Check whether delete file is allowed.
+ *
+ * @param subId The ID of the subscription being requested.
+ */
+ public boolean isDeletePermitted(String subId) {
+ ProvSubscription provSubscription = provSubscriptions.get(subId);
+ return provSubscription.isPrivilegedSubscriber();
+ }
+
+ /**
+ * Get authenticated user.
+ */
+ public String getAuthUser(String feedid, String credentials) {
+ return (feeds.get(feedid).authusers.get(credentials));
+ }
+
+ /**
+     * AAF changes: TDP EPIC US# 307413. Check AAF_instance for feed ID.
+ *
+ * @param feedid The ID of the feed specified
+ */
+ public String getAafInstance(String feedid) {
+ Feed feed = feeds.get(feedid);
+ return feed.aafInstance;
+ }
+
+ /**
+ * Check if the request should be redirected to a different ingress node.
+ */
+ public String getIngressNode(String feedid, String user, String ip) {
+ Feed feed = feeds.get(feedid);
+ if (feed.redirections.length == 0) {
+ return (null);
+ }
+ byte[] addr = NodeUtils.getInetAddress(ip);
+ for (Redirection r : feed.redirections) {
+ if ((r.user != null && !user.equals(r.user)) || (r.snm != null && !r.snm.matches(addr))) {
+ continue;
+ }
+ for (String n : r.nodes) {
+ if (myname.equals(n)) {
+ return (null);
+ }
+ }
+ if (r.nodes.length == 0) {
+ return (null);
+ }
+ return (r.nodes[rrcntr++ % r.nodes.length]);
+ }
+ return (null);
+ }
+
+ /**
+ * Get a provisioned configuration parameter.
+ */
+ public String getProvParam(String name) {
+ return (params.get(name));
+ }
+
+ /**
+ * Get all the DestInfos.
+ */
+ public DestInfo[] getAllDests() {
+ return (alldests);
+ }
+
+ /**
+ * Get the targets for a feed.
+ *
+ * @param feedid The feed ID
+ * @return The targets this feed should be delivered to
+ */
+ public Target[] getTargets(String feedid) {
+ if (feedid == null) {
+ return (new Target[0]);
+ }
+ Feed feed = feeds.get(feedid);
+ if (feed == null) {
+ return (new Target[0]);
+ }
+ return (feed.targets);
+ }
+
+ /**
+ * Get the creation date for a feed.
+ *
+ * @param feedid The feed ID
+ * @return the timestamp of creation date of feed id passed
+ */
+ public String getCreatedDate(String feedid) {
+ Feed feed = feeds.get(feedid);
+ return (feed.createdDate);
+ }
+
+ /**
+ * Get the feed ID for a subscription.
+ *
+ * @param subid The subscription ID
+ * @return The feed ID
+ */
+ public String getFeedId(String subid) {
+ DestInfo di = subinfo.get(subid);
+ if (di == null) {
+ return (null);
+ }
+ return (di.getLogData());
+ }
+
+ /**
+ * Get the spool directory for a subscription.
+ *
+ * @param subid The subscription ID
+ * @return The spool directory
+ */
+ public String getSpoolDir(String subid) {
+ DestInfo di = subinfo.get(subid);
+ if (di == null) {
+ return (null);
+ }
+ return (di.getSpool());
+ }
+
+ /**
+ * Get the Authorization value this node uses.
+ *
+ * @return The Authorization header value for this node
+ */
+ public String getMyAuth() {
+ return (myauth);
+ }
+
+ private boolean isFeedOrSubKnown(HashSet<String> allfeeds, String subId, String feedId) {
+ return !allfeeds.contains(feedId) || subinfo.get(subId) != null;
+ }
+
+ private void addTargetWithRouting(HashMap<String, Target> tmap, ArrayList<Target> targets, String target,
+ int index) {
+ String node = target.substring(0, index);
+ String rtg = target.substring(index + 1);
+ DestInfo di = nodeinfo.get(node);
+ if (di == null) {
+ targets.add(new Target(null, target));
+ } else {
+ Target tt = tmap.get(node);
+ if (tt == null) {
+ tt = new Target(di, rtg);
+ tmap.put(node, tt);
+ targets.add(tt);
+ } else {
+ tt.addRouting(rtg);
+ }
+ }
+ }
+
+ private void addTarget(HashSet<String> subset, ArrayList<Target> targets, String target) {
+ DestInfo destInfo = subinfo.get(target);
+ if (destInfo == null) {
+ targets.add(new Target(null, target));
+ } else {
+ if (!subset.contains(target)) {
+ subset.add(target);
+ targets.add(new Target(destInfo, null));
+ }
+ }
+ }
+
+ /**
+ * Raw configuration entry for a data router node.
*/
public static class ProvNode {
@@ -59,7 +540,7 @@ public class NodeConfig {
}
/**
- * Get the cname of the node
+ * Get the cname of the node.
*/
public String getCName() {
return (cname);
@@ -67,7 +548,7 @@ public class NodeConfig {
}
/**
- * Raw configuration entry for a provisioning parameter
+ * Raw configuration entry for a provisioning parameter.
*/
public static class ProvParam {
@@ -121,7 +602,7 @@ public class NodeConfig {
* @param id The feed ID of the entry.
* @param logdata String for log entries about the entry.
* @param status The reason why this feed cannot be used (Feed has been deleted, Feed has been suspended) or
- * null if it is valid.
+ * null if it is valid.
*/
public ProvFeed(String id, String logdata, String status, String createdDate, String aafInstance) {
this.id = id;
@@ -134,9 +615,8 @@ public class NodeConfig {
/**
* Get the created date of the data feed.
*/
- public String getCreatedDate()
- {
- return(createdDate);
+ public String getCreatedDate() {
+ return (createdDate);
}
/**
@@ -178,7 +658,7 @@ public class NodeConfig {
private String credentials;
/**
- * Construct a feed user configuration entry
+ * Construct a feed user configuration entry.
*
* @param feedid The feed id.
* @param user The user that will publish to the feed.
@@ -213,7 +693,7 @@ public class NodeConfig {
}
/**
- * Raw configuration entry for a feed subnet
+ * Raw configuration entry for a feed subnet.
*/
public static class ProvFeedSubnet {
@@ -221,7 +701,7 @@ public class NodeConfig {
private String cidr;
/**
- * Construct a feed subnet configuration entry
+ * Construct a feed subnet configuration entry.
*
* @param feedid The feed ID
* @param cidr The CIDR allowed to publish to the feed.
@@ -247,7 +727,7 @@ public class NodeConfig {
}
/**
- * Raw configuration entry for a subscription
+ * Raw configuration entry for a subscription.
*/
public static class ProvSubscription {
@@ -263,21 +743,23 @@ public class NodeConfig {
private boolean decompress;
/**
- * Construct a subscription configuration entry
+ * Construct a subscription configuration entry.
*
* @param subid The subscription ID
* @param feedid The feed ID
* @param url The base delivery URL (not including the fileid)
* @param authuser The user in the credentials used to deliver
* @param credentials The credentials used to authenticate to the delivery URL exactly as they go in the
- * Authorization header.
+ * Authorization header.
* @param metaonly Is this a meta data only subscription?
* @param use100 Should we send Expect: 100-continue?
* @param privilegedSubscriber Can we wait to receive a delete file call before deleting file
* @param followRedirect Is follow redirect of destination enabled?
* @param decompress To see if they want their information compressed or decompressed
*/
- public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials, boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean followRedirect, boolean decompress) {
+ public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials,
+ boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean followRedirect,
+ boolean decompress) {
this.subid = subid;
this.feedid = feedid;
this.url = url;
@@ -291,79 +773,79 @@ public class NodeConfig {
}
/**
- * Get the subscription ID
+ * Get the subscription ID.
*/
public String getSubId() {
return (subid);
}
/**
- * Get the feed ID
+ * Get the feed ID.
*/
public String getFeedId() {
return (feedid);
}
/**
- * Get the delivery URL
+ * Get the delivery URL.
*/
public String getURL() {
return (url);
}
/**
- * Get the user
+ * Get the user.
*/
public String getAuthUser() {
return (authuser);
}
/**
- * Get the delivery credentials
+ * Get the delivery credentials.
*/
public String getCredentials() {
return (credentials);
}
/**
- * Is this a meta data only subscription?
+ * Is this a meta data only subscription.
*/
public boolean isMetaDataOnly() {
return (metaonly);
}
/**
- * Should we send Expect: 100-continue?
+ * Should we send Expect: 100-continue.
*/
public boolean isUsing100() {
return (use100);
}
/**
- * Can we wait to receive a delete file call before deleting file
+ * Can we wait to receive a delete file call before deleting file.
*/
public boolean isPrivilegedSubscriber() {
return (privilegedSubscriber);
}
/**
- * Should i decompress the file before sending it on
- */
+ * Should I decompress the file before sending it on.
+ */
public boolean isDecompress() {
return (decompress);
}
/**
- * New field is added - FOLLOW_REDIRECTS feature iTrack:DATARTR-17 - 1706
- * Get the followRedirect of this destination
+     * New field is added - FOLLOW_REDIRECTS feature iTrack:DATARTR-17 - 1706. Get the followRedirect of this
+ * destination.
*/
boolean getFollowRedirect() {
- return(followRedirect);
+ return (followRedirect);
}
}
/**
- * Raw configuration entry for controlled ingress to the data router node
+ * Raw configuration entry for controlled ingress to the data router node.
*/
public static class ProvForceIngress {
@@ -373,11 +855,11 @@ public class NodeConfig {
private String[] nodes;
/**
- * Construct a forced ingress configuration entry
+ * Construct a forced ingress configuration entry.
*
* @param feedid The feed ID that this entry applies to
* @param subnet The CIDR for which publisher IP addresses this entry applies to or "" if it applies to all
- * publisher IP addresses
+ * publisher IP addresses
* @param user The publishing user this entry applies to or "" if it applies to all publishing users.
* @param nodes The array of FQDNs of the data router nodes to redirect publication attempts to.
*/
@@ -386,7 +868,7 @@ public class NodeConfig {
this.subnet = subnet;
this.user = user;
//Sonar fix
- if(nodes == null) {
+ if (nodes == null) {
this.nodes = new String[0];
} else {
this.nodes = Arrays.copyOf(nodes, nodes.length);
@@ -394,28 +876,28 @@ public class NodeConfig {
}
/**
- * Get the feed ID
+ * Get the feed ID.
*/
public String getFeedId() {
return (feedid);
}
/**
- * Get the subnet
+ * Get the subnet.
*/
public String getSubnet() {
return (subnet);
}
/**
- * Get the user
+ * Get the user.
*/
public String getUser() {
return (user);
}
/**
- * Get the node
+ * Get the node.
*/
public String[] getNodes() {
return (nodes);
@@ -423,7 +905,7 @@ public class NodeConfig {
}
/**
- * Raw configuration entry for controlled egress from the data router
+ * Raw configuration entry for controlled egress from the data router.
*/
public static class ProvForceEgress {
@@ -431,7 +913,7 @@ public class NodeConfig {
private String node;
/**
- * Construct a forced egress configuration entry
+ * Construct a forced egress configuration entry.
*
* @param subid The subscription ID the subscription with forced egress
* @param node The node handling deliveries for this subscription
@@ -442,14 +924,14 @@ public class NodeConfig {
}
/**
- * Get the subscription ID
+ * Get the subscription ID.
*/
public String getSubId() {
return (subid);
}
/**
- * Get the node
+ * Get the node.
*/
public String getNode() {
return (node);
@@ -457,7 +939,7 @@ public class NodeConfig {
}
/**
- * Raw configuration entry for routing within the data router network
+ * Raw configuration entry for routing within the data router network.
*/
public static class ProvHop {
@@ -466,14 +948,7 @@ public class NodeConfig {
private String via;
/**
- * A human readable description of this entry
- */
- public String toString() {
- return ("Hop " + from + "->" + to + " via " + via);
- }
-
- /**
- * Construct a hop entry
+ * Construct a hop entry.
*
* @param from The FQDN of the node with the data to be delivered
* @param to The FQDN of the node that will deliver to the subscriber
@@ -486,21 +961,28 @@ public class NodeConfig {
}
/**
- * Get the from node
+ * A human readable description of this entry.
+ */
+ public String toString() {
+ return ("Hop " + from + "->" + to + " via " + via);
+ }
+
+ /**
+ * Get the from node.
*/
public String getFrom() {
return (from);
}
/**
- * Get the to node
+ * Get the to node.
*/
public String getTo() {
return (to);
}
/**
- * Get the next intermediate node
+ * Get the next intermediate node.
*/
public String getVia() {
return (via);
@@ -519,431 +1001,10 @@ public class NodeConfig {
String loginfo;
String status;
SubnetMatcher[] subnets;
- Hashtable<String, String> authusers = new Hashtable<String, String>();
+ HashMap<String, String> authusers = new HashMap<>();
Redirection[] redirections;
Target[] targets;
String createdDate;
String aafInstance;
}
-
- private Hashtable<String, String> params = new Hashtable<>();
- private Hashtable<String, Feed> feeds = new Hashtable<>();
- private Hashtable<String, DestInfo> nodeinfo = new Hashtable<>();
- private Hashtable<String, DestInfo> subinfo = new Hashtable<>();
- private Hashtable<String, IsFrom> nodes = new Hashtable<>();
- private Hashtable<String, ProvSubscription> provSubscriptions = new Hashtable<>();
- private String myname;
- private String myauth;
- private DestInfo[] alldests;
- private int rrcntr;
-
- /**
- * Process the raw provisioning data to configure this node
- *
- * @param pd The parsed provisioning data
- * @param myname My name as seen by external systems
- * @param spooldir The directory where temporary files live
- * @param port The port number for URLs
- * @param nodeauthkey The keying string used to generate node authentication credentials
- */
- public NodeConfig(ProvData pd, String myname, String spooldir, int port, String nodeauthkey) {
- this.myname = myname;
- for (ProvParam p : pd.getParams()) {
- params.put(p.getName(), p.getValue());
- }
- Vector<DestInfo> destInfos = new Vector<>();
- myauth = NodeUtils.getNodeAuthHdr(myname, nodeauthkey);
- for (ProvNode pn : pd.getNodes()) {
- String cName = pn.getCName();
- if (nodeinfo.get(cName) != null) {
- continue;
- }
- String auth = NodeUtils.getNodeAuthHdr(cName, nodeauthkey);
- DestInfo di = new DestInfo.DestInfoBuilder().setName("n:" + cName).setSpool(spooldir + "/n/" + cName).setSubid(null)
- .setLogdata("n2n-" + cName).setUrl("https://" + cName + ":" + port + "/internal/publish")
- .setAuthuser(cName).setAuthentication(myauth).setMetaonly(false).setUse100(true)
- .setPrivilegedSubscriber(false).setFollowRedirects(false).setDecompress(false).createDestInfo();
- (new File(di.getSpool())).mkdirs();
- destInfos.add(di);
- nodeinfo.put(cName, di);
- nodes.put(auth, new IsFrom(cName));
- }
- PathFinder pf = new PathFinder(myname, nodeinfo.keySet().toArray(new String[0]), pd.getHops());
- Hashtable<String, Vector<Redirection>> rdtab = new Hashtable<>();
- for (ProvForceIngress pfi : pd.getForceIngress()) {
- Vector<Redirection> v = rdtab.get(pfi.getFeedId());
- if (v == null) {
- v = new Vector<>();
- rdtab.put(pfi.getFeedId(), v);
- }
- Redirection r = new Redirection();
- if (pfi.getSubnet() != null) {
- r.snm = new SubnetMatcher(pfi.getSubnet());
- }
- r.user = pfi.getUser();
- r.nodes = pfi.getNodes();
- v.add(r);
- }
- Hashtable<String, Hashtable<String, String>> pfutab = new Hashtable<>();
- for (ProvFeedUser pfu : pd.getFeedUsers()) {
- Hashtable<String, String> t = pfutab.get(pfu.getFeedId());
- if (t == null) {
- t = new Hashtable<>();
- pfutab.put(pfu.getFeedId(), t);
- }
- t.put(pfu.getCredentials(), pfu.getUser());
- }
- Hashtable<String, String> egrtab = new Hashtable<>();
- for (ProvForceEgress pfe : pd.getForceEgress()) {
- if (pfe.getNode().equals(myname) || nodeinfo.get(pfe.getNode()) == null) {
- continue;
- }
- egrtab.put(pfe.getSubId(), pfe.getNode());
- }
- Hashtable<String, Vector<SubnetMatcher>> pfstab = new Hashtable<>();
- for (ProvFeedSubnet pfs : pd.getFeedSubnets()) {
- Vector<SubnetMatcher> v = pfstab.get(pfs.getFeedId());
- if (v == null) {
- v = new Vector<>();
- pfstab.put(pfs.getFeedId(), v);
- }
- v.add(new SubnetMatcher(pfs.getCidr()));
- }
- Hashtable<String, StringBuffer> feedTargets = new Hashtable<>();
- HashSet<String> allfeeds = new HashSet<>();
- for (ProvFeed pfx : pd.getFeeds()) {
- if (pfx.getStatus() == null) {
- allfeeds.add(pfx.getId());
- }
- }
- for (ProvSubscription provSubscription : pd.getSubscriptions()) {
- String subId = provSubscription.getSubId();
- String feedId = provSubscription.getFeedId();
- if (!allfeeds.contains(feedId)) {
- continue;
- }
- if (subinfo.get(subId) != null) {
- continue;
- }
- int sididx = 999;
- try {
- sididx = Integer.parseInt(subId);
- sididx -= sididx % 100;
- } catch (Exception e) {
- logger.error("NODE0517 Exception NodeConfig: "+e);
- }
- String subscriptionDirectory = sididx + "/" + subId;
- DestInfo destinationInfo = new DestInfo("s:" + subId,
- spooldir + "/s/" + subscriptionDirectory, provSubscription);
- (new File(destinationInfo.getSpool())).mkdirs();
- destInfos.add(destinationInfo);
- provSubscriptions.put(subId, provSubscription);
- subinfo.put(subId, destinationInfo);
- String egr = egrtab.get(subId);
- if (egr != null) {
- subId = pf.getPath(egr) + subId;
- }
- StringBuffer sb = feedTargets.get(feedId);
- if (sb == null) {
- sb = new StringBuffer();
- feedTargets.put(feedId, sb);
- }
- sb.append(' ').append(subId);
- }
- alldests = destInfos.toArray(new DestInfo[0]);
- for (ProvFeed pfx : pd.getFeeds()) {
- String fid = pfx.getId();
- Feed f = feeds.get(fid);
- if (f != null) {
- continue;
- }
- f = new Feed();
- feeds.put(fid, f);
- f.createdDate = pfx.getCreatedDate();
- f.loginfo = pfx.getLogData();
- f.status = pfx.getStatus();
- /*
- * AAF changes: TDP EPIC US# 307413
- * Passing aafInstance from ProvFeed to identify legacy/AAF feeds
- */
- f.aafInstance = pfx.getAafInstance();
- Vector<SubnetMatcher> v1 = pfstab.get(fid);
- if (v1 == null) {
- f.subnets = new SubnetMatcher[0];
- } else {
- f.subnets = v1.toArray(new SubnetMatcher[0]);
- }
- Hashtable<String, String> h1 = pfutab.get(fid);
- if (h1 == null) {
- h1 = new Hashtable<String, String>();
- }
- f.authusers = h1;
- Vector<Redirection> v2 = rdtab.get(fid);
- if (v2 == null) {
- f.redirections = new Redirection[0];
- } else {
- f.redirections = v2.toArray(new Redirection[0]);
- }
- StringBuffer sb = feedTargets.get(fid);
- if (sb == null) {
- f.targets = new Target[0];
- } else {
- f.targets = parseRouting(sb.toString());
- }
- }
- }
-
- /**
- * Parse a target string into an array of targets
- *
- * @param routing Target string
- * @return Array of targets.
- */
- public Target[] parseRouting(String routing) {
- routing = routing.trim();
- if ("".equals(routing)) {
- return (new Target[0]);
- }
- String[] xx = routing.split("\\s+");
- Hashtable<String, Target> tmap = new Hashtable<String, Target>();
- HashSet<String> subset = new HashSet<String>();
- Vector<Target> tv = new Vector<Target>();
- Target[] ret = new Target[xx.length];
- for (int i = 0; i < xx.length; i++) {
- String t = xx[i];
- int j = t.indexOf('/');
- if (j == -1) {
- DestInfo di = subinfo.get(t);
- if (di == null) {
- tv.add(new Target(null, t));
- } else {
- if (!subset.contains(t)) {
- subset.add(t);
- tv.add(new Target(di, null));
- }
- }
- } else {
- String node = t.substring(0, j);
- String rtg = t.substring(j + 1);
- DestInfo di = nodeinfo.get(node);
- if (di == null) {
- tv.add(new Target(null, t));
- } else {
- Target tt = tmap.get(node);
- if (tt == null) {
- tt = new Target(di, rtg);
- tmap.put(node, tt);
- tv.add(tt);
- } else {
- tt.addRouting(rtg);
- }
- }
- }
- }
- return (tv.toArray(new Target[0]));
- }
-
- /**
- * Check whether this is a valid node-to-node transfer
- *
- * @param credentials Credentials offered by the supposed node
- * @param ip IP address the request came from
- */
- public boolean isAnotherNode(String credentials, String ip) {
- IsFrom n = nodes.get(credentials);
- return (n != null && n.isFrom(ip));
- }
-
- /**
- * Check whether publication is allowed.
- *
- * @param feedid The ID of the feed being requested.
- * @param credentials The offered credentials
- * @param ip The requesting IP address
- */
- public String isPublishPermitted(String feedid, String credentials, String ip) {
- Feed f = feeds.get(feedid);
- String nf = "Feed does not exist";
- if (f != null) {
- nf = f.status;
- }
- if (nf != null) {
- return (nf);
- }
- String user = f.authusers.get(credentials);
- if (user == null) {
- return ("Publisher not permitted for this feed");
- }
- if (f.subnets.length == 0) {
- return (null);
- }
- byte[] addr = NodeUtils.getInetAddress(ip);
- for (SubnetMatcher snm : f.subnets) {
- if (snm.matches(addr)) {
- return (null);
- }
- }
- return ("Publisher not permitted for this feed");
- }
-
- /**
- * Check whether delete file is allowed.
- *
- * @param subId The ID of the subscription being requested.
- */
- public boolean isDeletePermitted(String subId) {
- ProvSubscription provSubscription = provSubscriptions.get(subId);
- return provSubscription.isPrivilegedSubscriber();
- }
-
- /**
- * Check whether publication is allowed for AAF Feed.
- * @param feedid The ID of the feed being requested.
- * @param ip The requesting IP address
- */
- public String isPublishPermitted(String feedid, String ip) {
- Feed f = feeds.get(feedid);
- String nf = "Feed does not exist";
- if (f != null) {
- nf = f.status;
- }
- if (nf != null) {
- return(nf);
- }
- if (f.subnets.length == 0) {
- return(null);
- }
- byte[] addr = NodeUtils.getInetAddress(ip);
- for (SubnetMatcher snm: f.subnets) {
- if (snm.matches(addr)) {
- return(null);
- }
- }
- return("Publisher not permitted for this feed");
- }
-
- /**
- * Get authenticated user
- */
- public String getAuthUser(String feedid, String credentials) {
- return (feeds.get(feedid).authusers.get(credentials));
- }
-
- /**
- * AAF changes: TDP EPIC US# 307413
- * Check AAF_instance for feed ID
- * @param feedid The ID of the feed specified
- */
- public String getAafInstance(String feedid) {
- Feed f = feeds.get(feedid);
- return f.aafInstance;
- }
-
- /**
- * Check if the request should be redirected to a different ingress node
- */
- public String getIngressNode(String feedid, String user, String ip) {
- Feed f = feeds.get(feedid);
- if (f.redirections.length == 0) {
- return (null);
- }
- byte[] addr = NodeUtils.getInetAddress(ip);
- for (Redirection r : f.redirections) {
- if (r.user != null && !user.equals(r.user)) {
- continue;
- }
- if (r.snm != null && !r.snm.matches(addr)) {
- continue;
- }
- for (String n : r.nodes) {
- if (myname.equals(n)) {
- return (null);
- }
- }
- if (r.nodes.length == 0) {
- return (null);
- }
- return (r.nodes[rrcntr++ % r.nodes.length]);
- }
- return (null);
- }
-
- /**
- * Get a provisioned configuration parameter
- */
- public String getProvParam(String name) {
- return (params.get(name));
- }
-
- /**
- * Get all the DestInfos
- */
- public DestInfo[] getAllDests() {
- return (alldests);
- }
-
- /**
- * Get the targets for a feed
- *
- * @param feedid The feed ID
- * @return The targets this feed should be delivered to
- */
- public Target[] getTargets(String feedid) {
- if (feedid == null) {
- return (new Target[0]);
- }
- Feed f = feeds.get(feedid);
- if (f == null) {
- return (new Target[0]);
- }
- return (f.targets);
- }
-
- /**
- * Get the creation date for a feed
- * @param feedid The feed ID
- * @return the timestamp of creation date of feed id passed
- */
- public String getCreatedDate(String feedid) {
- Feed f = feeds.get(feedid);
- return(f.createdDate);
- }
-
- /**
- * Get the feed ID for a subscription
- *
- * @param subid The subscription ID
- * @return The feed ID
- */
- public String getFeedId(String subid) {
- DestInfo di = subinfo.get(subid);
- if (di == null) {
- return (null);
- }
- return (di.getLogData());
- }
-
- /**
- * Get the spool directory for a subscription
- *
- * @param subid The subscription ID
- * @return The spool directory
- */
- public String getSpoolDir(String subid) {
- DestInfo di = subinfo.get(subid);
- if (di == null) {
- return (null);
- }
- return (di.getSpool());
- }
-
- /**
- * Get the Authorization value this node uses
- *
- * @return The Authorization header value for this node
- */
- public String getMyAuth() {
- return (myauth);
- }
-
}
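
The relocated parseRouting logic above splits a whitespace-separated routing string into targets: a bare token is treated as a local subscription ID, while a token of the form node/route is grouped per node. The following standalone sketch only illustrates that tokenizing step; the class name and sample subscription IDs and node names are invented and are not part of this patch.

    // Illustrative sketch only: mimics how parseRouting tokenizes a routing string.
    // The sample subscription IDs and node names below are made up.
    public class RoutingStringSketch {
        public static void main(String[] args) {
            String routing = " 101  node2.example.org/205 node2.example.org/206 ";
            for (String token : routing.trim().split("\\s+")) {
                int slash = token.indexOf('/');
                if (slash == -1) {
                    System.out.println("subscription target: " + token);
                } else {
                    System.out.println("node target: " + token.substring(0, slash)
                            + " routing " + token.substring(slash + 1));
                }
            }
        }
    }
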
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
index 16099e62..786befce 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfigManager.java
@@ -26,8 +26,6 @@ package org.onap.dmaap.datarouter.node;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
-
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
@@ -35,22 +33,24 @@ import java.io.Reader;
import java.net.URL;
import java.util.Properties;
import java.util.Timer;
+import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
/**
* Maintain the configuration of a Data Router node
- * <p>
- * The NodeConfigManager is the single point of contact for servlet, delivery, event logging, and log retention
+ *
+ * <p>The NodeConfigManager is the single point of contact for servlet, delivery, event logging, and log retention
* subsystems to access configuration information.
- * <p>
- * There are two basic sets of configuration data. The static local configuration data, stored in a local configuration
- * file (created as part of installation by SWM), and the dynamic global configuration data fetched from the data router
- * provisioning server.
+ *
+ * <p>There are two basic sets of configuration data: the static local configuration data, stored in a local
+ * configuration file (created as part of installation by SWM), and the dynamic global configuration data fetched from
+ * the data router provisioning server.
*/
public class NodeConfigManager implements DeliveryQueueHelper {
- private static EELFLogger eelfLogger = EELFManager.getInstance()
- .getLogger(NodeConfigManager.class);
+ private static final String CHANGE_ME = "changeme";
+ private static final String NODE_CONFIG_MANAGER = "NodeConfigManager";
+ private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(NodeConfigManager.class);
private static NodeConfigManager base = new NodeConfigManager();
private Timer timer = new Timer("Node Configuration Timer", true);
@@ -94,7 +94,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
private String eventlogsuffix;
private String eventloginterval;
private boolean followredirects;
- private String [] enabledprotocols;
+ private String[] enabledprotocols;
private String aafType;
private String aafInstance;
private String aafAction;
@@ -103,14 +103,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
/**
- * Get the default node configuration manager
- */
- public static NodeConfigManager getInstance() {
- return base;
- }
-
- /**
- * Initialize the configuration of a Data Router node
+ * Initialize the configuration of a Data Router node.
*/
private NodeConfigManager() {
@@ -120,8 +113,10 @@ public class NodeConfigManager implements DeliveryQueueHelper {
drNodeProperties.load(new FileInputStream(System
.getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties")));
} catch (Exception e) {
- NodeUtils.setIpAndFqdnForEelf("NodeConfigManager");
- eelfLogger.error(EelfMsgs.MESSAGE_PROPERTIES_LOAD_ERROR, System.getProperty("org.onap.dmaap.datarouter.node.properties", "/opt/app/datartr/etc/node.properties"));
+ NodeUtils.setIpAndFqdnForEelf(NODE_CONFIG_MANAGER);
+ eelfLogger.error(EelfMsgs.MESSAGE_PROPERTIES_LOAD_ERROR, e,
+ System.getProperty("org.onap.dmaap.datarouter.node.properties",
+ "/opt/app/datartr/etc/node.properties"));
}
provurl = drNodeProperties.getProperty("ProvisioningURL", "https://dmaap-dr-prov:8443/internal/prov");
/*
@@ -143,8 +138,8 @@ public class NodeConfigManager implements DeliveryQueueHelper {
try {
provhost = (new URL(provurl)).getHost();
} catch (Exception e) {
- NodeUtils.setIpAndFqdnForEelf("NodeConfigManager");
- eelfLogger.error(EelfMsgs.MESSAGE_BAD_PROV_URL, provurl);
+ NodeUtils.setIpAndFqdnForEelf(NODE_CONFIG_MANAGER);
+ eelfLogger.error(EelfMsgs.MESSAGE_BAD_PROV_URL, e, provurl);
System.exit(1);
}
eelfLogger.info("NODE0303 Provisioning server is " + provhost);
@@ -153,8 +148,6 @@ public class NodeConfigManager implements DeliveryQueueHelper {
gfport = Integer.parseInt(drNodeProperties.getProperty("IntHttpPort", "8080"));
svcport = Integer.parseInt(drNodeProperties.getProperty("IntHttpsPort", "8443"));
port = Integer.parseInt(drNodeProperties.getProperty("ExtHttpsPort", "443"));
- long minpfinterval = Long.parseLong(drNodeProperties.getProperty("MinProvFetchInterval", "10000"));
- long minrsinterval = Long.parseLong(drNodeProperties.getProperty("MinRedirSaveInterval", "10000"));
spooldir = drNodeProperties.getProperty("SpoolDir", "spool");
File fdir = new File(spooldir + "/f");
fdir.mkdirs();
@@ -168,14 +161,14 @@ public class NodeConfigManager implements DeliveryQueueHelper {
logretention = Long.parseLong(drNodeProperties.getProperty("LogRetention", "30")) * 86400000L;
eventlogprefix = logdir + "/events";
eventlogsuffix = ".log";
- String redirfile = drNodeProperties.getProperty("RedirectionFile", "etc/redirections.dat");
+ redirfile = drNodeProperties.getProperty("RedirectionFile", "etc/redirections.dat");
kstype = drNodeProperties.getProperty("KeyStoreType", "jks");
ksfile = drNodeProperties.getProperty("KeyStoreFile", "etc/keystore");
- kspass = drNodeProperties.getProperty("KeyStorePassword", "changeme");
- kpass = drNodeProperties.getProperty("KeyPassword", "changeme");
+ kspass = drNodeProperties.getProperty("KeyStorePassword", CHANGE_ME);
+ kpass = drNodeProperties.getProperty("KeyPassword", CHANGE_ME);
tstype = drNodeProperties.getProperty("TrustStoreType", "jks");
tsfile = drNodeProperties.getProperty("TrustStoreFile");
- tspass = drNodeProperties.getProperty("TrustStorePassword", "changeme");
+ tspass = drNodeProperties.getProperty("TrustStorePassword", CHANGE_ME);
if (tsfile != null && tsfile.length() > 0) {
System.setProperty("javax.net.ssl.trustStoreType", tstype);
System.setProperty("javax.net.ssl.trustStore", tsfile);
@@ -185,13 +178,15 @@ public class NodeConfigManager implements DeliveryQueueHelper {
quiesce = new File(drNodeProperties.getProperty("QuiesceFile", "etc/SHUTDOWN"));
myname = NodeUtils.getCanonicalName(kstype, ksfile, kspass);
if (myname == null) {
- NodeUtils.setIpAndFqdnForEelf("NodeConfigManager");
+ NodeUtils.setIpAndFqdnForEelf(NODE_CONFIG_MANAGER);
eelfLogger.error(EelfMsgs.MESSAGE_KEYSTORE_FETCH_ERROR, ksfile);
eelfLogger.error("NODE0309 Unable to fetch canonical name from keystore file " + ksfile);
System.exit(1);
}
eelfLogger.info("NODE0304 My certificate says my name is " + myname);
pid = new PublishId(myname);
+ long minrsinterval = Long.parseLong(drNodeProperties.getProperty("MinRedirSaveInterval", "10000"));
+ long minpfinterval = Long.parseLong(drNodeProperties.getProperty("MinProvFetchInterval", "10000"));
rdmgr = new RedirManager(redirfile, minrsinterval, timer);
pfetcher = new RateLimitedOperation(minpfinterval, timer) {
public void run() {
@@ -202,6 +197,13 @@ public class NodeConfigManager implements DeliveryQueueHelper {
pfetcher.request();
}
+ /**
+ * Get the default node configuration manager.
+ */
+ public static NodeConfigManager getInstance() {
+ return base;
+ }
+
private void localconfig() {
followredirects = Boolean.parseBoolean(getProvParam("FOLLOW_REDIRECTS", "false"));
eventloginterval = getProvParam("LOGROLL_INTERVAL", "30s");
@@ -218,42 +220,53 @@ public class NodeConfigManager implements DeliveryQueueHelper {
try {
initfailuretimer = (long) (Double.parseDouble(getProvParam("DELIVERY_INIT_RETRY_INTERVAL")) * 1000);
} catch (Exception e) {
+ eelfLogger.trace("Error parsing DELIVERY_INIT_RETRY_INTERVAL", e);
}
try {
- waitForFileProcessFailureTimer = (long) (Double.parseDouble(getProvParam("DELIVERY_FILE_PROCESS_INTERVAL")) * 1000);
+ waitForFileProcessFailureTimer = (long) (Double.parseDouble(getProvParam("DELIVERY_FILE_PROCESS_INTERVAL"))
+ * 1000);
} catch (Exception e) {
+ eelfLogger.trace("Error parsing DELIVERY_FILE_PROCESS_INTERVAL", e);
}
try {
maxfailuretimer = (long) (Double.parseDouble(getProvParam("DELIVERY_MAX_RETRY_INTERVAL")) * 1000);
} catch (Exception e) {
+ eelfLogger.trace("Error parsing DELIVERY_MAX_RETRY_INTERVAL", e);
}
try {
expirationtimer = (long) (Double.parseDouble(getProvParam("DELIVERY_MAX_AGE")) * 1000);
} catch (Exception e) {
+ eelfLogger.trace("Error parsing DELIVERY_MAX_AGE", e);
}
try {
failurebackoff = Double.parseDouble(getProvParam("DELIVERY_RETRY_RATIO"));
} catch (Exception e) {
+ eelfLogger.trace("Error parsing DELIVERY_RETRY_RATIO", e);
}
try {
deliverythreads = Integer.parseInt(getProvParam("DELIVERY_THREADS"));
} catch (Exception e) {
+ eelfLogger.trace("Error parsing DELIVERY_THREADS", e);
}
try {
fairfilelimit = Integer.parseInt(getProvParam("FAIR_FILE_LIMIT"));
} catch (Exception e) {
+ eelfLogger.trace("Error parsing FAIR_FILE_LIMIT", e);
}
try {
fairtimelimit = (long) (Double.parseDouble(getProvParam("FAIR_TIME_LIMIT")) * 1000);
} catch (Exception e) {
+ eelfLogger.trace("Error parsing FAIR_TIME_LIMIT", e);
}
try {
fdpstart = Double.parseDouble(getProvParam("FREE_DISK_RED_PERCENT")) / 100.0;
} catch (Exception e) {
+ eelfLogger.trace("Error parsing FREE_DISK_RED_PERCENT", e);
}
try {
fdpstop = Double.parseDouble(getProvParam("FREE_DISK_YELLOW_PERCENT")) / 100.0;
} catch (Exception e) {
+ eelfLogger.trace("Error parsing FREE_DISK_YELLOW_PERCENT", e);
}
if (fdpstart < 0.01) {
fdpstart = 0.01;
@@ -272,26 +285,30 @@ public class NodeConfigManager implements DeliveryQueueHelper {
private void fetchconfig() {
try {
eelfLogger.info("NodeConfigMan.fetchConfig: provurl:: " + provurl);
- Reader r = new InputStreamReader((new URL(provurl)).openStream());
- config = new NodeConfig(new ProvData(r), myname, spooldir, port, nak);
+ Reader reader = new InputStreamReader((new URL(provurl)).openStream());
+ config = new NodeConfig(new ProvData(reader), myname, spooldir, port, nak);
localconfig();
configtasks.startRun();
- Runnable rr;
- while ((rr = configtasks.next()) != null) {
- try {
- rr.run();
- } catch (Exception e) {
- eelfLogger.error("NODE0518 Exception fetchconfig: " + e);
- }
- }
+ runTasks();
} catch (Exception e) {
NodeUtils.setIpAndFqdnForEelf("fetchconfigs");
eelfLogger.error(EelfMsgs.MESSAGE_CONF_FAILED, e.toString());
- eelfLogger.error("NODE0306 Configuration failed " + e.toString() + " - try again later", e.getMessage());
+ eelfLogger.error("NODE0306 Configuration failed " + e.toString() + " - try again later", e);
pfetcher.request();
}
}
+ private void runTasks() {
+ Runnable rr;
+ while ((rr = configtasks.next()) != null) {
+ try {
+ rr.run();
+ } catch (Exception e) {
+ eelfLogger.error("NODE0518 Exception fetchconfig: " + e);
+ }
+ }
+ }
+
/**
* Process a gofetch request from a particular IP address. If the IP address is not an IP address we would go to to
* fetch the provisioning data, ignore the request. If the data has been fetched very recently (default 10
@@ -307,14 +324,14 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Am I configured?
+ * Check whether the node is configured.
*/
public boolean isConfigured() {
return (config != null);
}
/**
- * Am I shut down?
+ * Check whether the node is shut down.
*/
public boolean isShutdown() {
return (quiesce.exists());
@@ -331,7 +348,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Given a set of credentials and an IP address, is this request from another node?
+ * Check whether a request with the given credentials and IP address comes from another node.
*
* @param credentials Credentials offered by the supposed node
* @param ip IP address the request came from
@@ -354,16 +371,6 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Check whether delete file is allowed.
- *
- * @param subId The ID of the subscription being requested
- * @return True if the delete file is permitted for the subscriber.
- */
- public boolean isDeletePermitted(String subId) {
- return (config.isDeletePermitted(subId));
- }
-
- /**
* Check whether publication is allowed for AAF Feed.
*
* @param feedid The ID of the feed being requested
@@ -371,7 +378,17 @@ public class NodeConfigManager implements DeliveryQueueHelper {
* @return True if the IP and credentials are valid for the specified feed.
*/
public String isPublishPermitted(String feedid, String ip) {
- return(config.isPublishPermitted(feedid, ip));
+ return (config.isPublishPermitted(feedid, ip));
+ }
+
+ /**
+ * Check whether delete file is allowed.
+ *
+ * @param subId The ID of the subscription being requested
+ * @return True if the delete file is permitted for the subscriber.
+ */
+ public boolean isDeletePermitted(String subId) {
+ return (config.isDeletePermitted(subId));
}
/**
@@ -386,12 +403,16 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * AAF changes: TDP EPIC US# 307413
- * Check AAF_instance for feed ID in NodeConfig
+ * AAF changes: TDP EPIC US# 307413. Check AAF_instance for feed ID in NodeConfig.
+ *
* @param feedid The ID of the feed specified
*/
public String getAafInstance(String feedid) {
- return(config.getAafInstance(feedid));
+ return (config.getAafInstance(feedid));
+ }
+
+ public String getAafInstance() {
+ return aafInstance;
}
/**
@@ -407,7 +428,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Get a provisioned configuration parameter (from the provisioning server configuration)
+ * Get a provisioned configuration parameter (from the provisioning server configuration).
*
* @param name The name of the parameter
* @return The value of the parameter or null if it is not defined.
@@ -417,7 +438,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Get a provisioned configuration parameter (from the provisioning server configuration)
+ * Get a provisioned configuration parameter (from the provisioning server configuration).
*
* @param name The name of the parameter
* @param defaultValue The value to use if the parameter is not defined
@@ -432,7 +453,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Generate a publish ID
+ * Generate a publish ID.
*/
public String getPublishId() {
return (pid.next());
@@ -446,14 +467,14 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Register a task to run whenever the configuration changes
+ * Register a task to run whenever the configuration changes.
*/
public void registerConfigTask(Runnable task) {
configtasks.addTask(task);
}
/**
- * Deregister a task to run whenever the configuration changes
+ * Deregister a task to run whenever the configuration changes.
*/
public void deregisterConfigTask(Runnable task) {
configtasks.removeTask(task);
@@ -476,14 +497,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Is a destination redirected?
- */
- public boolean isDestRedirected(DestInfo destinfo) {
- return (followredirects && rdmgr.isRedirected(destinfo.getSubId()));
- }
-
- /**
- * Set up redirection on receipt of a 3XX from a target URL
+ * Set up redirection on receipt of a 3XX from a target URL.
*/
public boolean handleRedirection(DestInfo destinationInfo, String redirto, String fileid) {
fileid = "/" + fileid;
@@ -500,24 +514,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Set up redirection on receipt of a 3XX from a target URL
- */
- public boolean handleRedirectionSubLevel(DeliveryTask task, DestInfo destinfo, String redirto, String fileid) {
- fileid = "/" + fileid;
- String subid = destinfo.getSubId();
- String purl = destinfo.getURL();
- if (task.getFollowRedirects() && subid != null && redirto.endsWith(fileid)) {
- redirto = redirto.substring(0, redirto.length() - fileid.length());
- if (!redirto.equals(purl)) {
- rdmgr.redirect(subid, purl, redirto);
- return true;
- }
- }
- return false;
- }
-
- /**
- * Handle unreachable target URL
+ * Handle unreachable target URL.
*/
public void handleUnreachable(DestInfo destinationInfo) {
String subid = destinationInfo.getSubId();
@@ -527,35 +524,35 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Get the timeout before retrying after an initial delivery failure
+ * Get the timeout before retrying after an initial delivery failure.
*/
public long getInitFailureTimer() {
return (initfailuretimer);
}
/**
- * Get the timeout before retrying after delivery and wait for file processing
+ * Get the timeout before retrying after delivery and wait for file processing.
*/
public long getWaitForFileProcessFailureTimer() {
return (waitForFileProcessFailureTimer);
}
/**
- * Get the maximum timeout between delivery attempts
+ * Get the maximum timeout between delivery attempts.
*/
public long getMaxFailureTimer() {
return (maxfailuretimer);
}
/**
- * Get the ratio between consecutive delivery attempts
+ * Get the ratio between consecutive delivery attempts.
*/
public double getFailureBackoff() {
return (failurebackoff);
}
/**
- * Get the expiration timer for deliveries
+ * Get the expiration timer for deliveries.
*/
public long getExpirationTimer() {
return (expirationtimer);
@@ -576,7 +573,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Get the targets for a feed
+ * Get the targets for a feed.
*
* @param feedid The feed ID
* @return The targets this feed should be delivered to
@@ -586,149 +583,160 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Get the creation date for a feed
- * @param feedid The feed ID
- * @return the timestamp of creation date of feed id passed
+ * Get the spool directory for temporary files.
*/
- public String getCreatedDate(String feedid) {
- return(config.getCreatedDate(feedid));
+ public String getSpoolDir() {
+ return (spooldir + "/f");
}
/**
- * Get the spool directory for temporary files
+ * Get the spool directory for a subscription.
*/
- public String getSpoolDir() {
- return (spooldir + "/f");
+ public String getSpoolDir(String subid, String remoteaddr) {
+ if (provcheck.isFrom(remoteaddr)) {
+ String sdir = config.getSpoolDir(subid);
+ if (sdir != null) {
+ eelfLogger.info("NODE0310 Received subscription reset request for subscription " + subid
+ + " from provisioning server " + remoteaddr);
+ } else {
+ eelfLogger.info("NODE0311 Received subscription reset request for unknown subscription " + subid
+ + " from provisioning server " + remoteaddr);
+ }
+ return (sdir);
+ } else {
+ eelfLogger.info("NODE0312 Received subscription reset request from unexpected server " + remoteaddr);
+ return (null);
+ }
}
/**
- * Get the base directory for spool directories
+ * Get the base directory for spool directories.
*/
public String getSpoolBase() {
return (spooldir);
}
/**
- * Get the key store type
+ * Get the key store type.
*/
public String getKSType() {
return (kstype);
}
/**
- * Get the key store file
+ * Get the key store file.
*/
public String getKSFile() {
return (ksfile);
}
/**
- * Get the key store password
+ * Get the key store password.
*/
public String getKSPass() {
return (kspass);
}
/**
- * Get the key password
+ * Get the key password.
*/
public String getKPass() {
return (kpass);
}
/**
- * Get the http port
+ * Get the http port.
*/
public int getHttpPort() {
return (gfport);
}
/**
- * Get the https port
+ * Get the https port.
*/
public int getHttpsPort() {
return (svcport);
}
/**
- * Get the externally visible https port
+ * Get the externally visible https port.
*/
public int getExtHttpsPort() {
return (port);
}
/**
- * Get the external name of this machine
+ * Get the external name of this machine.
*/
public String getMyName() {
return (myname);
}
/**
- * Get the number of threads to use for delivery
+ * Get the number of threads to use for delivery.
*/
public int getDeliveryThreads() {
return (deliverythreads);
}
/**
- * Get the URL for uploading the event log data
+ * Get the URL for uploading the event log data.
*/
public String getEventLogUrl() {
return (eventlogurl);
}
/**
- * Get the prefix for the names of event log files
+ * Get the prefix for the names of event log files.
*/
public String getEventLogPrefix() {
return (eventlogprefix);
}
/**
- * Get the suffix for the names of the event log files
+ * Get the suffix for the names of the event log files.
*/
public String getEventLogSuffix() {
return (eventlogsuffix);
}
/**
- * Get the interval between event log file rollovers
+ * Get the interval between event log file rollovers.
*/
public String getEventLogInterval() {
return (eventloginterval);
}
/**
- * Should I follow redirects from subscribers?
+ * Whether to follow redirects from subscribers.
*/
public boolean isFollowRedirects() {
return (followredirects);
}
/**
- * Get the directory where the event and node log files live
+ * Get the directory where the event and node log files live.
*/
public String getLogDir() {
return (logdir);
}
/**
- * How long do I keep log files (in milliseconds)
+ * Get the log file retention period (in milliseconds).
*/
public long getLogRetention() {
return (logretention);
}
/**
- * Get the timer
+ * Get the timer.
*/
public Timer getTimer() {
return (timer);
}
/**
- * Get the feed ID for a subscription
+ * Get the feed ID for a subscription.
*
* @param subid The subscription ID
* @return The feed ID
@@ -738,7 +746,7 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Get the authorization string this node uses
+ * Get the authorization string this node uses.
*
* @return The Authorization string for this node
*/
@@ -763,72 +771,33 @@ public class NodeConfigManager implements DeliveryQueueHelper {
}
/**
- * Disable and enable protocols
- * */
+ * Get the enabled protocols.
+ */
public String[] getEnabledprotocols() {
return enabledprotocols;
}
- public void setEnabledprotocols(String[] enabledprotocols) {
- this.enabledprotocols = enabledprotocols.clone();
- }
-
- /**
- * Get the spool directory for a subscription
- */
- public String getSpoolDir(String subid, String remoteaddr) {
- if (provcheck.isFrom(remoteaddr)) {
- String sdir = config.getSpoolDir(subid);
- if (sdir != null) {
- eelfLogger.info("NODE0310 Received subscription reset request for subscription " + subid
- + " from provisioning server " + remoteaddr);
- } else {
- eelfLogger.info("NODE0311 Received subscription reset request for unknown subscription " + subid
- + " from provisioning server " + remoteaddr);
- }
- return (sdir);
- } else {
- eelfLogger.info("NODE0312 Received subscription reset request from unexpected server " + remoteaddr);
- return (null);
- }
- }
public String getAafType() {
return aafType;
}
- public void setAafType(String aafType) {
- this.aafType = aafType;
- }
- public String getAafInstance() {
- return aafInstance;
- }
- public void setAafInstance(String aafInstance) {
- this.aafInstance = aafInstance;
- }
+
public String getAafAction() {
return aafAction;
}
- public void setAafAction(String aafAction) {
- this.aafAction = aafAction;
- }
+
/*
* Get aafURL from SWM variable
* */
public String getAafURL() {
return aafURL;
}
- public void setAafURL(String aafURL) {
- this.aafURL = aafURL;
- }
- public boolean getCadiEnabeld() {
+ public boolean getCadiEnabled() {
return cadiEnabled;
}
- public void setCadiEnabled(boolean cadiEnabled) {
- this.cadiEnabled = cadiEnabled;
- }
/**
- * Builds the permissions string to be verified
+ * Builds the permissions string to be verified.
*
* @param aafInstance The aaf instance
* @return The permissions
@@ -837,12 +806,12 @@ public class NodeConfigManager implements DeliveryQueueHelper {
try {
String type = getAafType();
String action = getAafAction();
- if (aafInstance == null || aafInstance.equals("")) {
+ if ("".equals(aafInstance)) {
aafInstance = getAafInstance();
}
return type + "|" + aafInstance + "|" + action;
} catch (Exception e) {
- eelfLogger.error("NODE0543 NodeConfigManager.getPermission: ", e.getMessage());
+ eelfLogger.error("NODE0543 NodeConfigManager.getPermission: ", e);
}
return null;
}
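
The getPermission change above assembles the AAF permission string as type, instance, and action joined by "|", falling back to the configured instance when the caller supplies no value. The sketch below shows that assembly in isolation; the type, instance, and action values are placeholders invented for illustration, not real provisioning data.

    // Minimal sketch of the "<type>|<instance>|<action>" permission string built
    // in NodeConfigManager.getPermission(). All values here are placeholders.
    public class PermissionStringSketch {
        static String buildPermission(String type, String instance, String action, String fallbackInstance) {
            if (instance == null || "".equals(instance)) {
                instance = fallbackInstance;   // fall back to the configured AAF instance
            }
            return type + "|" + instance + "|" + action;
        }

        public static void main(String[] args) {
            System.out.println(buildPermission("example.type", null, "publish", "legacy"));
            // prints: example.type|legacy|publish
        }
    }
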
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java
index 058295d3..abec7393 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeMain.java
@@ -23,79 +23,42 @@
package org.onap.dmaap.datarouter.node;
-
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.EnumSet;
+import java.util.Properties;
+import javax.servlet.DispatcherType;
+import javax.servlet.ServletException;
import org.eclipse.jetty.http.HttpVersion;
-import org.eclipse.jetty.server.*;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.onap.aaf.cadi.PropAccess;
-import javax.servlet.DispatcherType;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.EnumSet;
-import java.util.Properties;
-
/**
- * The main starting point for the Data Router node
+ * The main starting point for the Data Router node.
*/
public class NodeMain {
- private NodeMain() {
- }
-
private static EELFLogger nodeMainLogger = EELFManager.getInstance().getLogger(NodeMain.class);
-
- class Inner {
- InputStream getCadiProps() {
- InputStream in = null;
- try {
- in = getClass().getClassLoader().getResourceAsStream("drNodeCadi.properties");
- } catch (Exception e) {
- nodeMainLogger.error("Exception in Inner.getCadiProps() method " + e.getMessage());
- }
- return in;
- }
- }
-
- private static class WaitForConfig implements Runnable {
-
- private NodeConfigManager localNodeConfigManager;
-
- WaitForConfig(NodeConfigManager ncm) {
- this.localNodeConfigManager = ncm;
- }
-
- public synchronized void run() {
- notify();
- }
-
- synchronized void waitForConfig() {
- localNodeConfigManager.registerConfigTask(this);
- while (!localNodeConfigManager.isConfigured()) {
- nodeMainLogger.info("NODE0003 Waiting for Node Configuration");
- try {
- wait();
- } catch (Exception exception) {
- nodeMainLogger
- .error("NodeMain: waitForConfig exception. Exception Message:- " + exception.toString(),
- exception);
- }
- }
- localNodeConfigManager.deregisterConfigTask(this);
- nodeMainLogger.info("NODE0004 Node Configuration Data Received");
- }
- }
-
private static Delivery delivery;
private static NodeConfigManager nodeConfigManager;
+ private NodeMain() {
+ }
+
/**
- * Reset the retry timer for a subscription
+ * Reset the retry timer for a subscription.
*/
static void resetQueue(String subid, String ip) {
delivery.resetQueue(nodeConfigManager.getSpoolDir(subid, ip));
@@ -103,9 +66,9 @@ public class NodeMain {
/**
* Start the data router.
- * <p>
- * The location of the node configuration file can be set using the org.onap.dmaap.datarouter.node.properties system
- * property. By default, it is "/opt/app/datartr/etc/node.properties".
+ *
+ * <p>The location of the node configuration file can be set using the org.onap.dmaap.datarouter.node.properties
+ * system property. By default, it is "/opt/app/datartr/etc/node.properties".
*/
public static void main(String[] args) throws Exception {
nodeMainLogger.info("NODE0001 Data Router Node Starting");
@@ -123,7 +86,8 @@ public class NodeMain {
httpConfiguration.setRequestHeaderSize(2048);
// HTTP connector
- try (ServerConnector httpServerConnector = new ServerConnector(server, new HttpConnectionFactory(httpConfiguration))) {
+ try (ServerConnector httpServerConnector = new ServerConnector(server,
+ new HttpConnectionFactory(httpConfiguration))) {
httpServerConnector.setPort(nodeConfigManager.getHttpPort());
httpServerConnector.setIdleTimeout(2000);
@@ -134,7 +98,8 @@ public class NodeMain {
sslContextFactory.setKeyStorePassword(nodeConfigManager.getKSPass());
sslContextFactory.setKeyManagerPassword(nodeConfigManager.getKPass());
- //SP-6 : Fixes for SDV scan to exclude/remove DES/3DES ciphers are taken care by upgrading jdk in descriptor.xml
+ //SP-6: Fixes for SDV scan to exclude/remove DES/3DES
+ // ciphers are taken care of by upgrading the JDK in descriptor.xml
sslContextFactory.setExcludeCipherSuites(
"SSL_RSA_WITH_DES_CBC_SHA",
"SSL_DHE_RSA_WITH_DES_CBC_SHA",
@@ -147,9 +112,12 @@ public class NodeMain {
sslContextFactory.addExcludeProtocols("SSLv3");
sslContextFactory.setIncludeProtocols(nodeConfigManager.getEnabledprotocols());
- nodeMainLogger.info("NODE00004 Unsupported protocols node server:-" + String.join(",", sslContextFactory.getExcludeProtocols()));
- nodeMainLogger.info("NODE00004 Supported protocols node server:-" + String.join(",", sslContextFactory.getIncludeProtocols()));
- nodeMainLogger.info("NODE00004 Unsupported ciphers node server:-" + String.join(",", sslContextFactory.getExcludeCipherSuites()));
+ nodeMainLogger.info("NODE00004 Unsupported protocols node server:-"
+ + String.join(",", sslContextFactory.getExcludeProtocols()));
+ nodeMainLogger.info("NODE00004 Supported protocols node server:-"
+ + String.join(",", sslContextFactory.getIncludeProtocols()));
+ nodeMainLogger.info("NODE00004 Unsupported ciphers node server:-"
+ + String.join(",", sslContextFactory.getExcludeCipherSuites()));
HttpConfiguration httpsConfiguration = new HttpConfiguration(httpConfiguration);
httpsConfiguration.setRequestHeaderSize(8192);
@@ -174,20 +142,8 @@ public class NodeMain {
servletContextHandler.addServlet(new ServletHolder(new NodeServlet(delivery)), "/*");
//CADI Filter activation check
- if (nodeConfigManager.getCadiEnabeld()) {
- Properties cadiProperties = new Properties();
- try {
- Inner obj = new NodeMain().new Inner();
- InputStream in = obj.getCadiProps();
- cadiProperties.load(in);
- } catch (IOException e1) {
- nodeMainLogger.error("NODE00005 Exception in NodeMain.Main() loading CADI properties " + e1.getMessage());
- }
- cadiProperties.setProperty("aaf_locate_url", nodeConfigManager.getAafURL());
- nodeMainLogger.info("NODE00005 aaf_url set to - " + cadiProperties.getProperty("aaf_url"));
-
- PropAccess access = new PropAccess(cadiProperties);
- servletContextHandler.addFilter(new FilterHolder(new DRNodeCadiFilter(true, access)), "/*", EnumSet.of(DispatcherType.REQUEST));
+ if (nodeConfigManager.getCadiEnabled()) {
+ enableCadi(servletContextHandler);
}
server.setHandler(servletContextHandler);
@@ -199,9 +155,69 @@ public class NodeMain {
server.start();
nodeMainLogger.info("NODE00006 Node Server started-" + server.getState());
} catch (Exception e) {
- nodeMainLogger.info("NODE00006 Jetty failed to start. Reporting will we unavailable", e.getMessage());
+ nodeMainLogger.info("NODE00006 Jetty failed to start. Reporting will we unavailable: " + e.getMessage(), e);
}
server.join();
nodeMainLogger.info("NODE00007 Node Server joined - " + server.getState());
}
+
+ private static void enableCadi(ServletContextHandler servletContextHandler) throws ServletException {
+ Properties cadiProperties = new Properties();
+ try {
+ Inner obj = new NodeMain().new Inner();
+ InputStream in = obj.getCadiProps();
+ cadiProperties.load(in);
+ } catch (IOException e1) {
+ nodeMainLogger
+ .error("NODE00005 Exception in NodeMain.Main() loading CADI properties " + e1.getMessage(), e1);
+ }
+ cadiProperties.setProperty("aaf_locate_url", nodeConfigManager.getAafURL());
+ nodeMainLogger.info("NODE00005 aaf_url set to - " + cadiProperties.getProperty("aaf_url"));
+
+ PropAccess access = new PropAccess(cadiProperties);
+ servletContextHandler.addFilter(new FilterHolder(new DRNodeCadiFilter(true, access)), "/*", EnumSet
+ .of(DispatcherType.REQUEST));
+ }
+
+ private static class WaitForConfig implements Runnable {
+
+ private NodeConfigManager localNodeConfigManager;
+
+ WaitForConfig(NodeConfigManager ncm) {
+ this.localNodeConfigManager = ncm;
+ }
+
+ public synchronized void run() {
+ notify();
+ }
+
+ synchronized void waitForConfig() {
+ localNodeConfigManager.registerConfigTask(this);
+ while (!localNodeConfigManager.isConfigured()) {
+ nodeMainLogger.info("NODE0003 Waiting for Node Configuration");
+ try {
+ wait();
+ } catch (Exception exception) {
+ nodeMainLogger
+ .error("NodeMain: waitForConfig exception. Exception Message:- " + exception.toString(),
+ exception);
+ }
+ }
+ localNodeConfigManager.deregisterConfigTask(this);
+ nodeMainLogger.info("NODE0004 Node Configuration Data Received");
+ }
+ }
+
+ class Inner {
+
+ InputStream getCadiProps() {
+ InputStream in = null;
+ try {
+ in = getClass().getClassLoader().getResourceAsStream("drNodeCadi.properties");
+ } catch (Exception e) {
+ nodeMainLogger.error("Exception in Inner.getCadiProps() method ", e);
+ }
+ return in;
+ }
+ }
}
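
WaitForConfig, moved to the bottom of NodeMain above, is a plain wait/notify handshake: the startup thread blocks until the configuration-task callback signals that provisioning data has arrived. A self-contained sketch of that pattern follows; the class and method names are illustrative only and not Data Router code.

    // Self-contained sketch of the wait/notify handshake used by WaitForConfig.
    public class ConfigWaitSketch {
        private boolean configured = false;

        public synchronized void onConfigReceived() {
            configured = true;
            notifyAll();                      // wake the thread blocked in waitForConfig()
        }

        public synchronized void waitForConfig() throws InterruptedException {
            while (!configured) {             // loop guards against spurious wakeups
                wait();
            }
        }

        public static void main(String[] args) throws InterruptedException {
            ConfigWaitSketch sketch = new ConfigWaitSketch();
            new Thread(() -> {
                try {
                    Thread.sleep(200);        // simulate the provisioning fetch
                } catch (InterruptedException ignored) {
                    Thread.currentThread().interrupt();
                }
                sketch.onConfigReceived();
            }).start();
            sketch.waitForConfig();
            System.out.println("configuration received");
        }
    }
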
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
index a9842116..3b82484a 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeServlet.java
@@ -24,15 +24,10 @@
package org.onap.dmaap.datarouter.node;
+import static org.onap.dmaap.datarouter.node.NodeUtils.sendResponseError;
+
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import org.jetbrains.annotations.Nullable;
-import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
-import org.slf4j.MDC;
-
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
@@ -45,13 +40,17 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Enumeration;
import java.util.regex.Pattern;
-
-import static org.onap.dmaap.datarouter.node.NodeUtils.sendResponseError;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.jetbrains.annotations.Nullable;
+import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
+import org.slf4j.MDC;
/**
- * Servlet for handling all http and https requests to the data router node
- * <p>
- * Handled requests are:
+ * Servlet for handling all http and https requests to the data router node.
+ *
+ * <p>Handled requests are:
* <br>
* GET http://<i>node</i>/internal/fetchProv - fetch the provisioning data
* <br>
@@ -61,29 +60,33 @@ import static org.onap.dmaap.datarouter.node.NodeUtils.sendResponseError;
*/
public class NodeServlet extends HttpServlet {
+ private static final String FROM = " from ";
+ private static final String INVALID_REQUEST_URI = "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.";
+ private static final String IO_EXCEPTION = "IOException";
+ private static final String ON_BEHALF_OF = "X-DMAAP-DR-ON-BEHALF-OF";
private static NodeConfigManager config;
- private static Pattern MetaDataPattern;
+ private static Pattern metaDataPattern;
private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(NodeServlet.class);
- private final Delivery delivery;
static {
final String ws = "\\s*";
// assume that \\ and \" have been replaced by X
final String string = "\"[^\"]*\"";
- //String string = "\"(?:[^\"\\\\]|\\\\.)*\"";
final String number = "[+-]?(?:\\.\\d+|(?:0|[1-9]\\d*)(?:\\.\\d*)?)(?:[eE][+-]?\\d+)?";
final String value = "(?:" + string + "|" + number + "|null|true|false)";
final String item = string + ws + ":" + ws + value + ws;
final String object = ws + "\\{" + ws + "(?:" + item + "(?:" + "," + ws + item + ")*)?\\}" + ws;
- MetaDataPattern = Pattern.compile(object, Pattern.DOTALL);
+ metaDataPattern = Pattern.compile(object, Pattern.DOTALL);
}
+ private final Delivery delivery;
+
NodeServlet(Delivery delivery) {
this.delivery = delivery;
}
/**
- * Get the NodeConfigurationManager
+ * Get the NodeConfigurationManager.
*/
@Override
public void init() {
@@ -91,7 +94,7 @@ public class NodeServlet extends HttpServlet {
eelfLogger.info("NODE0101 Node Servlet Configured");
}
- private boolean down(HttpServletResponse resp) throws IOException {
+ private boolean down(HttpServletResponse resp) {
if (config.isShutdown() || !config.isConfigured()) {
sendResponseError(resp, HttpServletResponse.SC_SERVICE_UNAVAILABLE, eelfLogger);
eelfLogger.info("NODE0102 Rejecting request: Service is being quiesced");
@@ -101,7 +104,7 @@ public class NodeServlet extends HttpServlet {
}
/**
- * Handle a GET for /internal/fetchProv
+ * Handle a GET for /internal/fetchProv.
*/
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
@@ -109,15 +112,10 @@ public class NodeServlet extends HttpServlet {
NodeUtils.setRequestIdAndInvocationId(req);
eelfLogger.info(EelfMsgs.ENTRY);
try {
- eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader("X-DMAAP-DR-ON-BEHALF-OF"),
+ eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(ON_BEHALF_OF),
getIdFromPath(req) + "");
- try {
- if (down(resp)) {
- return;
- }
-
- } catch (IOException ioe) {
- eelfLogger.error("IOException" + ioe.getMessage());
+ if (down(resp)) {
+ return;
}
String path = req.getPathInfo();
String qs = req.getQueryString();
@@ -138,7 +136,7 @@ public class NodeServlet extends HttpServlet {
}
}
- eelfLogger.info("NODE0103 Rejecting invalid GET of " + path + " from " + ip);
+ eelfLogger.info("NODE0103 Rejecting invalid GET of " + path + FROM + ip);
sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, eelfLogger);
} finally {
eelfLogger.info(EelfMsgs.EXIT);
@@ -146,50 +144,55 @@ public class NodeServlet extends HttpServlet {
}
/**
- * Handle all PUT requests
+ * Handle all PUT requests.
*/
@Override
protected void doPut(HttpServletRequest req, HttpServletResponse resp) {
NodeUtils.setIpAndFqdnForEelf("doPut");
NodeUtils.setRequestIdAndInvocationId(req);
eelfLogger.info(EelfMsgs.ENTRY);
- eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader("X-DMAAP-DR-ON-BEHALF-OF"),
+ eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(ON_BEHALF_OF),
getIdFromPath(req) + "");
try {
common(req, resp, true);
} catch (IOException ioe) {
- eelfLogger.error("IOException" + ioe.getMessage());
+ eelfLogger.error(IO_EXCEPTION, ioe);
eelfLogger.info(EelfMsgs.EXIT);
}
}
/**
- * Handle all DELETE requests
+ * Handle all DELETE requests.
*/
@Override
protected void doDelete(HttpServletRequest req, HttpServletResponse resp) {
NodeUtils.setIpAndFqdnForEelf("doDelete");
NodeUtils.setRequestIdAndInvocationId(req);
eelfLogger.info(EelfMsgs.ENTRY);
- eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader("X-DMAAP-DR-ON-BEHALF-OF"),
+ eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(ON_BEHALF_OF),
getIdFromPath(req) + "");
try {
common(req, resp, false);
} catch (IOException ioe) {
- eelfLogger.error("IOException " + ioe.getMessage());
+ eelfLogger.error(IO_EXCEPTION, ioe);
eelfLogger.info(EelfMsgs.EXIT);
}
}
private void common(HttpServletRequest req, HttpServletResponse resp, boolean isput) throws IOException {
+ final String PUBLISH = "/publish/";
+ final String INTERNAL_PUBLISH = "/internal/publish/";
+ final String HTTPS = "https://";
+ final String USER = " user ";
String fileid = getFileId(req, resp);
- if (fileid == null) return;
+ if (fileid == null) {
+ return;
+ }
String feedid = null;
String user = null;
String ip = req.getRemoteAddr();
String lip = req.getLocalAddr();
String pubid = null;
- String xpubid = null;
String rcvd = NodeUtils.logts(System.currentTimeMillis()) + ";from=" + ip + ";by=" + lip;
Target[] targets = null;
boolean isAAFFeed = false;
@@ -199,37 +202,38 @@ public class NodeServlet extends HttpServlet {
}
String credentials = req.getHeader("Authorization");
if (credentials == null) {
- eelfLogger.error("NODE0106 Rejecting unauthenticated PUT or DELETE of " + req.getPathInfo() + " from " + req
+ eelfLogger.error("NODE0306 Rejecting unauthenticated PUT or DELETE of " + req.getPathInfo() + FROM + req
.getRemoteAddr());
resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Authorization header required");
eelfLogger.info(EelfMsgs.EXIT);
return;
}
- if (fileid.startsWith("/publish/")) {
+ if (fileid.startsWith(PUBLISH)) {
fileid = fileid.substring(9);
- int i = fileid.indexOf('/');
- if (i == -1 || i == fileid.length() - 1) {
- eelfLogger.error("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
+ int index = fileid.indexOf('/');
+ if (index == -1 || index == fileid.length() - 1) {
+ eelfLogger.error("NODE0205 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + FROM + req
.getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
"Invalid request URI. Expecting <feed-publishing-url>/<fileid>. Possible missing fileid.");
eelfLogger.info(EelfMsgs.EXIT);
return;
}
- feedid = fileid.substring(0, i);
+ feedid = fileid.substring(0, index);
- if (config.getCadiEnabeld()) {
+ if (config.getCadiEnabled()) {
String path = req.getPathInfo();
if (!path.startsWith("/internal") && feedid != null) {
String aafInstance = config.getAafInstance(feedid);
- if (!(aafInstance.equalsIgnoreCase("legacy"))) {
+ if (!("legacy".equalsIgnoreCase(aafInstance))) {
isAAFFeed = true;
String permission = config.getPermission(aafInstance);
eelfLogger.info("NodeServlet.common() permission string - " + permission);
//Check in CADI Framework API if user has AAF permission or not
if (!req.isUserInRole(permission)) {
String message = "AAF disallows access to permission string - " + permission;
- eelfLogger.error("NODE0106 Rejecting unauthenticated PUT or DELETE of " + req.getPathInfo() + " from " + req.getRemoteAddr());
+ eelfLogger.error("NODE0307 Rejecting unauthenticated PUT or DELETE of " + req.getPathInfo()
+ + FROM + req.getRemoteAddr());
resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
eelfLogger.info(EelfMsgs.EXIT);
return;
@@ -238,11 +242,10 @@ public class NodeServlet extends HttpServlet {
}
}
- fileid = fileid.substring(i + 1);
+ fileid = fileid.substring(index + 1);
pubid = config.getPublishId();
- xpubid = req.getHeader("X-DMAAP-DR-PUBLISH-ID");
targets = config.getTargets(feedid);
- } else if (fileid.startsWith("/internal/publish/")) {
+ } else if (fileid.startsWith(INTERNAL_PUBLISH)) {
if (!config.isAnotherNode(credentials, ip)) {
eelfLogger.error("NODE0107 Rejecting unauthorized node-to-node transfer attempt from " + ip);
resp.sendError(HttpServletResponse.SC_FORBIDDEN);
@@ -254,18 +257,18 @@ public class NodeServlet extends HttpServlet {
user = "datartr"; // SP6 : Added usr as datartr to avoid null entries for internal routing
targets = config.parseRouting(req.getHeader("X-DMAAP-DR-ROUTING"));
} else {
- eelfLogger.error("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
+ eelfLogger.error("NODE0204 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + FROM + req
.getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
- "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
+ INVALID_REQUEST_URI);
eelfLogger.info(EelfMsgs.EXIT);
return;
}
if (fileid.indexOf('/') != -1) {
- eelfLogger.error("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
+ eelfLogger.error("NODE0202 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + FROM + req
.getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
- "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
+ INVALID_REQUEST_URI);
eelfLogger.info(EelfMsgs.EXIT);
return;
}
@@ -278,14 +281,16 @@ public class NodeServlet extends HttpServlet {
if (xp != 443) {
hp = hp + ":" + xp;
}
- String logurl = "https://" + hp + "/internal/publish/" + fileid;
+ String logurl = HTTPS + hp + INTERNAL_PUBLISH + fileid;
if (feedid != null) {
- logurl = "https://" + hp + "/publish/" + feedid + "/" + fileid;
+ logurl = HTTPS + hp + PUBLISH + feedid + "/" + fileid;
//Cadi code starts
if (!isAAFFeed) {
String reason = config.isPublishPermitted(feedid, credentials, ip);
if (reason != null) {
- eelfLogger.error("NODE0111 Rejecting unauthorized publish attempt to feed " + PathUtil.cleanString(feedid) + " fileid " + PathUtil.cleanString(fileid) + " from " + PathUtil.cleanString(ip) + " reason " + PathUtil.cleanString(reason));
+ eelfLogger.error("NODE0111 Rejecting unauthorized publish attempt to feed " + PathUtil
+ .cleanString(feedid) + " fileid " + PathUtil.cleanString(fileid) + FROM + PathUtil
+ .cleanString(ip) + " reason " + PathUtil.cleanString(reason));
resp.sendError(HttpServletResponse.SC_FORBIDDEN, reason);
eelfLogger.info(EelfMsgs.EXIT);
return;
@@ -294,9 +299,12 @@ public class NodeServlet extends HttpServlet {
} else {
String reason = config.isPublishPermitted(feedid, ip);
if (reason != null) {
- eelfLogger.error("NODE0111 Rejecting unauthorized publish attempt to feed " + PathUtil.cleanString(feedid) + " fileid " + PathUtil.cleanString(fileid) + " from " + PathUtil.cleanString(ip) + " reason Invalid AAF user- " + PathUtil.cleanString(reason));
+ eelfLogger.error("NODE0111 Rejecting unauthorized publish attempt to feed " + PathUtil
+ .cleanString(feedid) + " fileid " + PathUtil.cleanString(fileid) + FROM + PathUtil
+ .cleanString(ip) + " reason Invalid AAF user- " + PathUtil.cleanString(reason));
String message = "Invalid AAF user- " + PathUtil.cleanString(reason);
- eelfLogger.info("NODE0106 Rejecting unauthenticated PUT or DELETE of " + PathUtil.cleanString(req.getPathInfo()) + " from " + PathUtil.cleanString(req.getRemoteAddr()));
+ eelfLogger.info("NODE0308 Rejecting unauthenticated PUT or DELETE of " + PathUtil
+ .cleanString(req.getPathInfo()) + FROM + PathUtil.cleanString(req.getRemoteAddr()));
resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
return;
}
@@ -316,25 +324,26 @@ public class NodeServlet extends HttpServlet {
if (iport != 443) {
port = ":" + iport;
}
- String redirto = "https://" + newnode + port + "/publish/" + feedid + "/" + fileid;
- eelfLogger.info("NODE0108 Redirecting publish attempt for feed " + PathUtil.cleanString(feedid) + " user " + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip) + " to " + PathUtil.cleanString(redirto)); //Fortify scan fixes - log forging
+ String redirto = HTTPS + newnode + port + PUBLISH + feedid + "/" + fileid;
+ eelfLogger
+ .info("NODE0108 Redirecting publish attempt for feed " + PathUtil.cleanString(feedid) + USER
+ + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip) + " to " + PathUtil
+ .cleanString(redirto)); //Fortify scan fixes - log forging
resp.sendRedirect(PathUtil.cleanString(redirto)); //Fortify scan fixes-open redirect - 2 issues
eelfLogger.info(EelfMsgs.EXIT);
return;
}
resp.setHeader("X-DMAAP-DR-PUBLISH-ID", pubid);
}
- if (req.getPathInfo().startsWith("/internal/publish/")) {
+ if (req.getPathInfo().startsWith(INTERNAL_PUBLISH)) {
feedid = req.getHeader("X-DMAAP-DR-FEED-ID");
}
String fbase = PathUtil.cleanString(config.getSpoolDir() + "/" + pubid); //Fortify scan fixes-Path manipulation
File data = new File(fbase);
File meta = new File(fbase + ".M");
- OutputStream dos = null;
Writer mw = null;
- InputStream is = null;
try {
- StringBuffer mx = new StringBuffer();
+ StringBuilder mx = new StringBuilder();
mx.append(req.getMethod()).append('\t').append(fileid).append('\n');
Enumeration hnames = req.getHeaderNames();
String ctype = null;
@@ -343,13 +352,13 @@ public class NodeServlet extends HttpServlet {
while (hnames.hasMoreElements()) {
String hn = (String) hnames.nextElement();
String hnlc = hn.toLowerCase();
- if ((isput && ("content-type".equals(hnlc) ||
- "content-language".equals(hnlc) ||
- "content-md5".equals(hnlc) ||
- "content-range".equals(hnlc))) ||
- "x-dmaap-dr-meta".equals(hnlc) ||
- (feedid == null && "x-dmaap-dr-received".equals(hnlc)) ||
- (hnlc.startsWith("x-") && !hnlc.startsWith("x-dmaap-dr-"))) {
+ if ((isput && ("content-type".equals(hnlc)
+ || "content-language".equals(hnlc)
+ || "content-md5".equals(hnlc)
+ || "content-range".equals(hnlc)))
+ || "x-dmaap-dr-meta".equals(hnlc)
+ || (feedid == null && "x-dmaap-dr-received".equals(hnlc))
+ || (hnlc.startsWith("x-") && !hnlc.startsWith("x-dmaap-dr-"))) {
Enumeration hvals = req.getHeaders(hn);
while (hvals.hasMoreElements()) {
String hv = (String) hvals.nextElement();
@@ -364,13 +373,17 @@ public class NodeServlet extends HttpServlet {
}
if ("x-dmaap-dr-meta".equals(hnlc)) {
if (hv.length() > 4096) {
- eelfLogger.error("NODE0109 Rejecting publish attempt with metadata too long for feed " + PathUtil.cleanString(feedid) + " user " + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip)); //Fortify scan fixes - log forging
+ eelfLogger.error("NODE0109 Rejecting publish attempt with metadata too long for feed "
+ + PathUtil.cleanString(feedid) + USER + PathUtil.cleanString(user) + " ip "
+ + PathUtil.cleanString(ip)); //Fortify scan fixes - log forging
resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Metadata too long");
eelfLogger.info(EelfMsgs.EXIT);
return;
}
- if (!MetaDataPattern.matcher(hv.replaceAll("\\\\.", "X")).matches()) {
- eelfLogger.error("NODE0109 Rejecting publish attempt with malformed metadata for feed " + PathUtil.cleanString(feedid) + " user " + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip)); //Fortify scan fixes - log forging
+ if (!metaDataPattern.matcher(hv.replaceAll("\\\\.", "X")).matches()) {
+ eelfLogger.error("NODE0109 Rejecting publish attempt with malformed metadata for feed "
+ + PathUtil.cleanString(feedid) + USER + PathUtil.cleanString(user) + " ip "
+ + PathUtil.cleanString(ip)); //Fortify scan fixes - log forging
resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Malformed metadata");
eelfLogger.info(EelfMsgs.EXIT);
return;
@@ -388,28 +401,12 @@ public class NodeServlet extends HttpServlet {
}
mx.append("X-DMAAP-DR-RECEIVED\t").append(rcvd).append('\n');
String metadata = mx.toString();
- byte[] buf = new byte[1024 * 1024];
- int i;
- try {
- is = req.getInputStream();
- dos = new FileOutputStream(data);
- while ((i = is.read(buf)) > 0) {
- dos.write(buf, 0, i);
- }
- is.close();
- is = null;
- dos.close();
- dos = null;
- } catch (IOException ioe) {
- long exlen = -1;
- try {
- exlen = Long.parseLong(req.getHeader("Content-Length"));
- } catch (Exception e) {
- eelfLogger.error("NODE0529 Exception common: " + e);
- }
- StatusLog.logPubFail(pubid, feedid, logurl, req.getMethod(), ctype, exlen, data.length(), ip, user, ioe.getMessage());
- eelfLogger.info(EelfMsgs.EXIT);
- throw ioe;
+ long exlen = getExlen(req);
+ String message = writeInputStreamToFile(req, data);
+ if (message != null) {
+ StatusLog.logPubFail(pubid, feedid, logurl, req.getMethod(), ctype, exlen, data.length(), ip, user,
+ message);
+ throw new IOException(message);
}
Path dpath = Paths.get(fbase);
for (Target t : targets) {
@@ -418,7 +415,8 @@ public class NodeServlet extends HttpServlet {
// TODO: unknown destination
continue;
}
- String dbase = PathUtil.cleanString(di.getSpool() + "/" + pubid); //Fortify scan fixes-Path Manipulation
+ String dbase = PathUtil
+ .cleanString(di.getSpool() + "/" + pubid); //Fortify scan fixes-Path Manipulation
Files.createLink(Paths.get(dbase), dpath);
mw = new FileWriter(meta);
mw.write(metadata);
@@ -427,45 +425,28 @@ public class NodeServlet extends HttpServlet {
}
mw.close();
meta.renameTo(new File(dbase + ".M"));
-
}
resp.setStatus(HttpServletResponse.SC_NO_CONTENT);
try {
resp.getOutputStream().close();
} catch (IOException ioe) {
- long exlen = -1;
- try {
- exlen = Long.parseLong(req.getHeader("Content-Length"));
- } catch (Exception e) {
- eelfLogger.error("NODE00000 Exception common: " + e.getMessage());
- }
- StatusLog.logPubFail(pubid, feedid, logurl, req.getMethod(), ctype, exlen, data.length(), ip, user, ioe.getMessage());
+ StatusLog.logPubFail(pubid, feedid, logurl, req.getMethod(), ctype, exlen, data.length(), ip, user,
+ ioe.getMessage());
//Fortify scan fixes - log forging
- eelfLogger.error("NODE0110 IO Exception while closing IO stream " + PathUtil.cleanString(feedid) + " user " + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip) + " " + ioe.toString(), ioe);
-
+ eelfLogger.error("NODE0110 IO Exception while closing IO stream " + PathUtil.cleanString(feedid)
+ + USER + PathUtil.cleanString(user) + " ip " + PathUtil.cleanString(ip) + " " + ioe
+ .toString(), ioe);
throw ioe;
}
- StatusLog.logPub(pubid, feedid, logurl, req.getMethod(), ctype, data.length(), ip, user, HttpServletResponse.SC_NO_CONTENT);
+ StatusLog.logPub(pubid, feedid, logurl, req.getMethod(), ctype, data.length(), ip, user,
+ HttpServletResponse.SC_NO_CONTENT);
} catch (IOException ioe) {
- eelfLogger.error("NODE0110 IO Exception receiving publish attempt for feed " + feedid + " user " + user + " ip " + ip + " " + ioe.toString(), ioe);
+ eelfLogger.error("NODE0110 IO Exception receiving publish attempt for feed " + feedid + USER + user
+ + " ip " + ip + " " + ioe.toString(), ioe);
eelfLogger.info(EelfMsgs.EXIT);
throw ioe;
} finally {
- if (is != null) {
- try {
- is.close();
- } catch (Exception e) {
- eelfLogger.error("NODE0530 Exception common: " + e);
- }
- }
- if (dos != null) {
- try {
- dos.close();
- } catch (Exception e) {
- eelfLogger.error("NODE0531 Exception common: " + e);
- }
- }
if (mw != null) {
try {
mw.close();
@@ -486,22 +467,49 @@ public class NodeServlet extends HttpServlet {
}
}
+ private String writeInputStreamToFile(HttpServletRequest req, File data) {
+ byte[] buf = new byte[1024 * 1024];
+ int bytesRead;
+ try (OutputStream dos = new FileOutputStream(data);
+ InputStream is = req.getInputStream()) {
+ while ((bytesRead = is.read(buf)) > 0) {
+ dos.write(buf, 0, bytesRead);
+ }
+ } catch (IOException ioe) {
+ eelfLogger.error("NODE0530 Exception common: " + ioe, ioe);
+ eelfLogger.info(EelfMsgs.EXIT);
+ return ioe.getMessage();
+ }
+ return null;
+ }
+
+ private long getExlen(HttpServletRequest req) {
+ long exlen = -1;
+ try {
+ exlen = Long.parseLong(req.getHeader("Content-Length"));
+ } catch (Exception e) {
+ eelfLogger.error("NODE0529 Exception common: " + e);
+ }
+ return exlen;
+ }
+
private void deleteFile(HttpServletRequest req, HttpServletResponse resp, String fileid, String pubid) {
+ final String FROM_DR_MESSAGE = ".M) from DR Node: ";
try {
fileid = fileid.substring(8);
- int i = fileid.indexOf('/');
- if (i == -1 || i == fileid.length() - 1) {
- eelfLogger.error("NODE0112 Rejecting bad URI for DELETE of " + req.getPathInfo() + " from " + req
+ int index = fileid.indexOf('/');
+ if (index == -1 || index == fileid.length() - 1) {
+ eelfLogger.error("NODE0112 Rejecting bad URI for DELETE of " + req.getPathInfo() + FROM + req
.getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
"Invalid request URI. Expecting <subId>/<pubId>.");
eelfLogger.info(EelfMsgs.EXIT);
return;
}
- String subscriptionId = fileid.substring(0, i);
+ String subscriptionId = fileid.substring(0, index);
int subId = Integer.parseInt(subscriptionId);
- pubid = fileid.substring(i + 1);
- String errorMessage = "Unable to delete files (" + pubid + ", " + pubid + ".M) from DR Node: "
+ pubid = fileid.substring(index + 1);
+ String errorMessage = "Unable to delete files (" + pubid + ", " + pubid + FROM_DR_MESSAGE
+ config.getMyName() + ".";
int subIdDir = subId - (subId % 100);
if (!isAuthorizedToDelete(resp, subscriptionId, errorMessage)) {
@@ -509,7 +517,7 @@ public class NodeServlet extends HttpServlet {
}
boolean result = delivery.markTaskSuccess(config.getSpoolBase() + "/s/" + subIdDir + "/" + subId, pubid);
if (result) {
- eelfLogger.info("NODE0115 Successfully deleted files (" + pubid + ", " + pubid + ".M) from DR Node: "
+ eelfLogger.info("NODE0115 Successfully deleted files (" + pubid + ", " + pubid + FROM_DR_MESSAGE
+ config.getMyName());
resp.setStatus(HttpServletResponse.SC_OK);
eelfLogger.info(EelfMsgs.EXIT);
@@ -519,8 +527,8 @@ public class NodeServlet extends HttpServlet {
eelfLogger.info(EelfMsgs.EXIT);
}
} catch (IOException ioe) {
- eelfLogger.error("NODE0117 Unable to delete files (" + pubid + ", " + pubid + ".M) from DR Node: "
- + config.getMyName() + ". Error: " + ioe.getMessage());
+ eelfLogger.error("NODE0117 Unable to delete files (" + pubid + ", " + pubid + FROM_DR_MESSAGE
+ + config.getMyName(), ioe);
eelfLogger.info(EelfMsgs.EXIT);
}
}
@@ -533,7 +541,7 @@ public class NodeServlet extends HttpServlet {
}
if (!req.isSecure()) {
eelfLogger.error(
- "NODE0104 Rejecting insecure PUT or DELETE of " + req.getPathInfo() + " from " + req
+ "NODE0104 Rejecting insecure PUT or DELETE of " + req.getPathInfo() + FROM + req
.getRemoteAddr());
resp.sendError(HttpServletResponse.SC_FORBIDDEN, "https required on publish requests");
eelfLogger.info(EelfMsgs.EXIT);
@@ -541,17 +549,18 @@ public class NodeServlet extends HttpServlet {
}
String fileid = req.getPathInfo();
if (fileid == null) {
- eelfLogger.error("NODE0105 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + " from " + req
+ eelfLogger.error("NODE0201 Rejecting bad URI for PUT or DELETE of " + req.getPathInfo() + FROM + req
.getRemoteAddr());
resp.sendError(HttpServletResponse.SC_NOT_FOUND,
- "Invalid request URI. Expecting <feed-publishing-url>/<fileid>.");
+ INVALID_REQUEST_URI);
eelfLogger.info(EelfMsgs.EXIT);
return null;
}
return fileid;
}
- private boolean isAuthorizedToDelete(HttpServletResponse resp, String subscriptionId, String errorMessage) throws IOException {
+ private boolean isAuthorizedToDelete(HttpServletResponse resp, String subscriptionId, String errorMessage)
+ throws IOException {
try {
boolean deletePermitted = config.isDeletePermitted(subscriptionId);
if (!deletePermitted) {
@@ -562,7 +571,8 @@ public class NodeServlet extends HttpServlet {
return false;
}
} catch (NullPointerException npe) {
- eelfLogger.error("NODE0114 " + errorMessage + " Error: Subscription " + subscriptionId + " does not exist");
+ eelfLogger.error("NODE0114 " + errorMessage + " Error: Subscription " + subscriptionId
+ + " does not exist", npe);
resp.sendError(HttpServletResponse.SC_NOT_FOUND);
eelfLogger.info(EelfMsgs.EXIT);
return false;
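The writeInputStreamToFile helper introduced above streams the request body into the spool file with try-with-resources and reports failure through its return value instead of hand-rolled close() bookkeeping. A minimal standalone sketch of the same pattern, with hypothetical class and method names (not the servlet code itself):

    import java.io.ByteArrayInputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    public class StreamToFileSketch {

        // Copy an input stream to a file; return null on success, or the error message on failure,
        // mirroring the error-reporting style of writeInputStreamToFile.
        static String copy(InputStream is, File data) {
            byte[] buf = new byte[1024 * 1024];
            int bytesRead;
            try (OutputStream dos = new FileOutputStream(data)) {
                while ((bytesRead = is.read(buf)) > 0) {
                    dos.write(buf, 0, bytesRead);
                }
            } catch (IOException ioe) {
                return ioe.getMessage();
            }
            return null;
        }

        public static void main(String[] args) throws IOException {
            File spool = File.createTempFile("spool", ".tmp");
            String error = copy(new ByteArrayInputStream("payload".getBytes()), spool);
            System.out.println(error == null ? "wrote " + spool.length() + " bytes" : "failed: " + error);
        }
    }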
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java
index 4601f99c..d4fc7dbe 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeUtils.java
@@ -24,15 +24,22 @@
package org.onap.dmaap.datarouter.node;
+import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID;
+import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN;
+import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS;
+import static com.att.eelf.configuration.Configuration.MDC_SERVICE_NAME;
+
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.security.KeyStore;
+import java.security.KeyStoreException;
import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.text.SimpleDateFormat;
import java.util.Date;
@@ -47,10 +54,8 @@ import org.apache.commons.lang3.StringUtils;
import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
import org.slf4j.MDC;
-import static com.att.eelf.configuration.Configuration.*;
-
/**
- * Utility functions for the data router node
+ * Utility functions for the data router node.
*/
public class NodeUtils {
@@ -61,9 +66,9 @@ public class NodeUtils {
}
/**
- * Base64 encode a byte array
+ * Base64 encode a byte array.
*
- * @param raw The bytes to be encoded
+ * @param raw The bytes to be encoded
* @return The encoded string
*/
public static String base64Encode(byte[] raw) {
@@ -71,7 +76,7 @@ public class NodeUtils {
}
/**
- * Given a user and password, generate the credentials
+ * Given a user and password, generate the credentials.
*
* @param user User name
* @param password User password
@@ -85,7 +90,7 @@ public class NodeUtils {
}
/**
- * Given a node name, generate the credentials
+ * Given a node name, generate the credentials.
*
* @param node Node name
*/
@@ -117,16 +122,12 @@ public class NodeUtils {
KeyStore ks;
try {
ks = KeyStore.getInstance(kstype);
- try (FileInputStream fileInputStream = new FileInputStream(ksfile)) {
- ks.load(fileInputStream, kspass.toCharArray());
- } catch (IOException ioException) {
- eelfLogger.error("IOException occurred while opening FileInputStream: " + ioException.getMessage(),
- ioException);
+ if (loadKeyStore(ksfile, kspass, ks)) {
return (null);
}
} catch (Exception e) {
setIpAndFqdnForEelf("getCanonicalName");
- eelfLogger.error(EelfMsgs.MESSAGE_KEYSTORE_LOAD_ERROR, ksfile, e.toString());
+ eelfLogger.error(EelfMsgs.MESSAGE_KEYSTORE_LOAD_ERROR, e, ksfile);
return (null);
}
return (getCanonicalName(ks));
@@ -142,32 +143,19 @@ public class NodeUtils {
try {
Enumeration<String> aliases = ks.aliases();
while (aliases.hasMoreElements()) {
- String s = aliases.nextElement();
- if (ks.entryInstanceOf(s, KeyStore.PrivateKeyEntry.class)) {
- X509Certificate c = (X509Certificate) ks.getCertificate(s);
- if (c != null) {
- String subject = c.getSubjectX500Principal().getName();
- String[] parts = subject.split(",");
- if (parts.length < 1) {
- return (null);
- }
- subject = parts[5].trim();
- if (!subject.startsWith("CN=")) {
- return (null);
-
- }
- return (subject.substring(3));
- }
+ String name = getNameFromSubject(ks, aliases);
+ if (name != null) {
+ return name;
}
}
} catch (Exception e) {
- eelfLogger.error("NODE0402 Error extracting my name from my keystore file " + e.toString(), e.getMessage());
+ eelfLogger.error("NODE0402 Error extracting my name from my keystore file " + e.toString(), e);
}
return (null);
}
/**
- * Given a string representation of an IP address, get the corresponding byte array
+ * Given a string representation of an IP address, get the corresponding byte array.
*
* @param ip The IP address as a string
* @return The IP address as a byte array or null if the address is invalid
@@ -184,48 +172,48 @@ public class NodeUtils {
}
/**
- * Given a uri with parameters, split out the feed ID and file ID
+ * Given a uri with parameters, split out the feed ID and file ID.
*/
public static String[] getFeedAndFileID(String uriandparams) {
int end = uriandparams.length();
- int i = uriandparams.indexOf('#');
- if (i != -1 && i < end) {
- end = i;
+ int index = uriandparams.indexOf('#');
+ if (index != -1 && index < end) {
+ end = index;
}
- i = uriandparams.indexOf('?');
- if (i != -1 && i < end) {
- end = i;
+ index = uriandparams.indexOf('?');
+ if (index != -1 && index < end) {
+ end = index;
}
end = uriandparams.lastIndexOf('/', end);
if (end < 2) {
return (null);
}
- i = uriandparams.lastIndexOf('/', end - 1);
- if (i == -1) {
+ index = uriandparams.lastIndexOf('/', end - 1);
+ if (index == -1) {
return (null);
}
- return (new String[]{uriandparams.substring(i + 1, end), uriandparams.substring(end + 1)});
+ return (new String[]{uriandparams.substring(index + 1, end), uriandparams.substring(end + 1)});
}
/**
* Escape fields that might contain vertical bar, backslash, or newline by replacing them with backslash p,
* backslash e and backslash n.
*/
- public static String loge(String s) {
- if (s == null) {
- return (s);
+ public static String loge(String string) {
+ if (string == null) {
+ return (string);
}
- return (s.replaceAll("\\\\", "\\\\e").replaceAll("\\|", "\\\\p").replaceAll("\n", "\\\\n"));
+ return (string.replaceAll("\\\\", "\\\\e").replaceAll("\\|", "\\\\p").replaceAll("\n", "\\\\n"));
}
/**
* Undo what loge does.
*/
- public static String unloge(String s) {
- if (s == null) {
- return (s);
+ public static String unloge(String string) {
+ if (string == null) {
+ return (string);
}
- return (s.replaceAll("\\\\p", "\\|").replaceAll("\\\\n", "\n").replaceAll("\\\\e", "\\\\"));
+ return (string.replaceAll("\\\\p", "\\|").replaceAll("\\\\n", "\n").replaceAll("\\\\e", "\\\\"));
}
/**
@@ -244,9 +232,9 @@ public class NodeUtils {
return (logDate.format(when));
}
- /* Method prints method name, server FQDN and IP Address of the machine in EELF logs
- * @Method - setIpAndFqdnForEelf - Rally:US664892
- * @Params - method, prints method name in EELF log.
+ /** Method prints method name, server FQDN and IP Address of the machine in EELF logs.
+ *
+ * @param method Prints method name in EELF log.
*/
public static void setIpAndFqdnForEelf(String method) {
MDC.clear();
@@ -262,9 +250,9 @@ public class NodeUtils {
}
- /* Method sets RequestIs and InvocationId for se in EELF logs
- * @Method - setIpAndFqdnForEelf
- * @Params - Req, Request used to get RequestId and InvocationId
+    /** Method sets RequestId and InvocationId for use in EELF logs.
+ *
+ * @param req Request used to get RequestId and InvocationId.
*/
public static void setRequestIdAndInvocationId(HttpServletRequest req) {
String reqId = req.getHeader("X-ONAP-RequestID");
@@ -279,30 +267,65 @@ public class NodeUtils {
MDC.put("InvocationId", invId);
}
+ /**
+ * Sends error as response with error code input.
+ */
public static void sendResponseError(HttpServletResponse response, int errorCode, EELFLogger intlogger) {
try {
response.sendError(errorCode);
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("IOException", ioe);
}
}
/**
- * Method to check to see if file is of type gzip
+ * Method to check to see if file is of type gzip.
*
- * @param file The name of the file to be checked
- * @return True if the file is of type gzip
+ * @param file The name of the file to be checked
+ * @return True if the file is of type gzip
*/
- public static boolean isFiletypeGzip(File file){
- try(FileInputStream fileInputStream = new FileInputStream(file);
- GZIPInputStream gzip = new GZIPInputStream(fileInputStream)) {
+ public static boolean isFiletypeGzip(File file) {
+ try (FileInputStream fileInputStream = new FileInputStream(file);
+ GZIPInputStream gzip = new GZIPInputStream(fileInputStream)) {
return true;
- }catch (IOException e){
+ } catch (IOException e) {
eelfLogger.error("NODE0403 " + file.toString() + " Not in gzip(gz) format: " + e.toString() + e);
return false;
}
}
+ private static boolean loadKeyStore(String ksfile, String kspass, KeyStore ks)
+ throws NoSuchAlgorithmException, CertificateException {
+ try (FileInputStream fileInputStream = new FileInputStream(ksfile)) {
+ ks.load(fileInputStream, kspass.toCharArray());
+ } catch (IOException ioException) {
+ eelfLogger.error("IOException occurred while opening FileInputStream: " + ioException.getMessage(),
+ ioException);
+ return true;
+ }
+ return false;
+ }
+
+
+ private static String getNameFromSubject(KeyStore ks, Enumeration<String> aliases) throws KeyStoreException {
+ String alias = aliases.nextElement();
+ if (ks.entryInstanceOf(alias, KeyStore.PrivateKeyEntry.class)) {
+ X509Certificate cert = (X509Certificate) ks.getCertificate(alias);
+ if (cert != null) {
+ String subject = cert.getSubjectX500Principal().getName();
+ String[] parts = subject.split(",");
+ if (parts.length < 1) {
+ return null;
+ }
+ subject = parts[5].trim();
+ if (!subject.startsWith("CN=")) {
+ return null;
+ }
+ return subject.substring(3);
+ }
+ }
+ return null;
+ }
}
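The loge/unloge pair above escapes backslashes, vertical bars and newlines so that log fields stay one per line. A small self-contained round-trip check of that escaping (the method bodies mirror the NodeUtils code shown in the diff; the sketch class name is invented):

    public class LogEscapeSketch {

        // Escape backslash, vertical bar and newline as \e, \p and \n (same logic as NodeUtils.loge).
        static String loge(String string) {
            if (string == null) {
                return string;
            }
            return string.replaceAll("\\\\", "\\\\e").replaceAll("\\|", "\\\\p").replaceAll("\n", "\\\\n");
        }

        // Undo the escaping (same logic as NodeUtils.unloge).
        static String unloge(String string) {
            if (string == null) {
                return string;
            }
            return string.replaceAll("\\\\p", "\\|").replaceAll("\\\\n", "\n").replaceAll("\\\\e", "\\\\");
        }

        public static void main(String[] args) {
            String raw = "feed|1\\spool\nnext";
            String escaped = loge(raw);
            System.out.println(escaped);                     // feed\p1\espool\nnext
            System.out.println(raw.equals(unloge(escaped))); // true
        }
    }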
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java
index fec2ca39..fe3fdb6e 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathFinder.java
@@ -24,43 +24,70 @@
package org.onap.dmaap.datarouter.node;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.HashSet;
-import java.util.Hashtable;
-import java.util.Vector;
+import org.jetbrains.annotations.Nullable;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvHop;
/**
* Given a set of node names and next hops, identify and ignore any cycles and figure out the sequence of next hops to
- * get from this node to any other node
+ * get from this node to any other node.
*/
-public class PathFinder {
+class PathFinder {
- private static class Hop {
+ private ArrayList<String> errors = new ArrayList<>();
+ private HashMap<String, String> routes = new HashMap<>();
- boolean mark;
- boolean bad;
- NodeConfig.ProvHop basis;
+ /**
+ * Find routes from a specified origin to all of the nodes given a set of specified next hops.
+ *
+ * @param origin where we start
+ * @param nodes where we can go
+ * @param hops detours along the way
+ */
+ PathFinder(String origin, String[] nodes, NodeConfig.ProvHop[] hops) {
+ HashSet<String> known = new HashSet<>();
+ HashMap<String, HashMap<String, Hop>> ht = new HashMap<>();
+ for (String n : nodes) {
+ known.add(n);
+ ht.put(n, new HashMap<>());
+ }
+ for (NodeConfig.ProvHop ph : hops) {
+ Hop hop = getHop(known, ht, ph);
+ if (hop == null) {
+ continue;
+ }
+ if (ph.getVia().equals(ph.getTo())) {
+ errors.add(ph + " gives destination as via");
+ hop.bad = true;
+ }
+ }
+ for (String n : known) {
+ if (n.equals(origin)) {
+ routes.put(n, "");
+ }
+ routes.put(n, plot(origin, n, ht.get(n)) + "/");
+ }
}
- private Vector<String> errors = new Vector<String>();
- private Hashtable<String, String> routes = new Hashtable<String, String>();
-
/**
- * Get list of errors encountered while finding paths
+ * Get list of errors encountered while finding paths.
*
* @return array of error descriptions
*/
- public String[] getErrors() {
- return (errors.toArray(new String[errors.size()]));
+ String[] getErrors() {
+ return (errors.toArray(new String[0]));
}
/**
- * Get the route from this node to the specified node
+ * Get the route from this node to the specified node.
*
* @param destination node
* @return list of node names separated by and ending with "/"
*/
- public String getPath(String destination) {
+ String getPath(String destination) {
String ret = routes.get(destination);
if (ret == null) {
return ("");
@@ -68,13 +95,12 @@ public class PathFinder {
return (ret);
}
- private String plot(String from, String to, Hashtable<String, Hop> info) {
+ private String plot(String from, String to, HashMap<String, Hop> info) {
Hop nh = info.get(from);
if (nh == null || nh.bad) {
return (to);
}
if (nh.mark) {
- // loop detected;
while (!nh.bad) {
nh.bad = true;
errors.add(nh.basis + " is part of a cycle");
@@ -83,63 +109,46 @@ public class PathFinder {
return (to);
}
nh.mark = true;
- String x = plot(nh.basis.getVia(), to, info);
+ String route = plot(nh.basis.getVia(), to, info);
nh.mark = false;
if (nh.bad) {
return (to);
}
- return (nh.basis.getVia() + "/" + x);
+ return (nh.basis.getVia() + "/" + route);
}
- /**
- * Find routes from a specified origin to all of the nodes given a set of specified next hops.
- *
- * @param origin where we start
- * @param nodes where we can go
- * @param hops detours along the way
- */
- public PathFinder(String origin, String[] nodes, NodeConfig.ProvHop[] hops) {
- HashSet<String> known = new HashSet<String>();
- Hashtable<String, Hashtable<String, Hop>> ht = new Hashtable<String, Hashtable<String, Hop>>();
- for (String n : nodes) {
- known.add(n);
- ht.put(n, new Hashtable<String, Hop>());
+ @Nullable
+ private Hop getHop(HashSet<String> known, HashMap<String, HashMap<String, Hop>> ht, ProvHop ph) {
+ if (!known.contains(ph.getFrom())) {
+ errors.add(ph + " references unknown from node");
+ return null;
}
- for (NodeConfig.ProvHop ph : hops) {
- if (!known.contains(ph.getFrom())) {
- errors.add(ph + " references unknown from node");
- continue;
- }
- if (!known.contains(ph.getTo())) {
- errors.add(ph + " references unknown destination node");
- continue;
- }
- Hashtable<String, Hop> ht2 = ht.get(ph.getTo());
- Hop h = ht2.get(ph.getFrom());
- if (h != null) {
- h.bad = true;
- errors.add(ph + " gives duplicate next hop - previous via was " + h.basis.getVia());
- continue;
- }
- h = new Hop();
- h.basis = ph;
- ht2.put(ph.getFrom(), h);
- if (!known.contains(ph.getVia())) {
- errors.add(ph + " references unknown via node");
- h.bad = true;
- continue;
- }
- if (ph.getVia().equals(ph.getTo())) {
- errors.add(ph + " gives destination as via");
- h.bad = true;
- continue;
- }
+ if (!known.contains(ph.getTo())) {
+ errors.add(ph + " references unknown destination node");
+ return null;
}
- for (String n : known) {
- if (n.equals(origin)) {
- routes.put(n, "");
- }
- routes.put(n, plot(origin, n, ht.get(n)) + "/");
+ HashMap<String, Hop> ht2 = ht.get(ph.getTo());
+ Hop hop = ht2.get(ph.getFrom());
+ if (hop != null) {
+ hop.bad = true;
+ errors.add(ph + " gives duplicate next hop - previous via was " + hop.basis.getVia());
+ return null;
+ }
+ hop = new Hop();
+ hop.basis = ph;
+ ht2.put(ph.getFrom(), hop);
+ if (!known.contains(ph.getVia())) {
+ errors.add(ph + " references unknown via node");
+ hop.bad = true;
+ return null;
}
+ return hop;
+ }
+
+ private static class Hop {
+
+ boolean mark;
+ boolean bad;
+ NodeConfig.ProvHop basis;
}
}
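PathFinder is now package-private, so callers live in the same package. A minimal usage sketch, assuming NodeConfig.ProvHop keeps the (from, to, via) constructor used elsewhere in this changeset; node names and the sketch class are illustrative:

    package org.onap.dmaap.datarouter.node;

    public class PathFinderSketch {

        public static void main(String[] args) {
            String[] nodes = {"node1", "node2", "node3"};
            // One forced detour: traffic from node1 to node3 must go via node2.
            NodeConfig.ProvHop[] hops = {new NodeConfig.ProvHop("node1", "node3", "node2")};
            PathFinder finder = new PathFinder("node1", nodes, hops);
            for (String error : finder.getErrors()) {
                System.out.println("routing error: " + error);
            }
            System.out.println(finder.getPath("node3")); // node2/node3/
            System.out.println(finder.getPath("node2")); // node2/
        }
    }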
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java
index a4034410..d67c9094 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PathUtil.java
@@ -1,67 +1,84 @@
-/**
- * -
+/*-
* ============LICENSE_START=======================================================
- * Copyright (C) 2019 Nordix Foundation.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
- * <p>
+ *
* SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+
package org.onap.dmaap.datarouter.node;
/**
- * FORTIFY SCAN FIXES
+ * FORTIFY SCAN FIXES.
* <p>This Utility is used for Fortify fixes. It Validates the path url formed from
- * the string passed in the request parameters.</p>
- *
+ * the string passed in the request parameters.</p>
*/
class PathUtil {
+ private PathUtil() {
+ throw new IllegalStateException("Utility Class");
+ }
+
/**
* This method takes String as the parameter and return the filtered path string.
- * @param aString String to clean
+ *
+ * @param string String to clean
* @return A cleaned String
*/
- static String cleanString(String aString) {
- if (aString == null) return null;
- String cleanString = "";
- for (int i = 0; i < aString.length(); ++i) {
- cleanString += cleanChar(aString.charAt(i));
+ static String cleanString(String string) {
+ if (string == null) {
+ return null;
+ }
+ StringBuilder cleanString = new StringBuilder();
+ for (int i = 0; i < string.length(); ++i) {
+ cleanString.append(cleanChar(string.charAt(i)));
}
- return cleanString;
+ return cleanString.toString();
}
/**
* This method filters the valid special characters in path string.
- * @param aChar The char to be cleaned
+ *
+ * @param character The char to be cleaned
* @return The cleaned char
*/
- private static char cleanChar(char aChar) {
+ private static char cleanChar(char character) {
// 0 - 9
for (int i = 48; i < 58; ++i) {
- if (aChar == i) return (char) i;
+ if (character == i) {
+ return (char) i;
+ }
}
// 'A' - 'Z'
for (int i = 65; i < 91; ++i) {
- if (aChar == i) return (char) i;
+ if (character == i) {
+ return (char) i;
+ }
}
// 'a' - 'z'
for (int i = 97; i < 123; ++i) {
- if (aChar == i) return (char) i;
+ if (character == i) {
+ return (char) i;
+ }
}
+ return getValidCharacter(character);
+ }
+
+ private static char getValidCharacter(char character) {
// other valid characters
- switch (aChar) {
+ switch (character) {
case '/':
return '/';
case '.':
@@ -82,7 +99,8 @@ class PathUtil {
return '_';
case ' ':
return ' ';
+ default:
+ return '%';
}
- return '%';
}
}
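cleanString whitelists ASCII letters, digits and a handful of path characters, replacing everything else with '%'. A standalone sketch of that whitelist idea; the punctuation set here only approximates getValidCharacter and the class name is invented, so treat it as illustrative:

    public class CleanStringSketch {

        // Keep ASCII letters, digits and a small set of path characters; replace the rest with '%'.
        static String clean(String input) {
            if (input == null) {
                return null;
            }
            StringBuilder cleaned = new StringBuilder();
            for (int i = 0; i < input.length(); i++) {
                char c = input.charAt(i);
                boolean keep = (c >= '0' && c <= '9') || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')
                        || "/._ -".indexOf(c) >= 0;
                cleaned.append(keep ? c : '%');
            }
            return cleaned.toString();
        }

        public static void main(String[] args) {
            // A newline smuggled into a path parameter is neutralised before it reaches logs or the filesystem.
            System.out.println(clean("/publish/1/file\nname")); // /publish/1/file%name
        }
    }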
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java
index 1af7dda4..03e952c1 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java
@@ -24,22 +24,36 @@
package org.onap.dmaap.datarouter.node;
-import java.io.*;
-import java.util.*;
-
-import org.json.*;
-import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
-
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import org.jetbrains.annotations.Nullable;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvFeed;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvFeedSubnet;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvFeedUser;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvForceEgress;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvForceIngress;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvHop;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvNode;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvParam;
+import org.onap.dmaap.datarouter.node.NodeConfig.ProvSubscription;
+import org.onap.dmaap.datarouter.node.eelf.EelfMsgs;
/**
* Parser for provisioning data from the provisioning server.
- * <p>
- * The ProvData class uses a Reader for the text configuration from the
- * provisioning server to construct arrays of raw configuration entries.
+ *
+ * <p>The ProvData class uses a Reader for the text configuration from the provisioning server to construct arrays of
+ * raw configuration entries.
*/
public class ProvData {
+
+ private static final String FEED_ID = "feedid";
private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(ProvData.class);
private NodeConfig.ProvNode[] pn;
private NodeConfig.ProvParam[] pp;
@@ -51,208 +65,39 @@ public class ProvData {
private NodeConfig.ProvForceEgress[] pfe;
private NodeConfig.ProvHop[] ph;
- private static String[] gvasa(JSONArray a, int index) {
- return (gvasa(a.get(index)));
- }
-
- private static String[] gvasa(JSONObject o, String key) {
- return (gvasa(o.opt(key)));
- }
-
- private static String[] gvasa(Object o) {
- if (o instanceof JSONArray) {
- JSONArray a = (JSONArray) o;
- Vector<String> v = new Vector<String>();
- for (int i = 0; i < a.length(); i++) {
- String s = gvas(a, i);
- if (s != null) {
- v.add(s);
- }
- }
- return (v.toArray(new String[v.size()]));
- } else {
- String s = gvas(o);
- if (s == null) {
- return (new String[0]);
- } else {
- return (new String[]{s});
- }
- }
- }
-
- private static String gvas(JSONArray a, int index) {
- return (gvas(a.get(index)));
- }
-
- private static String gvas(JSONObject o, String key) {
- return (gvas(o.opt(key)));
- }
-
- private static String gvas(Object o) {
- if (o instanceof Boolean || o instanceof Number || o instanceof String) {
- return (o.toString());
- }
- return (null);
- }
-
/**
- * Construct raw provisioing data entries from the text (JSON)
- * provisioning document received from the provisioning server
+     * Construct raw provisioning data entries from the text (JSON) provisioning document received from the provisioning
+ * server.
*
- * @param r The reader for the JSON text.
+ * @param reader The reader for the JSON text.
*/
- public ProvData(Reader r) throws IOException {
- Vector<NodeConfig.ProvNode> pnv = new Vector<NodeConfig.ProvNode>();
- Vector<NodeConfig.ProvParam> ppv = new Vector<NodeConfig.ProvParam>();
- Vector<NodeConfig.ProvFeed> pfv = new Vector<NodeConfig.ProvFeed>();
- Vector<NodeConfig.ProvFeedUser> pfuv = new Vector<NodeConfig.ProvFeedUser>();
- Vector<NodeConfig.ProvFeedSubnet> pfsnv = new Vector<NodeConfig.ProvFeedSubnet>();
- Vector<NodeConfig.ProvSubscription> psv = new Vector<NodeConfig.ProvSubscription>();
- Vector<NodeConfig.ProvForceIngress> pfiv = new Vector<NodeConfig.ProvForceIngress>();
- Vector<NodeConfig.ProvForceEgress> pfev = new Vector<NodeConfig.ProvForceEgress>();
- Vector<NodeConfig.ProvHop> phv = new Vector<NodeConfig.ProvHop>();
+ public ProvData(Reader reader) throws IOException {
+ ArrayList<ProvNode> pnv = new ArrayList<>();
+ ArrayList<NodeConfig.ProvParam> ppv = new ArrayList<>();
+ ArrayList<NodeConfig.ProvFeed> pfv = new ArrayList<>();
+ ArrayList<NodeConfig.ProvFeedUser> pfuv = new ArrayList<>();
+ ArrayList<NodeConfig.ProvFeedSubnet> pfsnv = new ArrayList<>();
+ ArrayList<NodeConfig.ProvSubscription> psv = new ArrayList<>();
+ ArrayList<NodeConfig.ProvForceIngress> pfiv = new ArrayList<>();
+ ArrayList<NodeConfig.ProvForceEgress> pfev = new ArrayList<>();
+ ArrayList<NodeConfig.ProvHop> phv = new ArrayList<>();
try {
- JSONTokener jtx = new JSONTokener(r);
+ JSONTokener jtx = new JSONTokener(reader);
JSONObject jcfg = new JSONObject(jtx);
char c = jtx.nextClean();
if (c != '\0') {
throw new JSONException("Spurious characters following configuration");
}
- r.close();
- JSONArray jfeeds = jcfg.optJSONArray("feeds");
- if (jfeeds != null) {
- for (int fx = 0; fx < jfeeds.length(); fx++) {
- JSONObject jfeed = jfeeds.getJSONObject(fx);
- String stat = null;
- if (jfeed.optBoolean("suspend", false)) {
- stat = "Feed is suspended";
- }
- if (jfeed.optBoolean("deleted", false)) {
- stat = "Feed is deleted";
- }
- String fid = gvas(jfeed, "feedid");
- String fname = gvas(jfeed, "name");
- String fver = gvas(jfeed, "version");
- String createdDate = gvas(jfeed, "created_date");
- /*
- * START - AAF changes
- * TDP EPIC US# 307413
- * Passing aafInstance to ProvFeed from feeds json passed by prov to identify legacy/AAF feeds
- */
- String aafInstance = gvas(jfeed, "aaf_instance");
- pfv.add(new NodeConfig.ProvFeed(fid, fname + "//" + fver, stat,createdDate, aafInstance));
- /*
- * END - AAF changes
- */
- JSONObject jauth = jfeed.optJSONObject("authorization");
- if (jauth == null) {
- continue;
- }
- JSONArray jeids = jauth.optJSONArray("endpoint_ids");
- if (jeids != null) {
- for (int ux = 0; ux < jeids.length(); ux++) {
- JSONObject ju = jeids.getJSONObject(ux);
- String login = gvas(ju, "id");
- String password = gvas(ju, "password");
- pfuv.add(new NodeConfig.ProvFeedUser(fid, login, NodeUtils.getAuthHdr(login, password)));
- }
- }
- JSONArray jeips = jauth.optJSONArray("endpoint_addrs");
- if (jeips != null) {
- for (int ix = 0; ix < jeips.length(); ix++) {
- String sn = gvas(jeips, ix);
- pfsnv.add(new NodeConfig.ProvFeedSubnet(fid, sn));
- }
- }
- }
- }
- JSONArray jsubs = jcfg.optJSONArray("subscriptions");
- if (jsubs != null) {
- for (int sx = 0; sx < jsubs.length(); sx++) {
- JSONObject jsub = jsubs.getJSONObject(sx);
- if (jsub.optBoolean("suspend", false)) {
- continue;
- }
- String sid = gvas(jsub, "subid");
- String fid = gvas(jsub, "feedid");
- JSONObject jdel = jsub.getJSONObject("delivery");
- String delurl = gvas(jdel, "url");
- String id = gvas(jdel, "user");
- String password = gvas(jdel, "password");
- boolean monly = jsub.getBoolean("metadataOnly");
- boolean use100 = jdel.getBoolean("use100");
- boolean privilegedSubscriber = jsub.getBoolean("privilegedSubscriber");
- boolean decompress = jsub.getBoolean("decompress");
- boolean followRedirect = jsub.getBoolean("follow_redirect");
- psv.add(new NodeConfig.ProvSubscription(sid, fid, delurl, id, NodeUtils.getAuthHdr(id, password), monly, use100, privilegedSubscriber, followRedirect, decompress));
- }
- }
- JSONObject jparams = jcfg.optJSONObject("parameters");
- if (jparams != null) {
- for (String pname : JSONObject.getNames(jparams)) {
- String pvalue = gvas(jparams, pname);
- if (pvalue != null) {
- ppv.add(new NodeConfig.ProvParam(pname, pvalue));
- }
- }
- String sfx = gvas(jparams, "PROV_DOMAIN");
- JSONArray jnodes = jparams.optJSONArray("NODES");
- if (jnodes != null) {
- for (int nx = 0; nx < jnodes.length(); nx++) {
- String nn = gvas(jnodes, nx);
- if (nn.indexOf('.') == -1) {
- nn = nn + "." + sfx;
- }
- pnv.add(new NodeConfig.ProvNode(nn));
- }
- }
- }
- JSONArray jingresses = jcfg.optJSONArray("ingress");
- if (jingresses != null) {
- for (int fx = 0; fx < jingresses.length(); fx++) {
- JSONObject jingress = jingresses.getJSONObject(fx);
- String fid = gvas(jingress, "feedid");
- String subnet = gvas(jingress, "subnet");
- String user = gvas(jingress, "user");
- String[] nodes = gvasa(jingress, "node");
- if (fid == null || "".equals(fid)) {
- continue;
- }
- if ("".equals(subnet)) {
- subnet = null;
- }
- if ("".equals(user)) {
- user = null;
- }
- pfiv.add(new NodeConfig.ProvForceIngress(fid, subnet, user, nodes));
- }
- }
- JSONObject jegresses = jcfg.optJSONObject("egress");
- if (jegresses != null && JSONObject.getNames(jegresses) != null) {
- for (String esid : JSONObject.getNames(jegresses)) {
- String enode = gvas(jegresses, esid);
- if (esid != null && enode != null && !"".equals(esid) && !"".equals(enode)) {
- pfev.add(new NodeConfig.ProvForceEgress(esid, enode));
- }
- }
- }
- JSONArray jhops = jcfg.optJSONArray("routing");
- if (jhops != null) {
- for (int fx = 0; fx < jhops.length(); fx++) {
- JSONObject jhop = jhops.getJSONObject(fx);
- String from = gvas(jhop, "from");
- String to = gvas(jhop, "to");
- String via = gvas(jhop, "via");
- if (from == null || to == null || via == null || "".equals(from) || "".equals(to) || "".equals(via)) {
- continue;
- }
- phv.add(new NodeConfig.ProvHop(from, to, via));
- }
- }
+ reader.close();
+ addJSONFeeds(pfv, pfuv, pfsnv, jcfg);
+ addJSONSubs(psv, jcfg);
+ addJSONParams(pnv, ppv, jcfg);
+ addJSONRoutingInformation(pfiv, pfev, phv, jcfg);
} catch (JSONException jse) {
NodeUtils.setIpAndFqdnForEelf("ProvData");
eelfLogger.error(EelfMsgs.MESSAGE_PARSING_ERROR, jse.toString());
- eelfLogger.error("NODE0201 Error parsing configuration data from provisioning server " + jse.toString(), jse);
+ eelfLogger
+ .error("NODE0201 Error parsing configuration data from provisioning server " + jse.toString(), jse);
throw new IOException(jse.toString(), jse);
}
pn = pnv.toArray(new NodeConfig.ProvNode[pnv.size()]);
@@ -266,66 +111,294 @@ public class ProvData {
ph = phv.toArray(new NodeConfig.ProvHop[phv.size()]);
}
+ private static String[] gvasa(JSONObject object, String key) {
+ return (gvasa(object.opt(key)));
+ }
+
+ private static String[] gvasa(Object object) {
+ if (object instanceof JSONArray) {
+ JSONArray jsonArray = (JSONArray) object;
+ ArrayList<String> array = new ArrayList<>();
+ for (int i = 0; i < jsonArray.length(); i++) {
+ String string = gvas(jsonArray, i);
+ if (string != null) {
+ array.add(string);
+ }
+ }
+ return (array.toArray(new String[array.size()]));
+ } else {
+ String string = gvas(object);
+ if (string == null) {
+ return (new String[0]);
+ } else {
+ return (new String[]{string});
+ }
+ }
+ }
+
+ private static String gvas(JSONArray array, int index) {
+ return (gvas(array.get(index)));
+ }
+
+ private static String gvas(JSONObject object, String key) {
+ return (gvas(object.opt(key)));
+ }
+
+ private static String gvas(Object object) {
+ if (object instanceof Boolean || object instanceof Number || object instanceof String) {
+ return (object.toString());
+ }
+ return (null);
+ }
+
/**
- * Get the raw node configuration entries
+ * Get the raw node configuration entries.
*/
public NodeConfig.ProvNode[] getNodes() {
return (pn);
}
/**
- * Get the raw parameter configuration entries
+ * Get the raw parameter configuration entries.
*/
public NodeConfig.ProvParam[] getParams() {
return (pp);
}
/**
- * Ge the raw feed configuration entries
+ * Ge the raw feed configuration entries.
*/
public NodeConfig.ProvFeed[] getFeeds() {
return (pf);
}
/**
- * Get the raw feed user configuration entries
+ * Get the raw feed user configuration entries.
*/
public NodeConfig.ProvFeedUser[] getFeedUsers() {
return (pfu);
}
/**
- * Get the raw feed subnet configuration entries
+ * Get the raw feed subnet configuration entries.
*/
public NodeConfig.ProvFeedSubnet[] getFeedSubnets() {
return (pfsn);
}
/**
- * Get the raw subscription entries
+ * Get the raw subscription entries.
*/
public NodeConfig.ProvSubscription[] getSubscriptions() {
return (ps);
}
/**
- * Get the raw forced ingress entries
+ * Get the raw forced ingress entries.
*/
public NodeConfig.ProvForceIngress[] getForceIngress() {
return (pfi);
}
/**
- * Get the raw forced egress entries
+ * Get the raw forced egress entries.
*/
public NodeConfig.ProvForceEgress[] getForceEgress() {
return (pfe);
}
/**
- * Get the raw next hop entries
+ * Get the raw next hop entries.
*/
public NodeConfig.ProvHop[] getHops() {
return (ph);
}
+
+ @Nullable
+ private String getFeedStatus(JSONObject jfeed) {
+ String stat = null;
+ if (jfeed.optBoolean("suspend", false)) {
+ stat = "Feed is suspended";
+ }
+ if (jfeed.optBoolean("deleted", false)) {
+ stat = "Feed is deleted";
+ }
+ return stat;
+ }
+
+ private void addJSONFeeds(ArrayList<ProvFeed> pfv, ArrayList<ProvFeedUser> pfuv, ArrayList<ProvFeedSubnet> pfsnv,
+ JSONObject jcfg) {
+ JSONArray jfeeds = jcfg.optJSONArray("feeds");
+ if (jfeeds != null) {
+ for (int fx = 0; fx < jfeeds.length(); fx++) {
+ addJSONFeed(pfv, pfuv, pfsnv, jfeeds, fx);
+ }
+ }
+ }
+
+ private void addJSONFeed(ArrayList<ProvFeed> pfv, ArrayList<ProvFeedUser> pfuv, ArrayList<ProvFeedSubnet> pfsnv,
+ JSONArray jfeeds, int fx) {
+ JSONObject jfeed = jfeeds.getJSONObject(fx);
+ String stat = getFeedStatus(jfeed);
+ String fid = gvas(jfeed, FEED_ID);
+ String fname = gvas(jfeed, "name");
+ String fver = gvas(jfeed, "version");
+ String createdDate = gvas(jfeed, "created_date");
+ /*
+ * START - AAF changes
+ * TDP EPIC US# 307413
+ * Passing aafInstance to ProvFeed from feeds json passed by prov to identify legacy/AAF feeds
+ */
+ String aafInstance = gvas(jfeed, "aaf_instance");
+ pfv.add(new ProvFeed(fid, fname + "//" + fver, stat, createdDate, aafInstance));
+ /*
+ * END - AAF changes
+ */
+ addJSONFeedAuthArrays(pfuv, pfsnv, jfeed, fid);
+ }
+
+ private void addJSONFeedAuthArrays(ArrayList<ProvFeedUser> pfuv, ArrayList<ProvFeedSubnet> pfsnv, JSONObject jfeed,
+ String fid) {
+ JSONObject jauth = jfeed.optJSONObject("authorization");
+ if (jauth == null) {
+ return;
+ }
+ JSONArray jeids = jauth.optJSONArray("endpoint_ids");
+ if (jeids != null) {
+ for (int ux = 0; ux < jeids.length(); ux++) {
+ JSONObject ju = jeids.getJSONObject(ux);
+ String login = gvas(ju, "id");
+ String password = gvas(ju, "password");
+ pfuv.add(new ProvFeedUser(fid, login, NodeUtils.getAuthHdr(login, password)));
+ }
+ }
+ JSONArray jeips = jauth.optJSONArray("endpoint_addrs");
+ if (jeips != null) {
+ for (int ix = 0; ix < jeips.length(); ix++) {
+ String sn = gvas(jeips, ix);
+ pfsnv.add(new ProvFeedSubnet(fid, sn));
+ }
+ }
+ }
+
+ private void addJSONSubs(ArrayList<ProvSubscription> psv, JSONObject jcfg) {
+ JSONArray jsubs = jcfg.optJSONArray("subscriptions");
+ if (jsubs != null) {
+ for (int sx = 0; sx < jsubs.length(); sx++) {
+ addJSONSub(psv, jsubs, sx);
+ }
+ }
+ }
+
+ private void addJSONSub(ArrayList<ProvSubscription> psv, JSONArray jsubs, int sx) {
+ JSONObject jsub = jsubs.getJSONObject(sx);
+ if (jsub.optBoolean("suspend", false)) {
+ return;
+ }
+ String sid = gvas(jsub, "subid");
+ String fid = gvas(jsub, FEED_ID);
+ JSONObject jdel = jsub.getJSONObject("delivery");
+ String delurl = gvas(jdel, "url");
+ String id = gvas(jdel, "user");
+ String password = gvas(jdel, "password");
+ boolean monly = jsub.getBoolean("metadataOnly");
+ boolean use100 = jdel.getBoolean("use100");
+ boolean privilegedSubscriber = jsub.getBoolean("privilegedSubscriber");
+ boolean decompress = jsub.getBoolean("decompress");
+ boolean followRedirect = jsub.getBoolean("follow_redirect");
+ psv.add(new ProvSubscription(sid, fid, delurl, id, NodeUtils.getAuthHdr(id, password), monly, use100,
+ privilegedSubscriber, followRedirect, decompress));
+ }
+
+ private void addJSONParams(ArrayList<ProvNode> pnv, ArrayList<ProvParam> ppv, JSONObject jcfg) {
+ JSONObject jparams = jcfg.optJSONObject("parameters");
+ if (jparams != null) {
+ for (String pname : JSONObject.getNames(jparams)) {
+ addJSONParam(ppv, jparams, pname);
+ }
+ addJSONNodesToParams(pnv, jparams);
+ }
+ }
+
+ private void addJSONParam(ArrayList<ProvParam> ppv, JSONObject jparams, String pname) {
+ String pvalue = gvas(jparams, pname);
+ if (pvalue != null) {
+ ppv.add(new ProvParam(pname, pvalue));
+ }
+ }
+
+ private void addJSONNodesToParams(ArrayList<ProvNode> pnv, JSONObject jparams) {
+ String sfx = gvas(jparams, "PROV_DOMAIN");
+ JSONArray jnodes = jparams.optJSONArray("NODES");
+ if (jnodes != null) {
+ for (int nx = 0; nx < jnodes.length(); nx++) {
+ String nn = gvas(jnodes, nx);
+ if (nn == null) {
+ continue;
+ }
+ if (nn.indexOf('.') == -1) {
+ nn = nn + "." + sfx;
+ }
+ pnv.add(new ProvNode(nn));
+ }
+ }
+ }
+
+ private void addJSONRoutingInformation(ArrayList<ProvForceIngress> pfiv, ArrayList<ProvForceEgress> pfev,
+ ArrayList<ProvHop> phv, JSONObject jcfg) {
+ JSONArray jingresses = jcfg.optJSONArray("ingress");
+ if (jingresses != null) {
+ for (int fx = 0; fx < jingresses.length(); fx++) {
+ addJSONIngressRoute(pfiv, jingresses, fx);
+ }
+ }
+ JSONObject jegresses = jcfg.optJSONObject("egress");
+ if (jegresses != null && JSONObject.getNames(jegresses) != null) {
+ for (String esid : JSONObject.getNames(jegresses)) {
+ addJSONEgressRoute(pfev, jegresses, esid);
+ }
+ }
+ JSONArray jhops = jcfg.optJSONArray("routing");
+ if (jhops != null) {
+ for (int fx = 0; fx < jhops.length(); fx++) {
+ addJSONRoutes(phv, jhops, fx);
+ }
+ }
+ }
+
+ private void addJSONIngressRoute(ArrayList<ProvForceIngress> pfiv, JSONArray jingresses, int fx) {
+ JSONObject jingress = jingresses.getJSONObject(fx);
+ String fid = gvas(jingress, FEED_ID);
+ String subnet = gvas(jingress, "subnet");
+ String user = gvas(jingress, "user");
+ if (fid == null || "".equals(fid)) {
+ return;
+ }
+ if ("".equals(subnet)) {
+ subnet = null;
+ }
+ if ("".equals(user)) {
+ user = null;
+ }
+ String[] nodes = gvasa(jingress, "node");
+ pfiv.add(new ProvForceIngress(fid, subnet, user, nodes));
+ }
+
+ private void addJSONEgressRoute(ArrayList<ProvForceEgress> pfev, JSONObject jegresses, String esid) {
+ String enode = gvas(jegresses, esid);
+ if (esid != null && enode != null && !"".equals(esid) && !"".equals(enode)) {
+ pfev.add(new ProvForceEgress(esid, enode));
+ }
+ }
+
+ private void addJSONRoutes(ArrayList<ProvHop> phv, JSONArray jhops, int fx) {
+ JSONObject jhop = jhops.getJSONObject(fx);
+ String from = gvas(jhop, "from");
+ String to = gvas(jhop, "to");
+ String via = gvas(jhop, "via");
+ if (from == null || to == null || via == null || "".equals(from) || "".equals(to) || "".equals(via)) {
+ return;
+ }
+ phv.add(new ProvHop(from, to, via));
+ }
}
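The constructor now delegates each top-level JSON section to a small addJSON* helper. A minimal parse sketch, assuming the datarouter-node classes and org.json are on the classpath; the JSON payload and expected counts are illustrative:

    import java.io.IOException;
    import java.io.StringReader;
    import org.onap.dmaap.datarouter.node.ProvData;

    public class ProvDataSketch {

        public static void main(String[] args) throws IOException {
            String json = "{\"feeds\": [], \"subscriptions\": [], "
                    + "\"parameters\": {\"PROV_DOMAIN\": \"example.org\", \"NODES\": [\"node1\"]}}";
            ProvData provData = new ProvData(new StringReader(json));
            // NODES entries without a dot get the PROV_DOMAIN suffix, so one node: node1.example.org
            System.out.println(provData.getNodes().length);
            // Only scalar parameters are kept; the NODES array is skipped by gvas, so one parameter remains
            System.out.println(provData.getParams().length);
        }
    }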
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java
index 3d4908e8..d1d2abb3 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/PublishId.java
@@ -25,14 +25,15 @@
package org.onap.dmaap.datarouter.node;
/**
- * Generate publish IDs
+ * Generate publish IDs.
*/
public class PublishId {
+
private long nextuid;
private String myname;
/**
- * Generate publish IDs for the specified name
+ * Generate publish IDs for the specified name.
*
* @param myname Unique identifier for this publish ID generator (usually fqdn of server)
*/
@@ -41,7 +42,8 @@ public class PublishId {
}
/**
- * Generate a Data Router Publish ID that uniquely identifies the particular invocation of the Publish API for log correlation purposes.
+ * Generate a Data Router Publish ID that uniquely identifies the particular invocation of the Publish API for log
+ * correlation purposes.
*/
public synchronized String next() {
long now = System.currentTimeMillis();
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RateLimitedOperation.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RateLimitedOperation.java
index 42af8ca0..02704553 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RateLimitedOperation.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RateLimitedOperation.java
@@ -24,13 +24,15 @@
package org.onap.dmaap.datarouter.node;
-import java.util.*;
+import java.util.Timer;
+import java.util.TimerTask;
/**
- * Execute an operation no more frequently than a specified interval
+ * Execute an operation no more frequently than a specified interval.
*/
public abstract class RateLimitedOperation implements Runnable {
+
private boolean marked; // a timer task exists
private boolean executing; // the operation is currently in progress
private boolean remark; // a request was made while the operation was in progress
@@ -39,33 +41,19 @@ public abstract class RateLimitedOperation implements Runnable {
private long mininterval;
/**
- * Create a rate limited operation
+ * Create a rate limited operation.
*
- * @param mininterval The minimum number of milliseconds after the last execution starts before a new execution can begin
- * @param timer The timer used to perform deferred executions
+ * @param mininterval The minimum number of milliseconds after the last execution starts before a new execution can
+ * begin
+ * @param timer The timer used to perform deferred executions
*/
public RateLimitedOperation(long mininterval, Timer timer) {
this.timer = timer;
this.mininterval = mininterval;
}
- private class deferred extends TimerTask {
- public void run() {
- execute();
- }
- }
-
- private synchronized void unmark() {
- marked = false;
- }
-
- private void execute() {
- unmark();
- request();
- }
-
/**
- * Request that the operation be performed by this thread or at a later time by the timer
+ * Request that the operation be performed by this thread or at a later time by the timer.
*/
public void request() {
if (premark()) {
@@ -73,7 +61,8 @@ public abstract class RateLimitedOperation implements Runnable {
}
do {
run();
- } while (demark());
+ }
+ while (demark());
}
private synchronized boolean premark() {
@@ -90,7 +79,7 @@ public abstract class RateLimitedOperation implements Runnable {
if (last + mininterval > now) {
// too soon - schedule a timer
marked = true;
- timer.schedule(new deferred(), last + mininterval - now);
+ timer.schedule(new Deferred(), last + mininterval - now);
return (true);
}
last = now;
@@ -107,4 +96,20 @@ public abstract class RateLimitedOperation implements Runnable {
}
return (false);
}
+
+ private class Deferred extends TimerTask {
+
+ public void run() {
+ execute();
+ }
+
+ private void execute() {
+ unmark();
+ request();
+ }
+
+ private synchronized void unmark() {
+ marked = false;
+ }
+ }
}
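The deferred task is now a properly named inner class, but the public contract is unchanged: a request runs immediately if the interval has passed, otherwise a timer task reruns it later. A usage sketch (interval, message and class name are illustrative):

    import java.util.Timer;
    import org.onap.dmaap.datarouter.node.RateLimitedOperation;

    public class RateLimitedSketch {

        public static void main(String[] args) throws InterruptedException {
            Timer timer = new Timer(true);
            RateLimitedOperation flush = new RateLimitedOperation(5000, timer) {
                public void run() {
                    System.out.println("flush at " + System.currentTimeMillis());
                }
            };
            flush.request(); // runs immediately on this thread
            flush.request(); // too soon, so a Deferred timer task reruns it after the interval
            Thread.sleep(6000); // keep the JVM alive long enough for the deferred run
        }
    }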
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RedirManager.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RedirManager.java
index 7e4078f8..b4a3f0a7 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RedirManager.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/RedirManager.java
@@ -24,101 +24,100 @@
package org.onap.dmaap.datarouter.node;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.io.BufferedReader;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.OutputStream;
-import java.util.Hashtable;
+import java.util.HashMap;
+import java.util.Map;
import java.util.Timer;
/**
- * Track redirections of subscriptions
+ * Track redirections of subscriptions.
*/
-public class RedirManager {
+class RedirManager {
- private Hashtable<String, String> sid2primary = new Hashtable<String, String>();
- private Hashtable<String, String> sid2secondary = new Hashtable<String, String>();
+ private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(RedirManager.class);
+ private RateLimitedOperation op;
+ private HashMap<String, String> sid2primary = new HashMap<>();
+ private HashMap<String, String> sid2secondary = new HashMap<>();
private String redirfile;
- RateLimitedOperation op;
/**
* Create a mechanism for maintaining subscription redirections.
*
* @param redirfile The file to store the redirection information.
- * @param mininterval The minimum number of milliseconds between writes to the redirection
- * information file.
+ * @param mininterval The minimum number of milliseconds between writes to the redirection information file.
* @param timer The timer thread used to run delayed file writes.
*/
- public RedirManager(String redirfile, long mininterval, Timer timer) {
+ RedirManager(String redirfile, long mininterval, Timer timer) {
this.redirfile = redirfile;
op = new RateLimitedOperation(mininterval, timer) {
public void run() {
try {
- StringBuffer sb = new StringBuffer();
- for (String s : sid2primary.keySet()) {
- sb.append(s).append(' ').append(sid2primary.get(s)).append(' ')
- .append(sid2secondary.get(s)).append('\n');
+ StringBuilder sb = new StringBuilder();
+ for (Map.Entry<String, String> entry : sid2primary.entrySet()) {
+ String key = entry.getKey();
+ String value = entry.getValue();
+ sb.append(key).append(' ').append(value).append(' ')
+ .append(sid2secondary.get(key)).append('\n');
}
try (OutputStream os = new FileOutputStream(RedirManager.this.redirfile)) {
os.write(sb.toString().getBytes());
}
} catch (Exception e) {
+ eelfLogger.error("Exception", e);
}
}
};
try {
- String s;
+ String line;
try (BufferedReader br = new BufferedReader(new FileReader(redirfile))) {
- while ((s = br.readLine()) != null) {
- s = s.trim();
- String[] sx = s.split(" ");
- if (s.startsWith("#") || sx.length != 3) {
- continue;
- }
- sid2primary.put(sx[0], sx[1]);
- sid2secondary.put(sx[0], sx[2]);
+ while ((line = br.readLine()) != null) {
+ addSubRedirInfo(line);
}
}
} catch (Exception e) {
- // missing file is normal
+ eelfLogger.debug("Missing file is normal", e);
}
}
/**
- * Set up redirection. If a request is to be sent to subscription ID sid, and that is
- * configured to go to URL primary, instead, go to secondary.
+ * Set up redirection. If a request is to be sent to subscription ID sid, and that is configured to go to URL
+ * primary, instead, go to secondary.
*
* @param sid The subscription ID to be redirected
* @param primary The URL associated with that subscription ID
* @param secondary The replacement URL to use instead
*/
- public synchronized void redirect(String sid, String primary, String secondary) {
+ synchronized void redirect(String sid, String primary, String secondary) {
sid2primary.put(sid, primary);
sid2secondary.put(sid, secondary);
op.request();
}
/**
- * Cancel redirection. If a request is to be sent to subscription ID sid, send it to its
- * primary URL.
+ * Cancel redirection. If a request is to be sent to subscription ID sid, send it to its primary URL.
*
* @param sid The subscription ID to remove from the table.
*/
- public synchronized void forget(String sid) {
+ synchronized void forget(String sid) {
sid2primary.remove(sid);
sid2secondary.remove(sid);
op.request();
}
/**
- * Look up where to send a subscription. If the primary has changed or there is no redirection,
- * use the primary. Otherwise, redirect to the secondary URL.
+ * Look up where to send a subscription. If the primary has changed or there is no redirection, use the primary.
+ * Otherwise, redirect to the secondary URL.
*
* @param sid The subscription ID to look up.
* @param primary The configured primary URL.
* @return The destination URL to really use.
*/
- public synchronized String lookup(String sid, String primary) {
+ synchronized String lookup(String sid, String primary) {
String oprim = sid2primary.get(sid);
if (primary.equals(oprim)) {
return (sid2secondary.get(sid));
@@ -129,9 +128,19 @@ public class RedirManager {
}
/**
- * Is a subscription redirected?
+     * Check whether a subscription is redirected.
*/
- public synchronized boolean isRedirected(String sid) {
+ synchronized boolean isRedirected(String sid) {
return (sid != null && sid2secondary.get(sid) != null);
}
+
+ private void addSubRedirInfo(String subRedirInfo) {
+ subRedirInfo = subRedirInfo.trim();
+ String[] sx = subRedirInfo.split(" ");
+ if (subRedirInfo.startsWith("#") || sx.length != 3) {
+ return;
+ }
+ sid2primary.put(sx[0], sx[1]);
+ sid2secondary.put(sx[0], sx[2]);
+ }
}
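
Note on the reworked RedirManager: redirect() records a (primary, secondary) pair per subscription ID, lookup() returns the secondary only while the caller's primary still equals the recorded one, and the pairs are persisted to redirfile as "sid primary secondary" lines (lines starting with '#' are ignored by addSubRedirInfo()). A minimal usage sketch under those semantics, from inside the node package since the constructor is now package-private; the URLs and file path are illustrative, not part of this change:

    Timer timer = new Timer(true);
    RedirManager mgr = new RedirManager("/tmp/redir.dat", 10000L, timer);
    mgr.redirect("5", "https://sub.example:8443/deliver", "https://backup.example:8443/deliver");
    mgr.isRedirected("5");                                   // true
    mgr.lookup("5", "https://sub.example:8443/deliver");     // primary unchanged -> secondary URL returned
    mgr.lookup("5", "https://moved.example:8443/deliver");   // primary changed -> the given primary is used
    mgr.forget("5");                                         // drop the redirection and schedule a file rewrite
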
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java
index c8a7bd0c..53e53145 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/StatusLog.java
@@ -23,19 +23,29 @@
package org.onap.dmaap.datarouter.node;
-import java.util.regex.*;
-import java.util.*;
-import java.io.*;
-import java.nio.file.*;
-import java.text.*;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
/**
- * Logging for data router delivery events (PUB/DEL/EXP)
+ * Logging for data router delivery events (PUB/DEL/EXP).
*/
public class StatusLog {
+
+ private static final String EXCEPTION = "Exception";
+ private static EELFLogger eelfLogger = EELFManager.getInstance().getLogger(StatusLog.class);
private static StatusLog instance = new StatusLog();
- private HashSet<String> toship = new HashSet<String>();
- private SimpleDateFormat filedate;
+ private SimpleDateFormat filedate = new SimpleDateFormat("-yyyyMMddHHmm");
+
private String prefix = "logs/events";
private String suffix = ".log";
private String plainfile;
@@ -43,89 +53,81 @@ public class StatusLog {
private long nexttime;
private OutputStream os;
private long intvl;
- private NodeConfigManager config = NodeConfigManager.getInstance();
+ private static NodeConfigManager config = NodeConfigManager.getInstance();
- {
- try {
- filedate = new SimpleDateFormat("-yyyyMMddHHmm");
- } catch (Exception e) {
- }
+ private StatusLog() {
}
/**
- * Parse an interval of the form xxhyymzzs and round it to the nearest whole fraction of 24 hours.If no units are specified, assume seconds.
+     * Parse an interval of the form xxhyymzzs and round it to the nearest whole fraction of 24 hours. If no units are
+ * specified, assume seconds.
*/
public static long parseInterval(String interval, int def) {
try {
- Matcher m = Pattern.compile("(?:(\\d+)[Hh])?(?:(\\d+)[Mm])?(?:(\\d+)[Ss]?)?").matcher(interval);
- if (m.matches()) {
- int dur = 0;
- String x = m.group(1);
- if (x != null) {
- dur += 3600 * Integer.parseInt(x);
- }
- x = m.group(2);
- if (x != null) {
- dur += 60 * Integer.parseInt(x);
- }
- x = m.group(3);
- if (x != null) {
- dur += Integer.parseInt(x);
- }
- if (dur < 60) {
- dur = 60;
- }
+ Matcher matcher = Pattern.compile("(?:(\\d+)[Hh])?(?:(\\d+)[Mm])?(?:(\\d+)[Ss]?)?").matcher(interval);
+ if (matcher.matches()) {
+ int dur = getDur(matcher);
int best = 86400;
int dist = best - dur;
if (dur > best) {
dist = dur - best;
}
- int base = 1;
- for (int i = 0; i < 8; i++) {
- int base2 = base;
- base *= 2;
- for (int j = 0; j < 4; j++) {
- int base3 = base2;
- base2 *= 3;
- for (int k = 0; k < 3; k++) {
- int cur = base3;
- base3 *= 5;
- int ndist = cur - dur;
- if (dur > cur) {
- ndist = dur - cur;
- }
- if (ndist < dist) {
- best = cur;
- dist = ndist;
- }
- }
- }
- }
+ best = getBest(dur, best, dist);
def = best * 1000;
}
} catch (Exception e) {
+ eelfLogger.error(EXCEPTION, e);
}
return (def);
}
- private synchronized void checkRoll(long now) throws IOException {
- if (now >= nexttime) {
- if (os != null) {
- os.close();
- os = null;
+ private static int getBest(int dur, int best, int dist) {
+ int base = 1;
+ for (int i = 0; i < 8; i++) {
+ int base2 = base;
+ base *= 2;
+ for (int j = 0; j < 4; j++) {
+ int base3 = base2;
+ base2 *= 3;
+ for (int k = 0; k < 3; k++) {
+ int cur = base3;
+ base3 *= 5;
+ int ndist = cur - dur;
+ if (dur > cur) {
+ ndist = dur - cur;
+ }
+ if (ndist < dist) {
+ best = cur;
+ dist = ndist;
+ }
+ }
}
- intvl = parseInterval(config.getEventLogInterval(), 300000);
- prefix = config.getEventLogPrefix();
- suffix = config.getEventLogSuffix();
- nexttime = now - now % intvl + intvl;
- curfile = prefix + filedate.format(new Date(nexttime - intvl)) + suffix;
- plainfile = prefix + suffix;
- notify();
}
+ return best;
+ }
+
+ private static int getDur(Matcher matcher) {
+ int dur = 0;
+ String match = matcher.group(1);
+ if (match != null) {
+ dur += 3600 * Integer.parseInt(match);
+ }
+ match = matcher.group(2);
+ if (match != null) {
+ dur += 60 * Integer.parseInt(match);
+ }
+ match = matcher.group(3);
+ if (match != null) {
+ dur += Integer.parseInt(match);
+ }
+ if (dur < 60) {
+ dur = 60;
+ }
+ return dur;
}
/**
- * Get the name of the current log file
+ * Get the name of the current log file.
*
* @return The full path name of the current event log file
*/
@@ -133,109 +135,107 @@ public class StatusLog {
try {
instance.checkRoll(System.currentTimeMillis());
} catch (Exception e) {
+ eelfLogger.error(EXCEPTION, e);
}
return (instance.curfile);
}
- private synchronized void log(String s) {
- try {
- long now = System.currentTimeMillis();
- checkRoll(now);
- if (os == null) {
- os = new FileOutputStream(curfile, true);
- (new File(plainfile)).delete();
- Files.createLink(Paths.get(plainfile), Paths.get(curfile));
- }
- os.write((NodeUtils.logts(new Date(now)) + '|' + s + '\n').getBytes());
- os.flush();
- } catch (IOException ioe) {
- }
- }
-
/**
* Log a received publication attempt.
*
- * @param pubid The publish ID assigned by the node
+ * @param pubid The publish ID assigned by the node
* @param feedid The feed id given by the publisher
* @param requrl The URL of the received request
* @param method The method (DELETE or PUT) in the received request
- * @param ctype The content type (if method is PUT and clen > 0)
- * @param clen The content length (if method is PUT)
- * @param srcip The IP address of the publisher
- * @param user The identity of the publisher
+ * @param ctype The content type (if method is PUT and clen > 0)
+ * @param clen The content length (if method is PUT)
+ * @param srcip The IP address of the publisher
+ * @param user The identity of the publisher
* @param status The status returned to the publisher
*/
- public static void logPub(String pubid, String feedid, String requrl, String method, String ctype, long clen, String srcip, String user, int status) {
- instance.log("PUB|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + srcip + "|" + user + "|" + status);
+ public static void logPub(String pubid, String feedid, String requrl, String method, String ctype, long clen,
+ String srcip, String user, int status) {
+ instance.log(
+ "PUB|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + srcip
+ + "|" + user + "|" + status);
}
/**
- * Log a data transfer error receiving a publication attempt
+ * Log a data transfer error receiving a publication attempt.
*
- * @param pubid The publish ID assigned by the node
+ * @param pubid The publish ID assigned by the node
* @param feedid The feed id given by the publisher
* @param requrl The URL of the received request
* @param method The method (DELETE or PUT) in the received request
- * @param ctype The content type (if method is PUT and clen > 0)
- * @param clen The expected content length (if method is PUT)
- * @param rcvd The content length received
- * @param srcip The IP address of the publisher
- * @param user The identity of the publisher
- * @param error The error message from the IO exception
+ * @param ctype The content type (if method is PUT and clen > 0)
+ * @param clen The expected content length (if method is PUT)
+ * @param rcvd The content length received
+ * @param srcip The IP address of the publisher
+ * @param user The identity of the publisher
+ * @param error The error message from the IO exception
*/
- public static void logPubFail(String pubid, String feedid, String requrl, String method, String ctype, long clen, long rcvd, String srcip, String user, String error) {
- instance.log("PBF|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + rcvd + "|" + srcip + "|" + user + "|" + error);
+ public static void logPubFail(String pubid, String feedid, String requrl, String method, String ctype, long clen,
+ long rcvd, String srcip, String user, String error) {
+ instance.log("PBF|" + pubid + "|" + feedid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + rcvd
+ + "|" + srcip + "|" + user + "|" + error);
}
/**
* Log a delivery attempt.
*
- * @param pubid The publish ID assigned by the node
+ * @param pubid The publish ID assigned by the node
* @param feedid The feed ID
- * @param subid The (space delimited list of) subscription ID
+ * @param subid The (space delimited list of) subscription ID
* @param requrl The URL used in the attempt
* @param method The method (DELETE or PUT) in the attempt
- * @param ctype The content type (if method is PUT, not metaonly, and clen > 0)
- * @param clen The content length (if PUT and not metaonly)
- * @param user The identity given to the subscriber
+ * @param ctype The content type (if method is PUT, not metaonly, and clen > 0)
+ * @param clen The content length (if PUT and not metaonly)
+ * @param user The identity given to the subscriber
     * @param status The status returned by the subscriber or -1 if an exception occurred trying to connect
* @param xpubid The publish ID returned by the subscriber
*/
- public static void logDel(String pubid, String feedid, String subid, String requrl, String method, String ctype, long clen, String user, int status, String xpubid) {
+ public static void logDel(String pubid, String feedid, String subid, String requrl, String method, String ctype,
+ long clen, String user, int status, String xpubid) {
if (feedid == null) {
return;
}
- instance.log("DEL|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + user + "|" + status + "|" + xpubid);
+ instance.log(
+ "DEL|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen
+ + "|" + user + "|" + status + "|" + xpubid);
}
/**
- * Log delivery attempts expired
+ * Log delivery attempts expired.
*
- * @param pubid The publish ID assigned by the node
- * @param feedid The feed ID
- * @param subid The (space delimited list of) subscription ID
- * @param requrl The URL that would be delivered to
- * @param method The method (DELETE or PUT) in the request
- * @param ctype The content type (if method is PUT, not metaonly, and clen > 0)
- * @param clen The content length (if PUT and not metaonly)
- * @param reason The reason the attempts were discontinued
+ * @param pubid The publish ID assigned by the node
+ * @param feedid The feed ID
+ * @param subid The (space delimited list of) subscription ID
+ * @param requrl The URL that would be delivered to
+ * @param method The method (DELETE or PUT) in the request
+ * @param ctype The content type (if method is PUT, not metaonly, and clen > 0)
+ * @param clen The content length (if PUT and not metaonly)
+ * @param reason The reason the attempts were discontinued
* @param attempts The number of attempts made
*/
- public static void logExp(String pubid, String feedid, String subid, String requrl, String method, String ctype, long clen, String reason, int attempts) {
+ public static void logExp(String pubid, String feedid, String subid, String requrl, String method, String ctype,
+ long clen, String reason, int attempts) {
if (feedid == null) {
return;
}
- instance.log("EXP|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen + "|" + reason + "|" + attempts);
+ instance.log(
+ "EXP|" + pubid + "|" + feedid + "|" + subid + "|" + requrl + "|" + method + "|" + ctype + "|" + clen
+ + "|" + reason + "|" + attempts);
}
/**
* Log extra statistics about unsuccessful delivery attempts.
*
- * @param pubid The publish ID assigned by the node
+ * @param pubid The publish ID assigned by the node
* @param feedid The feed ID
- * @param subid The (space delimited list of) subscription ID
- * @param clen The content length
- * @param sent The # of bytes sent or -1 if subscriber returned an error instead of 100 Continue, otherwise, the number of bytes sent before an error occurred.
+ * @param subid The (space delimited list of) subscription ID
+ * @param clen The content length
+     * @param sent   The number of bytes sent: -1 if the subscriber returned an error instead of 100 Continue; otherwise,
+     *               the number of bytes sent before an error occurred.
*/
public static void logDelExtra(String pubid, String feedid, String subid, long clen, long sent) {
if (feedid == null) {
@@ -244,6 +244,35 @@ public class StatusLog {
instance.log("DLX|" + pubid + "|" + feedid + "|" + subid + "|" + clen + "|" + sent);
}
- private StatusLog() {
+ private synchronized void checkRoll(long now) throws IOException {
+ if (now >= nexttime) {
+ if (os != null) {
+ os.close();
+ os = null;
+ }
+ intvl = parseInterval(config.getEventLogInterval(), 300000);
+ prefix = config.getEventLogPrefix();
+ suffix = config.getEventLogSuffix();
+ nexttime = now - now % intvl + intvl;
+ curfile = prefix + filedate.format(new Date(nexttime - intvl)) + suffix;
+ plainfile = prefix + suffix;
+ notify();
+ }
+ }
+
+ private synchronized void log(String string) {
+ try {
+ long now = System.currentTimeMillis();
+ checkRoll(now);
+ if (os == null) {
+ os = new FileOutputStream(curfile, true);
+ (new File(plainfile)).delete();
+ Files.createLink(Paths.get(plainfile), Paths.get(curfile));
+ }
+ os.write((NodeUtils.logts(new Date(now)) + '|' + string + '\n').getBytes());
+ os.flush();
+ } catch (IOException ioe) {
+ eelfLogger.error("IOException", ioe);
+ }
}
}
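
The parseInterval() split into getDur()/getBest() above is behaviour-preserving: the parsed duration in seconds is snapped to the nearest value of the form 2^i * 3^j * 5^k (i <= 7, j <= 3, k <= 2), i.e. to a divisor of 86400, so event log files always roll on a whole fraction of a day, and the result is returned in milliseconds. A few hand-checked calls as a sketch; the default argument 300000 simply mirrors the checkRoll() call site:

    StatusLog.parseInterval("2h", 300000);    // 7200 s already divides 86400 -> 7200000 ms
    StatusLog.parseInterval("90m", 300000);   // 5400 s already divides 86400 -> 5400000 ms
    StatusLog.parseInterval("junk", 300000);  // no match -> the supplied default (300000 ms) is returned
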
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java
index 6f74df48..2f510120 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/SubnetMatcher.java
@@ -25,33 +25,34 @@
package org.onap.dmaap.datarouter.node;
/**
- * Compare IP addresses as byte arrays to a subnet specified as a CIDR
+ * Compare IP addresses as byte arrays to a subnet specified as a CIDR.
*/
public class SubnetMatcher {
+
private byte[] sn;
private int len;
private int mask;
/**
- * Construct a subnet matcher given a CIDR
+ * Construct a subnet matcher given a CIDR.
*
* @param subnet The CIDR to match
*/
public SubnetMatcher(String subnet) {
- int i = subnet.lastIndexOf('/');
- if (i == -1) {
+ int index = subnet.lastIndexOf('/');
+ if (index == -1) {
sn = NodeUtils.getInetAddress(subnet);
len = sn.length;
} else {
- len = Integer.parseInt(subnet.substring(i + 1));
- sn = NodeUtils.getInetAddress(subnet.substring(0, i));
+ len = Integer.parseInt(subnet.substring(index + 1));
+ sn = NodeUtils.getInetAddress(subnet.substring(0, index));
mask = ((0xff00) >> (len % 8)) & 0xff;
len /= 8;
}
}
/**
- * Is the IP address in the CIDR?
+     * Check whether the IP address is within the CIDR.
*
* @param addr the IP address as bytes in network byte order
* @return true if the IP address matches.
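
As a worked example of the constructor above (assuming the existing matching method boolean matches(byte[] addr), which this diff does not change): for the CIDR "10.10.16.0/20", len ends up as 2 whole prefix bytes and mask as 0xf0, covering the top four bits of the third byte. The addresses below are illustrative only:

    SubnetMatcher sm = new SubnetMatcher("10.10.16.0/20");
    sm.matches(NodeUtils.getInetAddress("10.10.31.7"));   // true  - the first 20 bits match
    sm.matches(NodeUtils.getInetAddress("10.10.32.7"));   // false - masked bits of the third byte differ
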
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Target.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Target.java
index eb10876e..475c876c 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Target.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/Target.java
@@ -25,17 +25,18 @@
package org.onap.dmaap.datarouter.node;
/**
- * A destination to deliver a message
+ * A destination to deliver a message.
*/
public class Target {
+
private DestInfo destinfo;
private String routing;
/**
- * A destination to deliver a message
+ * A destination to deliver a message.
*
* @param destinfo Either info for a subscription ID or info for a node-to-node transfer
- * @param routing For a node-to-node transfer, what to do when it gets there.
+ * @param routing For a node-to-node transfer, what to do when it gets there.
*/
public Target(DestInfo destinfo, String routing) {
this.destinfo = destinfo;
@@ -43,21 +44,21 @@ public class Target {
}
/**
- * Add additional routing
+ * Add additional routing.
*/
public void addRouting(String routing) {
this.routing = this.routing + " " + routing;
}
/**
- * Get the destination information for this target
+ * Get the destination information for this target.
*/
public DestInfo getDestInfo() {
return (destinfo);
}
/**
- * Get the next hop information for this target
+ * Get the next hop information for this target.
*/
public String getRouting() {
return (routing);
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java
index 33e4f801..a77277f2 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java
@@ -24,63 +24,54 @@
package org.onap.dmaap.datarouter.node;
-import java.util.*;
+import java.util.HashSet;
+import java.util.Iterator;
/**
- * Manage a list of tasks to be executed when an event occurs.
- * This makes the following guarantees:
+ * Manage a list of tasks to be executed when an event occurs. This makes the following guarantees:
* <ul>
* <li>Tasks can be safely added and removed in the middle of a run.</li>
* <li>No task will be returned more than once during a run.</li>
* <li>No task will be returned when it is not, at that moment, in the list of tasks.</li>
* <li>At the moment when next() returns null, all tasks on the list have been returned during the run.</li>
- * <li>Initially and once next() returns null during a run, next() will continue to return null until startRun() is called.
+ * <li>Initially and once next() returns null during a run, next() will continue to return null until startRun() is
+ * called.</li>
* </ul>
*/
-public class TaskList {
+class TaskList {
+
private Iterator<Runnable> runlist;
- private HashSet<Runnable> tasks = new HashSet<Runnable>();
+ private HashSet<Runnable> tasks = new HashSet<>();
private HashSet<Runnable> togo;
private HashSet<Runnable> sofar;
private HashSet<Runnable> added;
private HashSet<Runnable> removed;
/**
- * Construct a new TaskList
- */
- public TaskList() {
- }
-
- /**
* Start executing the sequence of tasks.
*/
- public synchronized void startRun() {
- sofar = new HashSet<Runnable>();
- added = new HashSet<Runnable>();
- removed = new HashSet<Runnable>();
- togo = new HashSet<Runnable>(tasks);
+ synchronized void startRun() {
+ sofar = new HashSet<>();
+ added = new HashSet<>();
+ removed = new HashSet<>();
+ togo = new HashSet<>(tasks);
runlist = togo.iterator();
}
/**
- * Get the next task to execute
+ * Get the next task to execute.
*/
- public synchronized Runnable next() {
+ synchronized Runnable next() {
while (runlist != null) {
if (runlist.hasNext()) {
Runnable task = runlist.next();
- if (removed.contains(task)) {
- continue;
+ if (addTaskToSoFar(task)) {
+ return task;
}
- if (sofar.contains(task)) {
- continue;
- }
- sofar.add(task);
- return (task);
}
- if (added.size() != 0) {
+ if (!added.isEmpty()) {
togo = added;
- added = new HashSet<Runnable>();
+ added = new HashSet<>();
removed.clear();
runlist = togo.iterator();
continue;
@@ -97,7 +88,7 @@ public class TaskList {
/**
* Add a task to the list of tasks to run whenever the event occurs.
*/
- public synchronized void addTask(Runnable task) {
+ synchronized void addTask(Runnable task) {
if (runlist != null) {
added.add(task);
removed.remove(task);
@@ -108,11 +99,22 @@ public class TaskList {
/**
* Remove a task from the list of tasks to run whenever the event occurs.
*/
- public synchronized void removeTask(Runnable task) {
+ synchronized void removeTask(Runnable task) {
if (runlist != null) {
removed.add(task);
added.remove(task);
}
tasks.remove(task);
}
+
+ private boolean addTaskToSoFar(Runnable task) {
+ if (removed.contains(task)) {
+ return false;
+ }
+ if (sofar.contains(task)) {
+ return false;
+ }
+ sofar.add(task);
+ return true;
+ }
}
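
The guarantees listed in the TaskList Javadoc describe the startRun()/next() protocol used by the delivery code; a minimal sketch of that loop, with an illustrative task body:

    TaskList events = new TaskList();
    events.addTask(() -> System.out.println("run delivery attempt"));
    events.startRun();
    Runnable task;
    while ((task = events.next()) != null) {
        // Tasks may call addTask()/removeTask() here; next() will not hand back a removed
        // or already-returned task during this run.
        task.run();
    }
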
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/AuditFilter.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/AuditFilter.java
new file mode 100644
index 00000000..a278c2e3
--- /dev/null
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/AuditFilter.java
@@ -0,0 +1,38 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node.eelf;
+
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import ch.qos.logback.core.filter.Filter;
+import ch.qos.logback.core.spi.FilterReply;
+
+
+public class AuditFilter extends Filter<ILoggingEvent> {
+ @Override
+ public FilterReply decide(ILoggingEvent event) {
+ if (event.getMessage().contains("DEL|") || event.getMessage().contains("PUB|") || event.getMessage().contains("PBF|")
+ || event.getMessage().contains("EXP|") || event.getMessage().contains("DLX|")) {
+ return FilterReply.ACCEPT;
+ } else {
+ return FilterReply.DENY;
+ }
+ }
+}
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/EELFFilter.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/EELFFilter.java
deleted file mode 100644
index b733e7e4..00000000
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/EELFFilter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package org.onap.dmaap.datarouter.node.eelf;
-
-import ch.qos.logback.classic.spi.ILoggingEvent;
-import ch.qos.logback.core.filter.Filter;
-import ch.qos.logback.core.spi.FilterReply;
-
-/*
- * When EELF functionality added it default started logging Jetty logs as well which in turn stopped existing functionality of logging jetty statements in node.log
- * added code in logback.xml to add jetty statements in node.log.
- * This class removes extran EELF statements from node.log since they are being logged in apicalls.log
- */
-public class EELFFilter extends Filter<ILoggingEvent> {
- @Override
- public FilterReply decide(ILoggingEvent event) {
- if (event.getMessage().contains("EELF")) {
- return FilterReply.DENY;
- } else {
- return FilterReply.ACCEPT;
- }
- }
-}
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/JettyFilter.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/JettyFilter.java
new file mode 100644
index 00000000..69f51d82
--- /dev/null
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/JettyFilter.java
@@ -0,0 +1,37 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node.eelf;
+
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import ch.qos.logback.core.filter.Filter;
+import ch.qos.logback.core.spi.FilterReply;
+
+
+public class JettyFilter extends Filter<ILoggingEvent> {
+ @Override
+ public FilterReply decide(ILoggingEvent event) {
+ if (event.getMessage().contains("org.eclipse.jetty")) {
+ return FilterReply.ACCEPT;
+ } else {
+ return FilterReply.DENY;
+ }
+ }
+}
diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/MetricsFilter.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/MetricsFilter.java
new file mode 100644
index 00000000..0fa57d4a
--- /dev/null
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/MetricsFilter.java
@@ -0,0 +1,42 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node.eelf;
+
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import ch.qos.logback.core.filter.Filter;
+import ch.qos.logback.core.spi.FilterReply;
+
+
+public class MetricsFilter extends Filter<ILoggingEvent> {
+ @Override
+ public FilterReply decide(ILoggingEvent event) {
+ if (event.getLevel().equals(Level.INFO) && !event.getMessage().contains("jetty")) {
+ if (!event.getMessage().contains("DEL|") && !event.getMessage().contains("PUB|") && !event.getMessage().contains(
+ "PBF|") && !event.getMessage().contains("EXP|") && !event.getMessage().contains("DLX|")) {
+ return FilterReply.ACCEPT;
+ }
+ } else {
+ return FilterReply.DENY;
+ }
+ return FilterReply.DENY;
+ }
+}
diff --git a/datarouter-node/src/main/resources/docker/Dockerfile b/datarouter-node/src/main/resources/docker/Dockerfile
index b07b3ef3..01880bbb 100644
--- a/datarouter-node/src/main/resources/docker/Dockerfile
+++ b/datarouter-node/src/main/resources/docker/Dockerfile
@@ -35,6 +35,6 @@ ENTRYPOINT ["sh", "startup.sh"]
RUN addgroup -S -g 1001 onap \
&& adduser -S -u 1000 datarouter -G onap \
- && chown -R datarouter:onap /opt/
+ && chown -R datarouter:onap /opt/ /var/
 USER datarouter
\ No newline at end of file
diff --git a/datarouter-node/src/main/resources/logback.xml b/datarouter-node/src/main/resources/logback.xml
index 2ce2050c..dc19cb6f 100644
--- a/datarouter-node/src/main/resources/logback.xml
+++ b/datarouter-node/src/main/resources/logback.xml
@@ -8,9 +8,9 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
+ * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
@@ -20,33 +20,19 @@
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*
-->
-<configuration scan="true" scanPeriod="3 seconds" debug="true">
- <!--<jmxConfigurator /> -->
- <!-- directory path for all other type logs -->
- <!-- property name="logDir" value="/home/eby/dr2/logs" / -->
- <property name="logDir" value="/opt/app/datartr/logs" />
+<configuration scan="true" scanPeriod="3 seconds" debug="false">
- <!-- directory path for debugging type logs -->
- <!-- property name="debugDir" value="/home/eby/dr2/debug-logs" /-->
-
- <!-- specify the component name
- <ECOMP-component-name>::= "MSO" | "DCAE" | "ASDC " | "AAI" |"Policy" | "SDNC" | "AC" -->
- <!-- This creates the MSO directory in in the LogDir which is not needed, mentioned last directory of the path-->
- <!-- property name="componentName" value="logs"></property -->
+ <property name="logDir" value="/var/log/onap/datarouter" />
+ <!-- log file names -->
+ <property name="auditLog" value="audit" />
+ <property name="errorLog" value="error" />
+ <property name="debugLog" value="debug" />
+ <property name="metricsLog" value="metrics" />
+ <property name="jettyLog" value="jetty" />
<!-- log file names -->
- <property name="generalLogName" value="apicalls" />
- <!-- name="securityLogName" value="security" -->
- <!-- name="performanceLogName" value="performance" -->
- <!-- name="serverLogName" value="server" -->
- <!-- name="policyLogName" value="policy"-->
- <property name="errorLogName" value="errors" />
- <!-- name="metricsLogName" value="metrics" -->
- <property name="jettyAndNodeLogName" value="node"></property>
- <property name="defaultPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%X{RequestId}|%X{InvocationId}|%X{ServiceInstanceId}|%thread|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{Timer}|%msg%n" />
- <property name="jettyAndNodeLoggerPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%thread|%.-5level|%msg%n" />
+ <property name="defaultPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%X{RequestId}|%X{InvocationId}|%X{ServiceInstanceId}|%thread|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{Timer}%n|%msg%n" />
<property name="logDirectory" value="${logDir}" />
- <!-- property name="debugLogDirectory" value="${debugDir}/${componentName}" /-->
<!-- Example evaluator filter applied against console appender -->
@@ -64,258 +50,147 @@
log -->
- <appender name="EELF"
+ <appender name="Audit"
class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${generalLogName}.log</file>
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>INFO</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
+ <file>${logDirectory}/${auditLog}.log</file>
+ <filter class="org.onap.dmaap.datarouter.node.eelf.AuditFilter">
</filter>
<rollingPolicy
class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${generalLogName}.%i.log.zip
+ <fileNamePattern>${logDirectory}/${auditLog}.%i.log.zip
</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>9</maxIndex>
</rollingPolicy>
<triggeringPolicy
class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
+ <maxFileSize>50MB</maxFileSize>
</triggeringPolicy>
<encoder>
<pattern>${defaultPattern}</pattern>
</encoder>
</appender>
- <appender name="asyncEELF" class="ch.qos.logback.classic.AsyncAppender">
+ <appender name="asyncAudit" class="ch.qos.logback.classic.AsyncAppender">
<queueSize>256</queueSize>
- <appender-ref ref="EELF" />
- </appender>
-
- <!-- EELF Security Appender. This appender is used to record security events
- to the security log file. Security events are separate from other loggers
- in EELF so that security log records can be captured and managed in a secure
- way separate from the other logs. This appender is set to never discard any
- events. -->
- <!--appender name="EELFSecurity"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${securityLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${securityLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
+ <appender-ref ref="Audit" />
</appender>
- <appender name="asyncEELFSecurity" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <discardingThreshold>0</discardingThreshold>
- <appender-ref ref="EELFSecurity" />
- </appender-->
+ <!-- ============================================================================ -->
- <!-- EELF Performance Appender. This appender is used to record performance
- records. -->
- <!--appender name="EELFPerformance"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${performanceLogName}.log</file>
+ <appender name="Metrics"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/${metricsLog}.log</file>
+ <filter class="org.onap.dmaap.datarouter.node.eelf.MetricsFilter">
+ </filter>
<rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${performanceLogName}.%i.log.zip
+ class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+ <fileNamePattern>${logDirectory}/${metricsLog}.%i.log.zip
</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>9</maxIndex>
</rollingPolicy>
<triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
+ class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+ <maxFileSize>50MB</maxFileSize>
</triggeringPolicy>
<encoder>
- <outputPatternAsHeader>true</outputPatternAsHeader>
<pattern>${defaultPattern}</pattern>
</encoder>
</appender>
- <appender name="asyncEELFPerformance" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFPerformance" />
- </appender-->
-
- <!-- EELF Server Appender. This appender is used to record Server related
- logging events. The Server logger and appender are specializations of the
- EELF application root logger and appender. This can be used to segregate Server
- events from other components, or it can be eliminated to record these events
- as part of the application root log. -->
- <!--appender name="EELFServer"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${serverLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${serverLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncEELFServer" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFServer" />
- </appender-->
-
- <!-- EELF Policy Appender. This appender is used to record Policy engine
- related logging events. The Policy logger and appender are specializations
- of the EELF application root logger and appender. This can be used to segregate
- Policy engine events from other components, or it can be eliminated to record
- these events as part of the application root log. -->
- <!--appender name="EELFPolicy"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${policyLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${policyLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncEELFPolicy" class="ch.qos.logback.classic.AsyncAppender">
+ <appender name="asyncMetrics" class="ch.qos.logback.classic.AsyncAppender">
<queueSize>256</queueSize>
- <appender-ref ref="EELFPolicy" >
- </appender-->
+ <appender-ref ref="Metrics" />
+ </appender>
+ <!-- ============================================================================ -->
- <!-- EELF Audit Appender. This appender is used to record audit engine
- related logging events. The audit logger and appender are specializations
- of the EELF application root logger and appender. This can be used to segregate
- Policy engine events from other components, or it can be eliminated to record
- these events as part of the application root log. -->
- <!--appender name="EELFAudit"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${auditLogName}.log</file>
+ <appender name="Debug"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/${debugLog}.log</file>
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
<rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${auditLogName}.%i.log.zip
+ class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+ <fileNamePattern>${logDirectory}/${debugLog}.%i.log.zip
</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>9</maxIndex>
</rollingPolicy>
<triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
+ class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+ <maxFileSize>50MB</maxFileSize>
</triggeringPolicy>
<encoder>
- <pattern>${defaultPattern}</pattern>
+ <pattern>${defaultPattern}</pattern>
</encoder>
</appender>
- <appender name="asyncEELFAudit" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFAudit" />
- </appender-->
-<!--appender name="EELFMetrics"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${metricsLogName}.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${metricsLogName}.%i.log.zip
- </fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
- <triggeringPolicy
- class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
- </triggeringPolicy>
- <encoder-->
- <!-- <pattern>"%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} -
- %msg%n"</pattern> -->
- <!--pattern>${defaultPattern}</pattern>
- </encoder>
+ <appender name="asyncDebug" class="ch.qos.logback.classic.AsyncAppender">
+ <queueSize>256</queueSize>
+ <appender-ref ref="Debug" />
</appender>
+ <!-- ============================================================================ -->
- <appender name="asyncEELFMetrics" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>256</queueSize>
- <appender-ref ref="EELFMetrics"/>
- </appender-->
-
- <appender name="EELFError"
+ <appender name="Error"
class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${errorLogName}.log</file>
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>ERROR</level>
+ <file>${logDirectory}/${errorLog}.log</file>
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy
class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${errorLogName}.%i.log.zip
+ <fileNamePattern>${logDirectory}/${errorLog}.%i.log.zip
</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>9</maxIndex>
</rollingPolicy>
<triggeringPolicy
class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
+ <maxFileSize>50MB</maxFileSize>
</triggeringPolicy>
<encoder>
<pattern>${defaultPattern}</pattern>
</encoder>
</appender>
- <appender name="asyncEELFError" class="ch.qos.logback.classic.AsyncAppender">
+ <appender name="asyncError" class="ch.qos.logback.classic.AsyncAppender">
<queueSize>256</queueSize>
- <appender-ref ref="EELFError"/>
+ <appender-ref ref="Error"/>
</appender>
<!-- ============================================================================ -->
- <appender name="jettyAndNodelog"
+ <appender name="Jetty"
class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/${jettyAndNodeLogName}.log</file>
- <filter class="org.onap.dmaap.datarouter.node.eelf.EELFFilter" />
+ <file>${logDirectory}/${jettyLog}.log</file>
+ <filter class="org.onap.dmaap.datarouter.node.eelf.JettyFilter" />
<rollingPolicy
class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logDirectory}/${jettyAndNodeLogName}.%i.log.zip
+ <fileNamePattern>${logDirectory}/${jettyLog}.%i.log.zip
</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>9</maxIndex>
</rollingPolicy>
<triggeringPolicy
class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>5MB</maxFileSize>
+ <maxFileSize>50MB</maxFileSize>
</triggeringPolicy>
<encoder>
- <pattern>${jettyAndNodeLoggerPattern}</pattern>
+ <pattern>${defaultPattern}</pattern>
</encoder>
</appender>
- <appender name="asyncEELFjettyAndNodelog" class="ch.qos.logback.classic.AsyncAppender">
+ <appender name="asyncJettyLog" class="ch.qos.logback.classic.AsyncAppender">
<queueSize>256</queueSize>
- <appender-ref ref="jettyAndNodelog" />
+ <appender-ref ref="Jetty" />
<includeCallerData>true</includeCallerData>
</appender>
@@ -326,49 +201,34 @@
<!-- EELF loggers -->
<!-- ============================================================================ -->
<logger name="com.att.eelf" level="info" additivity="false">
- <appender-ref ref="asyncEELF" />
+ <appender-ref ref="asyncAudit" />
</logger>
- <logger name="com.att.eelf.error" level="error" additivity="false">
- <appender-ref ref="asyncEELFError" />
- </logger>
-
- <logger name="log4j.logger.org.eclipse.jetty" additivity="false" level="info">
- <appender-ref ref="asyncEELFjettyAndNodelog"/>
- </logger>
-
- <!-- logger name="com.att.eelf.security" level="info" additivity="false">
- <appender-ref ref="asyncEELFSecurity" />
- </logger>
- <logger name="com.att.eelf.perf" level="info" additivity="false">
- <appender-ref ref="asyncEELFPerformance" />
- </logger>
- <logger name="com.att.eelf.server" level="info" additivity="false">
- <appender-ref ref="asyncEELFServer" />
- </logger>
- <logger name="com.att.eelf.policy" level="info" additivity="false">
- <appender-ref ref="asyncEELFPolicy" />
+ <logger name="com.att.eelf" level="info" additivity="false">
+ <appender-ref ref="asyncMetrics" />
</logger>
- <logger name="com.att.eelf.audit" level="info" additivity="false">
- <appender-ref ref="asyncEELFAudit" />
+ <logger name="com.att.eelf" level="debug" additivity="false">
+ <appender-ref ref="asyncDebug" />
</logger>
- <logger name="com.att.eelf.metrics" level="info" additivity="false">
- <appender-ref ref="asyncEELFMetrics" />
- </logger>
+ <logger name="com.att.eelf.error" level="error" additivity="false">
+ <appender-ref ref="asyncError" />
+ </logger>
- <logger name="com.att.eelf.debug" level="debug" additivity="false">
- <appender-ref ref="asyncEELFDebug" />
- </logger-->
+ <logger name="log4j.logger.org.eclipse.jetty" additivity="false" level="info">
+ <appender-ref ref="asyncJettyLog"/>
+ </logger>
- <root level="TRACE">
- <appender-ref ref="asyncEELF" />
- <appender-ref ref="asyncEELFError" />
- <appender-ref ref="asyncEELFjettyAndNodelog" />
+ <root level="INFO">
+ <appender-ref ref="asyncAudit" />
+ <appender-ref ref="asyncMetrics" />
+ <appender-ref ref="asyncDebug" />
+ <appender-ref ref="asyncError" />
+ <appender-ref ref="asyncJettyLog" />
</root>
</configuration>
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java
index f6737b1e..bb367186 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DRNodeCadiFilterTest.java
@@ -1,4 +1,4 @@
-/**-
+/*
* ============LICENSE_START=======================================================
* Copyright (C) 2019 Nordix Foundation.
* ================================================================================
@@ -20,6 +20,17 @@
package org.onap.dmaap.datarouter.node;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -32,19 +43,10 @@ import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
-import javax.servlet.FilterChain;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
-import static org.mockito.Mockito.*;
-
@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeConfigManager")
@PrepareForTest({CadiFilter.class})
@RunWith(PowerMockRunner.class)
-public class DRNodeCadiFilterTest
-{
+public class DRNodeCadiFilterTest {
@Mock
private PropAccess access;
@@ -67,7 +69,8 @@ public class DRNodeCadiFilterTest
}
@Test
- public void Given_doFilter_Called_And_Method_Is_GET_And_AAF_DB_Instance_Is_NULL_Then_Chain_doFilter_Called() throws Exception {
+ public void Given_doFilter_Called_And_Method_Is_GET_And_AAF_DB_Instance_Is_NULL_Then_Chain_doFilter_Called()
+ throws Exception {
PowerMockito.mockStatic(NodeConfigManager.class);
NodeConfigManager config = mock(NodeConfigManager.class);
@@ -75,12 +78,13 @@ public class DRNodeCadiFilterTest
PowerMockito.when(config.getAafInstance("/other/5")).thenReturn("legacy");
when(request.getPathInfo()).thenReturn("/publish/5");
when(request.getMethod()).thenReturn("GET");
- cadiFilter.doFilter(request,response,chain);
+ cadiFilter.doFilter(request, response, chain);
verify(chain, times(1)).doFilter(request, response);
}
@Test
- public void Given_doFilter_Called_And_Method_Is_GET_And_Path_Includes_Internal_Then_Chain_doFilter_Called() throws Exception {
+ public void Given_doFilter_Called_And_Method_Is_GET_And_Path_Includes_Internal_Then_Chain_doFilter_Called()
+ throws Exception {
PowerMockito.mockStatic(NodeConfigManager.class);
NodeConfigManager config = mock(NodeConfigManager.class);
@@ -88,12 +92,13 @@ public class DRNodeCadiFilterTest
PowerMockito.when(config.getAafInstance("/other/5")).thenReturn("legacy");
when(request.getPathInfo()).thenReturn("/internal/5");
when(request.getMethod()).thenReturn("GET");
- cadiFilter.doFilter(request,response,chain);
+ cadiFilter.doFilter(request, response, chain);
verify(chain, times(1)).doFilter(request, response);
}
@Test
- public void Given_doFilter_Called_And_Method_Is_GET_And_AAF_DB_Is_Not_Null_Then_Super_doFilter_Called() throws Exception {
+ public void Given_doFilter_Called_And_Method_Is_GET_And_AAF_DB_Is_Not_Null_Then_Super_doFilter_Called()
+ throws Exception {
PowerMockito.mockStatic(NodeConfigManager.class);
NodeConfigManager config = mock(NodeConfigManager.class);
@@ -102,20 +107,22 @@ public class DRNodeCadiFilterTest
when(request.getPathInfo()).thenReturn("/publish/5/fileId");
when(request.getMethod()).thenReturn("GET");
PowerMockito.suppress(MemberMatcher.methodsDeclaredIn(CadiFilter.class));
- cadiFilter.doFilter(request,response,chain);
+ cadiFilter.doFilter(request, response, chain);
verify(chain, times(0)).doFilter(request, response);
}
@Test
- public void Given_getFileid_Called_And_SendError_Fails_Then_Throw_IOException_And_Call_chain_doFilter() throws Exception {
+ public void Given_getFileid_Called_And_SendError_Fails_Then_Throw_IOException_And_Call_chain_doFilter()
+ throws Exception {
PowerMockito.mockStatic(NodeConfigManager.class);
NodeConfigManager config = mock(NodeConfigManager.class);
PowerMockito.when(NodeConfigManager.getInstance()).thenReturn(config);
when(request.getPathInfo()).thenReturn("/publish/5");
when(request.getMethod()).thenReturn("DELETE");
- doThrow(new IOException()).when(response).sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid request URI. Expecting <feed-publishing-url>/<fileid>. Possible missing fileid.");
- cadiFilter.doFilter(request,response,chain);
+ doThrow(new IOException()).when(response).sendError(HttpServletResponse.SC_NOT_FOUND,
+ "Invalid request URI. Expecting <feed-publishing-url>/<fileid>. Possible missing fileid.");
+ cadiFilter.doFilter(request, response, chain);
verify(chain, times(1)).doFilter(request, response);
}
}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java
index 6dc334fc..6a5f219b 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryQueueTest.java
@@ -23,48 +23,60 @@
package org.onap.dmaap.datarouter.node;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.io.File;
+import java.util.Hashtable;
+import java.util.Vector;
import org.apache.commons.lang3.reflect.FieldUtils;
+import org.jetbrains.annotations.NotNull;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
-import java.io.File;
-
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.when;
-
@RunWith(PowerMockRunner.class)
+@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeConfigManager")
public class DeliveryQueueTest {
+ @Mock
+ DeliveryQueueHelper deliveryQueueHelper;
private DeliveryQueue deliveryQueue;
@Mock
private DestInfo destInfo;
- @Mock
- DeliveryQueueHelper deliveryQueueHelper;
-
private String dirPath = "/tmp/dir001/";
- private String FileName1 = "10000000000004.fileName.M";
+ private String fileName = "10000000000004.fileName.M";
@Before
- public void setUp() {
- when(destInfo.getSpool()).thenReturn("tmp");
+ public void setUp() throws IllegalAccessException {
+ when(destInfo.getSpool()).thenReturn(dirPath);
+ when(destInfo.isPrivilegedSubscriber()).thenReturn(true);
deliveryQueue = new DeliveryQueue(deliveryQueueHelper, destInfo);
+ NodeConfigManager configManager = mockNodeConfigManager();
+ FieldUtils.writeDeclaredStaticField(StatusLog.class, "config", configManager, true);
}
@Test
public void Given_New_DeliveryQueue_Directory_Is_Created_As_Defined_By_DestInfo() {
- when(destInfo.getSpool()).thenReturn("tmp");
- File file = new File("tmp");
+ File file = new File("/tmp");
assertTrue(file.exists());
- deleteFile("tmp");
}
@Test
public void Given_Delivery_Task_Failed_And_Resume_Time_Not_Reached_Return_Null() throws Exception {
- FieldUtils.writeField(deliveryQueue,"failed",true,true);
- FieldUtils.writeField(deliveryQueue,"resumetime",System.currentTimeMillis()*2,true);
+ FieldUtils.writeField(deliveryQueue, "failed", true, true);
+ FieldUtils.writeField(deliveryQueue, "resumetime", System.currentTimeMillis() * 2, true);
assertNull(deliveryQueue.peekNext());
}
@@ -75,21 +87,155 @@ public class DeliveryQueueTest {
deliveryQueue = new DeliveryQueue(deliveryQueueHelper, destInfo);
DeliveryTask nt = deliveryQueue.getNext();
assertEquals("10000000000004.fileName", nt.getPublishId());
- deleteFile(dirPath + FileName1);
+ deleteFile(dirPath + fileName);
deleteFile(dirPath);
}
@Test
+ public void Given_Task_In_Todo_Is_Already_Cleaned_GetNext_Returns_Null() throws Exception {
+ when(deliveryQueueHelper.getExpirationTimer()).thenReturn(10000L);
+ deliveryQueue = new DeliveryQueue(deliveryQueueHelper, destInfo);
+ Vector<DeliveryTask> tasks = new Vector<>();
+ DeliveryTask task = new DeliveryTask(deliveryQueue, "123.node.datarouternew.com");
+ task.clean();
+ tasks.add(task);
+ FieldUtils.writeField(deliveryQueue, "todo", tasks, true);
+ DeliveryTask nt = deliveryQueue.getNext();
+ assertNull(nt);
+ }
+
+ @Test
+ public void Given_Task_In_Todo_Has_Resume_Time_In_Future_GetNext_Returns_Null() throws Exception {
+ when(destInfo.isPrivilegedSubscriber()).thenReturn(true);
+ when(deliveryQueueHelper.getExpirationTimer()).thenReturn(10000L);
+ deliveryQueue = new DeliveryQueue(deliveryQueueHelper, destInfo);
+ Vector<DeliveryTask> tasks = new Vector<>();
+ DeliveryTask task = new DeliveryTask(deliveryQueue, "123.node.datarouternew.com");
+ long timeInFuture = 2558366240223L;
+ task.setResumeTime(timeInFuture);
+ tasks.add(task);
+ FieldUtils.writeField(deliveryQueue, "todo", tasks, true);
+ DeliveryTask nt = deliveryQueue.getNext();
+ assertNull(nt);
+ }
+
+ @Test
+ public void Given_Task_In_Todo_Is_Expired_GetNext_Returns_Null() throws Exception {
+ when(destInfo.isPrivilegedSubscriber()).thenReturn(true);
+ when(deliveryQueueHelper.getExpirationTimer()).thenReturn(10000L);
+ deliveryQueue = new DeliveryQueue(deliveryQueueHelper, destInfo);
+ Vector<DeliveryTask> tasks = new Vector<>();
+ DeliveryTask task = new DeliveryTask(deliveryQueue, "123.node.datarouternew.com");
+ long timeInPast = 1058366240223L;
+ task.setResumeTime(timeInPast);
+ tasks.add(task);
+ FieldUtils.writeField(deliveryQueue, "todo", tasks, true);
+ DeliveryTask nt = deliveryQueue.getNext();
+ assertNull(nt);
+ }
+
+ @Test
public void Given_Delivery_Task_Cancel_And_FileId_Is_Null_Return_Zero() {
long rc = deliveryQueue.cancelTask("123.node.datarouternew.com");
assertEquals(0, rc);
}
+ @Test
+ public void Given_Delivery_Task_Is_Working_Cancel_Task_Returns_Zero() throws IllegalAccessException {
+ Hashtable<String, DeliveryTask> tasks = new Hashtable<>();
+ tasks.put("123.node.datarouternew.com", new DeliveryTask(deliveryQueue, "123.node.datarouternew.com"));
+ FieldUtils.writeField(deliveryQueue, "working", tasks, true);
+ long rc = deliveryQueue.cancelTask("123.node.datarouternew.com");
+ assertEquals(0, rc);
+ }
+
+ @Test
+ public void Given_Delivery_Task_In_Todo_Cancel_Task_Returns_Zero() throws IllegalAccessException {
+ Vector<DeliveryTask> tasks = new Vector<>();
+ tasks.add(new DeliveryTask(deliveryQueue, "123.node.datarouternew.com"));
+ FieldUtils.writeField(deliveryQueue, "todo", tasks, true);
+ long rc = deliveryQueue.cancelTask("123.node.datarouternew.com");
+ assertEquals(0, rc);
+ }
+
+ @Test
+ public void Given_Ok_Status_And_Privileged_Subscriber_Then_Set_Resume_Time_Is_Called_On_DeliveryTask() {
+ DeliveryTask deliveryTask = mockDeliveryTask();
+ deliveryQueue.reportStatus(deliveryTask, 200, "123456789.dmaap-dr-node", "delivery");
+ verify(deliveryTask, times(1)).setResumeTime(anyLong());
+ cleanUpLogging();
+ }
+
+ @Test
+ public void Given_Ok_Status_And_Not_Privileged_Subscriber_Then_Clean_Is_Called_On_DeliveryTask() {
+ DeliveryTask deliveryTask = mockDeliveryTask();
+ when(destInfo.isPrivilegedSubscriber()).thenReturn(false);
+ deliveryQueue = new DeliveryQueue(deliveryQueueHelper, destInfo);
+ deliveryQueue.reportStatus(deliveryTask, 200, "123456789.dmaap-dr-node", "delivery");
+ verify(deliveryTask, times(1)).clean();
+ cleanUpLogging();
+ }
+
+ @Test
+ public void Given_Not_Ok_Status_Then_Clean_Is_Called_On_DeliveryTask() {
+ DeliveryTask deliveryTask = mockDeliveryTask();
+ deliveryQueue.reportStatus(deliveryTask, 400, "123456789.dmaap-dr-node", "delivery");
+ verify(deliveryTask, times(1)).clean();
+ cleanUpLogging();
+ }
+
+ @Test
+ public void Given_Task_In_Working_MarkTaskSuccess_Returns_True() throws IllegalAccessException {
+ Hashtable<String, DeliveryTask> tasks = new Hashtable<>();
+ tasks.put("123.node.datarouternew.com", new DeliveryTask(deliveryQueue, "123.node.datarouternew.com"));
+ FieldUtils.writeField(deliveryQueue, "working", tasks, true);
+ assertTrue(deliveryQueue.markTaskSuccess("123.node.datarouternew.com"));
+ }
+
+ @Test
+ public void Given_Task_In_Retry_MarkTaskSuccess_Returns_True() throws IllegalAccessException {
+ Hashtable<String, DeliveryTask> tasks = new Hashtable<>();
+ tasks.put("123.node.datarouternew.com", new DeliveryTask(deliveryQueue, "123.node.datarouternew.com"));
+ FieldUtils.writeField(deliveryQueue, "retry", tasks, true);
+ assertTrue(deliveryQueue.markTaskSuccess("123.node.datarouternew.com"));
+ }
+
+ @Test
+ public void Given_Task_Does_Not_Exist_MarkTaskSuccess_Returns_False() {
+ assertFalse(deliveryQueue.markTaskSuccess("false.pubId.com"));
+ }
+
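+    // Remove any "null*" log files that the reportStatus tests leave in the working directory.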
+ private void cleanUpLogging() {
+ final File currentDir = new File(System.getProperty("user.dir"));
+ final File[] files = currentDir.listFiles((file, name) -> name.matches("null.*"));
+ if (files != null) {
+ for (final File file : files) {
+ file.delete();
+ }
+ }
+ }
+
+ @NotNull
+ private DeliveryTask mockDeliveryTask() {
+ DeliveryTask deliveryTask = mock(DeliveryTask.class);
+ when(deliveryTask.getPublishId()).thenReturn("123456789.dmaap-dr-node");
+ when(deliveryTask.getFeedId()).thenReturn("1");
+ when(deliveryTask.getSubId()).thenReturn("1");
+ when(deliveryTask.getURL()).thenReturn("http://subcriber.com:7070/delivery");
+ when(deliveryTask.getCType()).thenReturn("application/json");
+ when(deliveryTask.getLength()).thenReturn(486L);
+ return deliveryTask;
+ }
+
+ private NodeConfigManager mockNodeConfigManager() {
+ NodeConfigManager config = mock(NodeConfigManager.class);
+ PowerMockito.when(config.getEventLogInterval()).thenReturn("30000");
+ return config;
+ }
+
private void prepareFiles() throws Exception {
createFolder(dirPath);
- createFile(FileName1, dirPath);
- String[] files = new String[2];
- files[0] = dirPath + FileName1;
+ createFile(fileName, dirPath);
}
private void createFolder(String dirName) {
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTaskTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTaskTest.java
new file mode 100644
index 00000000..a0f05779
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTaskTest.java
@@ -0,0 +1,133 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({DeliveryTask.class})
+public class DeliveryTaskTest {
+
+ @Mock
+ private DeliveryQueue deliveryQueue;
+
+ private ExecutorService executorService;
+
+ @Before
+ public void setUp() throws Exception {
+ DestInfo destInfo = getPrivDestInfo();
+        deliveryQueue = mockDeliveryQueue(destInfo);
+
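+        // Stub URL construction and its HttpURLConnection so DeliveryTask delivers to a mocked endpoint that responds with 200.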
+ URL url = PowerMockito.mock(URL.class);
+ HttpURLConnection urlConnection = PowerMockito.mock(HttpURLConnection.class);
+ OutputStream outputStream = PowerMockito.mock(OutputStream.class);
+
+ PowerMockito.whenNew(URL.class).withParameterTypes(String.class).withArguments(Mockito.anyString())
+ .thenReturn(url);
+ PowerMockito.when(urlConnection.getOutputStream()).thenReturn(outputStream);
+ PowerMockito.when(url.openConnection()).thenReturn(urlConnection);
+ PowerMockito.when(urlConnection.getHeaderField(0)).thenReturn("PUT");
+ PowerMockito.when(urlConnection.getResponseCode()).thenReturn(200);
+ }
+
+ @After
+ public void tearDown() {
+ }
+
+
+ @Test
+ public void Validate_Delivery_Task_Equals() {
+ DeliveryTask task = new DeliveryTask(deliveryQueue, "123456789.test-dr-datafile");
+ DeliveryTask task2 = new DeliveryTask(deliveryQueue, "123456789.test-dr-datafile");
+ Assert.assertEquals(task, task2);
+ Assert.assertEquals(task.hashCode(), task2.hashCode());
+ Assert.assertEquals(task.toString(), task2.toString());
+ Assert.assertEquals(task.getPublishId(), task2.getPublishId());
+ Assert.assertEquals(task.getSubId(), task2.getSubId());
+ Assert.assertEquals(task.getFeedId(), task2.getFeedId());
+ Assert.assertEquals(task.getLength(), task2.getLength());
+ Assert.assertEquals(task.isCleaned(), task2.isCleaned());
+ Assert.assertEquals(task.getDate(), task2.getDate());
+ Assert.assertEquals(task.getURL(), task2.getURL());
+ Assert.assertEquals(task.getCType(), task2.getCType());
+ Assert.assertEquals(task.getMethod(), task2.getMethod());
+ Assert.assertEquals(task.getFileId(), task2.getFileId());
+ Assert.assertEquals(task.getAttempts(), task2.getAttempts());
+ Assert.assertEquals(task.getFollowRedirects(), task2.getFollowRedirects());
+
+ Assert.assertEquals(0, task.compareTo(task2));
+ }
+
+ @Test
+ public void Validate_Delivery_Tasks_Success_For_Standard_File() throws InterruptedException {
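+        // Run the task against the mocked connection; this exercises the standard-file delivery path end to end.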
+ DeliveryTask task = new DeliveryTask(deliveryQueue, "123456789.test-dr-node");
+ executorService = Executors.newSingleThreadExecutor();
+ executorService.execute(task);
+
+ executorService.shutdown();
+ executorService.awaitTermination(2, TimeUnit.SECONDS);
+ }
+
+ @Test
+ public void Validate_Delivery_Tasks_Success_For_Compressed_File() throws InterruptedException {
+
+ DeliveryTask task = new DeliveryTask(deliveryQueue, "123456789.test-dr-node.gz");
+ executorService = Executors.newSingleThreadExecutor();
+ executorService.execute(task);
+
+ executorService.shutdown();
+ executorService.awaitTermination(2, TimeUnit.SECONDS);
+ }
+
+ private DestInfo getPrivDestInfo() {
+ return new DestInfoBuilder().setName("n:" + "dmaap-dr-node")
+ .setSpool(System.getProperty("user.dir") + "/src/test/resources/delivery_files")
+ .setSubid("1").setLogdata("n2n-dmaap-dr-node").setUrl("https://dmaap-dr-node:8443/internal/publish")
+ .setAuthuser("dmaap-dr-node").setAuthentication("Auth").setMetaonly(false).setUse100(true)
+ .setPrivilegedSubscriber(true).setFollowRedirects(false).setDecompress(true).createDestInfo();
+ }
+
+    private DeliveryQueue mockDeliveryQueue(DestInfo destInfo) {
+ DeliveryQueue mockedDeliveryQueue = mock(DeliveryQueue.class);
+ when(mockedDeliveryQueue.getDestinationInfo()).thenReturn(destInfo);
+ return mockedDeliveryQueue;
+ }
+
+}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
index 08120073..18d9d56c 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
@@ -22,86 +22,124 @@
******************************************************************************/
package org.onap.dmaap.datarouter.node;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Hashtable;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.junit.After;
+import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
+import org.onap.dmaap.datarouter.node.Delivery.DelItem;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
-import java.io.File;
-import java.io.IOException;
-import java.util.Hashtable;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
@RunWith(PowerMockRunner.class)
@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeConfigManager")
public class DeliveryTest {
- @Mock
- private DeliveryQueue deliveryQueue;
+ @Mock
+ private DeliveryQueue deliveryQueue;
+ @Mock
+ private NodeConfigManager config;
+ private File nDir = new File("tmp/n");
+ private File newNDir = new File("tmp/n/0");
+ private File newNFile = new File("tmp/n/0/testN.txt");
+ private File sDir = new File("tmp/s");
+ private File newSDir = new File("tmp/s/0/1");
+ private File newSpoolFile = new File("tmp/s/0/1/123456789.dmaap-dr-node");
+ private File spoolFileMeta = new File("tmp/s/0/1/123456789.dmaap-dr-node.M");
+
+ @Before
+ public void setUp() throws IOException {
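+        // Build a minimal node (tmp/n) and spool (tmp/s) directory tree with one spool file and its metadata for Delivery to scan.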
+ nDir.mkdirs();
+ sDir.mkdirs();
+ newNDir.mkdirs();
+ newNFile.createNewFile();
+ newSDir.mkdirs();
+ newSpoolFile.createNewFile();
+ spoolFileMeta.createNewFile();
+ config = mockNodeConfigManager();
+ }
+
+ @Test
+ public void Validate_Reset_Queue_Calls_Reset_Queue_On_Delivery_Queue_Object() throws IllegalAccessException {
+ Delivery delivery = new Delivery(config);
+ HashMap<String, DeliveryQueue> dqs = new HashMap<>();
+ dqs.put("tmp/s/0/1", deliveryQueue);
+ FieldUtils.writeDeclaredField(delivery, "dqs", dqs, true);
+ delivery.resetQueue("tmp/s/0/1");
+ verify(deliveryQueue, times(1)).resetQueue();
+ }
- private File nDir = new File("tmp/n");
- private File sDir = new File("tmp/s");
+ @Test
+ public void Validate_Mark_Success_Calls_Mark_Success_On_Delivery_Queue_Object() throws IllegalAccessException {
+ Delivery delivery = new Delivery(config);
+ HashMap<String, DeliveryQueue> dqs = new HashMap<>();
+ dqs.put("tmp/s/0/1", deliveryQueue);
+ FieldUtils.writeDeclaredField(delivery, "dqs", dqs, true);
+ delivery.markTaskSuccess("tmp/s/0/1", "123456789.dmaap-dr-node");
+ verify(deliveryQueue, times(1)).markTaskSuccess("123456789.dmaap-dr-node");
+ }
- @Before
- public void setUp() throws IOException {
- nDir.mkdirs();
- sDir.mkdirs();
- File newNDir = new File("tmp/n/0");
- newNDir.mkdirs();
- File newNFile = new File("tmp/n/0/testN.txt");
- newNFile.createNewFile();
- File newSDir = new File("tmp/s/0/1");
- newSDir.mkdirs();
- File newSpoolFile = new File("tmp/s/0/1/testSpool.txt");
- newSpoolFile.createNewFile();
- }
+ @Test
+ public void Validate_DelItem_With_Equal_Spool_And_PubId_Are_Equal() {
+ DelItem delItem1 = new DelItem("123456789.dmaap-dr-node", "tmp/s/0/1");
+ DelItem delItem2 = new DelItem("123456789.dmaap-dr-node", "tmp/s/0/1");
+ Assert.assertEquals(delItem1, delItem2);
+ Assert.assertEquals(0, delItem1.compareTo(delItem2));
+ }
- @Test
- public void Validate_Reset_Queue_Calls_Reset_Queue_On_Delivery_Queue_Object() throws IllegalAccessException {
- NodeConfigManager config = mockNodeConfigManager();
- Delivery delivery = new Delivery(config);
- Hashtable<String, DeliveryQueue> dqs = new Hashtable<>();
- dqs.put("spool/s/0/1", deliveryQueue);
- FieldUtils.writeDeclaredField(delivery, "dqs", dqs, true);
- delivery.resetQueue("spool/s/0/1");
- verify(deliveryQueue, times(1)).resetQueue();
- }
+ @Test
+ public void Validate_DelItem_With_Unequal_Spool_And_PubId_Are_Not_Equal() {
+ DelItem delItem1 = new DelItem("123456789.dmaap-dr-node", "tmp/s/0/1");
+ DelItem delItem2 = new DelItem("000000000.dmaap-dr-node", "tmp/s/0/2");
+ Assert.assertNotEquals(delItem1, delItem2);
+ Assert.assertNotEquals(0, delItem1.compareTo(delItem2));
+ }
- @After
- public void tearDown() {
- nDir.delete();
- sDir.delete();
- File tmpDir = new File("tmp");
- tmpDir.delete();
- }
+ @After
+ public void tearDown() {
+ newSpoolFile.delete();
+ spoolFileMeta.delete();
+ newNFile.delete();
+ newNDir.delete();
+ newSDir.delete();
+ new File("tmp/s/0").delete();
+ nDir.delete();
+ sDir.delete();
+ File tmpDir = new File("tmp");
+ tmpDir.delete();
+ }
- private NodeConfigManager mockNodeConfigManager() {
- PowerMockito.mockStatic(NodeConfigManager.class);
- NodeConfigManager config = mock(NodeConfigManager.class);
- PowerMockito.when(config.isConfigured()).thenReturn(true);
- PowerMockito.when(config.getAllDests()).thenReturn(createDestInfoObjects());
- PowerMockito.when(config.getFreeDiskStart()).thenReturn(0.49);
- PowerMockito.when(config.getFreeDiskStop()).thenReturn(0.5);
- PowerMockito.when(config.getDeliveryThreads()).thenReturn(0);
- PowerMockito.when(config.getSpoolBase()).thenReturn("tmp");
- return config;
- }
+ private NodeConfigManager mockNodeConfigManager() {
+ NodeConfigManager config = mock(NodeConfigManager.class);
+ PowerMockito.when(config.isConfigured()).thenReturn(true);
+ PowerMockito.when(config.getAllDests()).thenReturn(createDestInfoObjects());
+ PowerMockito.when(config.getFreeDiskStart()).thenReturn(0.9);
+ PowerMockito.when(config.getFreeDiskStop()).thenReturn(0.2);
+ PowerMockito.when(config.getDeliveryThreads()).thenReturn(0);
+ PowerMockito.when(config.getSpoolBase()).thenReturn("tmp");
+ return config;
+ }
- private DestInfo[] createDestInfoObjects() {
- DestInfo[] destInfos = new DestInfo[1];
- DestInfo destInfo = new DestInfo.DestInfoBuilder().setName("node.datarouternew.com").setSpool("spool/s/0/1").setSubid("1")
- .setLogdata("logs/").setUrl("/subs/1").setAuthuser("user1").setAuthentication("Basic dXNlcjE6cGFzc3dvcmQx")
- .setMetaonly(false).setUse100(true).setPrivilegedSubscriber(false).setFollowRedirects(false)
- .setDecompress(false).createDestInfo();
- destInfos[0] = destInfo;
- return destInfos;
- }
+ private DestInfo[] createDestInfoObjects() {
+ DestInfo[] destInfos = new DestInfo[1];
+ DestInfo destInfo = new DestInfoBuilder().setName("node.datarouternew.com").setSpool("tmp/s/0/1")
+ .setSubid("1")
+ .setLogdata("logs/").setUrl("/subs/1").setAuthuser("user1")
+ .setAuthentication("Basic dXNlcjE6cGFzc3dvcmQx")
+ .setMetaonly(false).setUse100(true).setPrivilegedSubscriber(false).setFollowRedirects(false)
+ .setDecompress(false).createDestInfo();
+ destInfos[0] = destInfo;
+ return destInfos;
+ }
}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DestInfoTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DestInfoTest.java
new file mode 100644
index 00000000..ed629bf2
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DestInfoTest.java
@@ -0,0 +1,77 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class DestInfoTest {
+
+ private DestInfo destInfo;
+
+ @Before
+ public void setUp() {
+ destInfo = getDestInfo("/src/test/resources");
+ }
+
+ @Test
+ public void Validate_Getters_And_Setters() {
+ assertEquals("n:dmaap-dr-node", destInfo.getName());
+ assertEquals("/src/test/resources", destInfo.getSpool());
+ assertEquals("1", destInfo.getSubId());
+ assertEquals("n2n-dmaap-dr-node", destInfo.getLogData());
+ assertEquals("https://dmaap-dr-node:8443/internal/publish", destInfo.getURL());
+ assertEquals("dmaap-dr-node", destInfo.getAuthUser());
+ assertEquals("Auth", destInfo.getAuth());
+ assertFalse(destInfo.isMetaDataOnly());
+ assertTrue(destInfo.isUsing100());
+ assertFalse(destInfo.isPrivilegedSubscriber());
+ assertFalse(destInfo.isFollowRedirects());
+ assertFalse(destInfo.isDecompress());
+ }
+
+ @Test
+ public void Validate_DestInfo_Objects_Are_Equal() {
+ DestInfo destInfo2 = getDestInfo("/src/test/resources");
+ assertEquals(destInfo, destInfo2);
+ assertEquals(destInfo.hashCode(), destInfo2.hashCode());
+ }
+
+ @Test
+ public void Validate_DestInfo_Objects_Are_Not_Equal() {
+ DestInfo destInfo2 = getDestInfo("notEqual");
+ assertNotEquals(destInfo, destInfo2);
+ assertNotEquals(destInfo.hashCode(), destInfo2.hashCode());
+ }
+
+ private DestInfo getDestInfo(String spool) {
+ return new DestInfoBuilder().setName("n:" + "dmaap-dr-node").setSpool(spool)
+ .setSubid("1").setLogdata("n2n-dmaap-dr-node").setUrl("https://dmaap-dr-node:8443/internal/publish")
+ .setAuthuser("dmaap-dr-node").setAuthentication("Auth").setMetaonly(false).setUse100(true)
+ .setPrivilegedSubscriber(false).setFollowRedirects(false).setDecompress(false).createDestInfo();
+ }
+
+}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/LogManagerTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/LogManagerTest.java
new file mode 100644
index 00000000..da690206
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/LogManagerTest.java
@@ -0,0 +1,107 @@
+/*
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Timer;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.reflect.FieldUtils;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.onap.dmaap.datarouter.node.LogManager.Uploader;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.node.NodeConfigManager"})
+public class LogManagerTest {
+
+ @Mock
+ private NodeConfigManager config;
+
+ private LogManager logManager;
+
+ @Before
+ public void setUp() throws IllegalAccessException {
+ mockNodeConfigManager();
+ FieldUtils.writeDeclaredStaticField(StatusLog.class, "config", config, true);
+ logManager = new LogManager(config);
+ }
+
+ @AfterClass
+ public static void tearDown() throws IOException {
+ File spoolDir = new File(System.getProperty("user.dir") + "/src/test/resources/.spool");
+ FileUtils.deleteDirectory(spoolDir);
+ }
+
+ @Test
+ public void Verify_LogManager_Attempts_To_Deliver_Log_Files_To_Prov() {
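+        // run() starts the upload worker; after a short wait the .spool/.lastqueued marker should have been written.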
+ logManager.run();
+ try {
+ Thread.sleep(1000);
+ } catch (Exception e) {
+ System.out.println("Exception caught: " + e.getMessage());
+ }
+ File file = new File(System.getProperty("user.dir") + "/src/test/resources/.spool/.lastqueued");
+ assertTrue(file.isFile());
+ }
+
+ @Test
+ public void Validate_Uploader_Getters() {
+ Uploader worker = logManager.getWorker();
+ assertEquals(10000L, worker.getInitFailureTimer());
+ assertEquals(600000L, worker.getWaitForFileProcessFailureTimer());
+ assertEquals(2.0, worker.getFailureBackoff(), 0.0);
+ assertEquals(150000L, worker.getMaxFailureTimer());
+ assertEquals(604800000L, worker.getExpirationTimer());
+ assertEquals(10000, worker.getFairFileLimit());
+ assertEquals(86400000, worker.getFairTimeLimit());
+ assertEquals("https://dmaap-dr-prov:8443/internal/logs",
+ worker.getDestURL(new DestInfoBuilder().createDestInfo(), "String"));
+ assertFalse(worker.handleRedirection(new DestInfoBuilder().createDestInfo(), "", ""));
+ assertFalse(worker.isFollowRedirects());
+ assertNull(worker.getFeedId(""));
+ }
+
+ private void mockNodeConfigManager() {
+ PowerMockito.when(config.getLogDir()).thenReturn(System.getProperty("user.dir") + "/src/test/resources");
+ PowerMockito.when(config.getTimer()).thenReturn(new Timer("Node Configuration Timer", true));
+ PowerMockito.when(config.getEventLogPrefix())
+ .thenReturn(System.getProperty("user.dir") + "/src/test/resources/events");
+ PowerMockito.when(config.getEventLogSuffix()).thenReturn(".log");
+ PowerMockito.when(config.getLogRetention()).thenReturn(94608000000L);
+ PowerMockito.when(config.getEventLogInterval()).thenReturn("30s");
+ PowerMockito.when(config.getPublishId()).thenReturn("123456789.dmaap-dr-node");
+ PowerMockito.when(config.getEventLogUrl()).thenReturn("https://dmaap-dr-prov:8443/internal/logs");
+ }
+
+}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java
index 5e357373..b03407bf 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeConfigTest.java
@@ -22,6 +22,9 @@
******************************************************************************/
package org.onap.dmaap.datarouter.node;
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.Assert;
@@ -31,23 +34,114 @@ import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
-import java.io.IOException;
-import java.io.Reader;
-import java.io.StringReader;
-
@RunWith(PowerMockRunner.class)
-@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.node.ProvData",
- "org.onap.dmaap.datarouter.node.NodeUtils"})
+@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.node.ProvData"})
public class NodeConfigTest {
private static NodeConfig nodeConfig;
@BeforeClass
- public static void setUp() throws IOException{
+ public static void setUp() throws IOException {
ProvData provData = setUpProvData();
nodeConfig = new NodeConfig(provData, "Name", "spool/dir", 80, "Key");
}
+ private static ProvData setUpProvData() throws IOException {
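+        // Assemble a minimal provisioning JSON (feed, subscription, parameters, ingress, egress, routing) and parse it into ProvData.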
+ JSONObject provData = new JSONObject();
+ createValidFeed(provData);
+ createValidSubscription(provData);
+ createValidParameters(provData);
+ createValidIngressValues(provData);
+ createValidEgressValues(provData);
+ createValidRoutingValues(provData);
+ Reader reader = new StringReader(provData.toString());
+ return new ProvData(reader);
+ }
+
+ private static void createValidFeed(JSONObject provData) {
+ JSONArray feeds = new JSONArray();
+ JSONObject feed = new JSONObject();
+ JSONObject auth = new JSONObject();
+ JSONArray endpointIds = new JSONArray();
+ JSONArray endpointAddrs = new JSONArray();
+ JSONObject endpointId = new JSONObject();
+ feed.put("feedid", "1");
+ feed.put("name", "Feed1");
+ feed.put("version", "m1.0");
+ feed.put("suspend", false);
+ feed.put("deleted", false);
+ endpointId.put("id", "user1");
+ endpointId.put("password", "password1");
+ endpointIds.put(endpointId);
+ auth.put("endpoint_ids", endpointIds);
+ endpointAddrs.put("172.0.0.1");
+ auth.put("endpoint_addrs", endpointAddrs);
+ feed.put("authorization", auth);
+ feed.put("aaf_instance", "legacy");
+ feeds.put(feed);
+ provData.put("feeds", feeds);
+ }
+
+ private static void createValidSubscription(JSONObject provData) {
+ JSONArray subscriptions = new JSONArray();
+ JSONObject subscription = new JSONObject();
+ JSONObject delivery = new JSONObject();
+ subscription.put("subid", "1");
+ subscription.put("feedid", "1");
+ subscription.put("suspend", false);
+ subscription.put("metadataOnly", false);
+ delivery.put("url", "https://172.0.0.2");
+ delivery.put("user", "user1");
+ delivery.put("password", "password1");
+ delivery.put("use100", true);
+ subscription.put("delivery", delivery);
+ subscription.put("privilegedSubscriber", false);
+ subscription.put("follow_redirect", false);
+ subscription.put("decompress", false);
+ subscriptions.put(subscription);
+ provData.put("subscriptions", subscriptions);
+ }
+
+ private static void createValidParameters(JSONObject provData) {
+ JSONObject parameters = new JSONObject();
+ JSONArray nodes = new JSONArray();
+ parameters.put("PROV_NAME", "prov.datarouternew.com");
+ parameters.put("DELIVERY_INIT_RETRY_INTERVAL", "10");
+ parameters.put("DELIVERY_MAX_AGE", "86400");
+ parameters.put("PROV_DOMAIN", "");
+ nodes.put("172.0.0.4");
+ parameters.put("NODES", nodes);
+ provData.put("parameters", parameters);
+ }
+
+ private static void createValidIngressValues(JSONObject provData) {
+ JSONArray ingresses = new JSONArray();
+ JSONObject ingress = new JSONObject();
+ ingress.put("feedid", "1");
+ ingress.put("subnet", "");
+ ingress.put("user", "");
+ ingress.put("node", "172.0.0.4");
+ ingresses.put(ingress);
+ provData.put("ingress", ingresses);
+ }
+
+ private static void createValidEgressValues(JSONObject provData) {
+ JSONObject egress = new JSONObject();
+ egress.put("subid", "1");
+ egress.put("nodeid", "172.0.0.4");
+ provData.put("egress", egress);
+ }
+
+ private static void createValidRoutingValues(JSONObject provData) {
+ JSONArray routings = new JSONArray();
+ JSONObject routing = new JSONObject();
+ routing.put("from", "prov.datarouternew.com");
+ routing.put("to", "172.0.0.4");
+ routing.put("via", "172.100.0.1");
+ routings.put(routing);
+ provData.put("routing", routings);
+ }
+
@Test
public void Given_Feed_Does_Not_Exist_Then_Is_Publish_Permitted_Returns_Not_Null() {
String permitted = nodeConfig.isPublishPermitted("2", "user", "0.0.0.0");
@@ -73,7 +167,7 @@ public class NodeConfigTest {
}
@Test
- public void Given_SubId_Then_Get_Feed_Id_Returns_Correct_Id(){
+ public void Given_SubId_Then_Get_Feed_Id_Returns_Correct_Id() {
String feedId = nodeConfig.getFeedId("1");
Assert.assertEquals("1", feedId);
}
@@ -164,100 +258,4 @@ public class NodeConfigTest {
String auth = nodeConfig.getMyAuth();
Assert.assertEquals("Basic TmFtZTp6Z04wMFkyS3gybFppbXltNy94ZDhuMkdEYjA9", auth);
}
-
- private static ProvData setUpProvData() throws IOException {
- JSONObject provData = new JSONObject();
- createValidFeed(provData);
- createValidSubscription(provData);
- createValidParameters(provData);
- createValidIngressValues(provData);
- createValidEgressValues(provData);
- createValidRoutingValues(provData);
- Reader reader = new StringReader(provData.toString());
- return new ProvData(reader);
- }
-
- private static void createValidFeed(JSONObject provData) {
- JSONArray feeds = new JSONArray();
- JSONObject feed = new JSONObject();
- JSONObject auth = new JSONObject();
- JSONArray endpointIds = new JSONArray();
- JSONArray endpointAddrs = new JSONArray();
- JSONObject endpointId = new JSONObject();
- feed.put("feedid", "1");
- feed.put("name", "Feed1");
- feed.put("version", "m1.0");
- feed.put("suspend", false);
- feed.put("deleted", false);
- endpointId.put("id", "user1");
- endpointId.put("password", "password1");
- endpointIds.put(endpointId);
- auth.put("endpoint_ids", endpointIds);
- endpointAddrs.put("172.0.0.1");
- auth.put("endpoint_addrs", endpointAddrs);
- feed.put("authorization", auth);
- feed.put("aaf_instance", "legacy");
- feeds.put(feed);
- provData.put("feeds", feeds);
- }
-
- private static void createValidSubscription(JSONObject provData) {
- JSONArray subscriptions = new JSONArray();
- JSONObject subscription = new JSONObject();
- JSONObject delivery = new JSONObject();
- subscription.put("subid", "1");
- subscription.put("feedid", "1");
- subscription.put("suspend", false);
- subscription.put("metadataOnly", false);
- delivery.put("url", "https://172.0.0.2");
- delivery.put("user", "user1");
- delivery.put("password", "password1");
- delivery.put("use100", true);
- subscription.put("delivery", delivery);
- subscription.put("privilegedSubscriber", false);
- subscription.put("follow_redirect", false);
- subscription.put("decompress", false);
- subscriptions.put(subscription);
- provData.put("subscriptions", subscriptions);
- }
-
- private static void createValidParameters(JSONObject provData) {
- JSONObject parameters = new JSONObject();
- JSONArray nodes = new JSONArray();
- parameters.put("PROV_NAME", "prov.datarouternew.com");
- parameters.put("DELIVERY_INIT_RETRY_INTERVAL", "10");
- parameters.put("DELIVERY_MAX_AGE", "86400");
- parameters.put("PROV_DOMAIN", "");
- nodes.put("172.0.0.4");
- parameters.put("NODES", nodes);
- provData.put("parameters", parameters);
- }
-
- private static void createValidIngressValues(JSONObject provData) {
- JSONArray ingresses = new JSONArray();
- JSONObject ingress = new JSONObject();
- ingress.put("feedid", "1");
- ingress.put("subnet", "");
- ingress.put("user", "");
- ingress.put("node", "172.0.0.4");
- ingresses.put(ingress);
- provData.put("ingress", ingresses);
- }
-
- private static void createValidEgressValues(JSONObject provData) {
- JSONObject egress = new JSONObject();
- egress.put("subid", "1");
- egress.put("nodeid", "172.0.0.4");
- provData.put("egress", egress);
- }
-
- private static void createValidRoutingValues(JSONObject provData) {
- JSONArray routings = new JSONArray();
- JSONObject routing = new JSONObject();
- routing.put("from", "prov.datarouternew.com");
- routing.put("to", "172.0.0.4");
- routing.put("via", "172.100.0.1");
- routings.put(routing);
- provData.put("routing", routings);
- }
}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java
index db71ceae..a375f026 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeServletTest.java
@@ -219,7 +219,7 @@ public class NodeServletTest {
@Test
public void Given_Request_Is_HTTP_PUT_On_Publish_On_AAF_Feed_And_Cadi_Enabled_And_No_Permissions_Then_Forbidden_Response_Is_Generated() throws Exception {
- when(config.getCadiEnabeld()).thenReturn(true);
+ when(config.getCadiEnabled()).thenReturn(true);
when(config.getAafInstance("1")).thenReturn("*");
when(request.getPathInfo()).thenReturn("/publish/1/fileName");
setHeadersForValidRequest(true);
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java
index 27fcd1c4..2d87b8b9 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/NodeUtilsTest.java
@@ -22,29 +22,19 @@
******************************************************************************/
package org.onap.dmaap.datarouter.node;
+import static org.mockito.Mockito.when;
+
+import javax.servlet.http.HttpServletRequest;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.MDC;
-import javax.servlet.http.HttpServletRequest;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.util.UUID;
-
-import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN;
-import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS;
-import static org.mockito.Mockito.when;
-import static org.powermock.api.mockito.PowerMockito.mockStatic;
-
@RunWith(PowerMockRunner.class)
-@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeUtils")
-@PrepareForTest({ UUID.class, InetAddress.class })
+@PowerMockIgnore({"java.net.ssl", "javax.security.auth.x500.X500Principal"})
public class NodeUtilsTest {
@Mock
@@ -54,6 +44,7 @@ public class NodeUtilsTest {
public void Given_Uri_With_Params_Then_Get_Feed_And_File_Id_Returns_Correct_Values() {
String uri = "prov.datarouternew.com:8443/feed/12/fileName";
String[] uriParams = NodeUtils.getFeedAndFileID(uri);
+        Assert.assertNotNull(uriParams);
Assert.assertEquals("12", uriParams[0]);
Assert.assertEquals("fileName", uriParams[1]);
}
@@ -87,23 +78,8 @@ public class NodeUtilsTest {
}
@Test
- public void Given_setIpAndFqdnForEelf_Called_Set_MDC_Values() throws IOException {
- mockStatic(InetAddress.class);
- when(InetAddress.getLocalHost().getHostName()).thenReturn("testHostName");
- when(InetAddress.getLocalHost().getHostAddress()).thenReturn("testHostAddress");
- NodeUtils.setIpAndFqdnForEelf("doGet");
- Assert.assertEquals("testHostName", MDC.get(MDC_SERVER_FQDN));
- Assert.assertEquals("testHostAddress", MDC.get(MDC_SERVER_IP_ADDRESS));
- }
-
- @Test
- public void Given_Request_Has_Empty_RequestId_And_InvocationId_Headers_Generate_MDC_Values() {
- when(request.getHeader("X-ONAP-RequestID")).thenReturn("");
- when(request.getHeader("X-InvocationID")).thenReturn("");
- mockStatic(UUID.class);
- when(UUID.randomUUID().toString()).thenReturn("123", "456");
- NodeUtils.setRequestIdAndInvocationId(request);
- Assert.assertEquals("123", MDC.get("RequestId"));
- Assert.assertEquals("456", MDC.get("InvocationId"));
+ public void Given_Get_CanonicalName_Called_Valid_CN_Returned() {
+ String canonicalName = NodeUtils.getCanonicalName("jks", "src/test/resources/org.onap.dmaap-dr-test-cert.jks", "WGxd2P6MDo*Bi4+UdzWs{?$8");
+ Assert.assertEquals("dmaap-dr-node", canonicalName);
}
}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/PathFinderTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/PathFinderTest.java
new file mode 100644
index 00000000..25edd0c0
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/PathFinderTest.java
@@ -0,0 +1,75 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThat;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+public class PathFinderTest {
+
+ @Test
+ public void Given_Unknown_From_Node_Returns_Null() {
+ new PathFinder("dr-node-1", new String[]{"dr-node-1", "dr-node-2", "dr-node-3"},
+ new NodeConfig.ProvHop[]{new NodeConfig.ProvHop("dr-node-4", "dr-node-3", "dr-node-2")});
+ }
+
+ @Test
+ public void Given_Unknown_Destination_Node_Returns_Null() {
+ new PathFinder("dr-node-1", new String[]{"dr-node-1", "dr-node-2", "dr-node-3"},
+ new NodeConfig.ProvHop[]{new NodeConfig.ProvHop("dr-node-1", "dr-node-5", "dr-node-2")});
+ }
+
+ @Test
+ public void Given_Duplicate_Next_Hop_Returns_Null() {
+ PathFinder p = new PathFinder("dr-node-1", new String[]{"dr-node-1", "dr-node-2", "dr-node-3"},
+ new NodeConfig.ProvHop[]{new NodeConfig.ProvHop("dr-node-1", "dr-node-3", "dr-node-2"),
+ new NodeConfig.ProvHop("dr-node-1", "dr-node-3", "dr-node-2")});
+ assertThat(p.getErrors().length, is(1));
+ assertNotNull(p.getPath("dr-node-3"));
+ assertThat(p.getPath("dr-node-5").length(), is(0));
+ }
+
+ @Test
+ public void Given_Unknown_Via_Node_Returns_Null() {
+ new PathFinder("dr-node-1", new String[]{"dr-node-1", "dr-node-2", "dr-node-3"},
+ new NodeConfig.ProvHop[]{new NodeConfig.ProvHop("dr-node-1", "dr-node-3", "dr-node-4")});
+ }
+
+ @Test
+ public void Given_Dest_Equals_Via_Bad_Hop_Defined() {
+ new PathFinder("dr-node-1", new String[]{"dr-node-1", "dr-node-2", "dr-node-3"},
+ new NodeConfig.ProvHop[]{new NodeConfig.ProvHop("dr-node-1", "dr-node-2", "dr-node-2")});
+ }
+
+ @Test
+ public void Given_Valid_Path_Defined_Success() {
+ new PathFinder("dr-node-1", new String[]{"dr-node-1", "dr-node-2", "dr-node-3"},
+ new NodeConfig.ProvHop[]{new NodeConfig.ProvHop("dr-node-1", "dr-node-3+", "dr-node-2")});
+ }
+
+
+}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java
index 662f2cc0..1fd79d9a 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/ProvDataTest.java
@@ -20,17 +20,16 @@
package org.onap.dmaap.datarouter.node;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.modules.junit4.PowerMockRunner;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNull;
import java.io.ByteArrayInputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertNull;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
public class ProvDataTest {
@@ -40,77 +39,77 @@ public class ProvDataTest {
    public void Validate_Values_Are_Set_Correctly_Through_ProvData_Constructor() throws Exception {
String InternalProvData =
"{" +
- "\"ingress\":[{" +
- "\"feedid\":1," +
- "\"subnet\":\"\"," +
- "\"user\":\"\"," +
- "\"node\":\"node\"" +
- "}]," +
- "\"routing\":[{" +
- "\"from\":\"172.10.10.10\"," +
- "\"to\":\"172.10.10.12\"," +
- "\"via\":\"172.10.10.11\"" +
- "}]," +
- "\"subscriptions\":[{" +
- "\"subid\":1," +
- "\"suspend\":false," +
- "\"delivery\":{" +
- "\"use100\":true," +
- "\"password\":\"PASSWORD\"," +
- "\"user\":\"LOGIN\"," +
- "\"url\":\"http://172.18.0.2:7070\"" +
- "}," +
- "\"last_mod\":1553608460000," +
- "\"subscriber\":\"PMMAPER\"," +
- "\"feedid\":1," +
- "\"decompress\":false," +
- "\"groupid\":1," +
- "\"metadataOnly\":false," +
- "\"follow_redirect\":false," +
- "\"links\":{" +
- "\"feed\":\"https://dmaap-dr-prov/feed/1\"" +
- ",\"log\":\"https://dmaap-dr-prov/sublog/1\"" +
- ",\"self\":\"https://dmaap-dr-prov/subs/1\"" +
- "}," +
- "\"created_date\":1553608460000," +
- "\"privilegedSubscriber\":false" +
- "}]," +
- "\"feeds\":[{" +
- "\"suspend\":false," +
- "\"groupid\":0," +
- "\"description\":\"Default feed\"," +
- "\"version\":\"m1.0\"," +
- "\"authorization\":{" +
- "\"endpoint_addrs\":[\"172.10.10.20\"]," +
- "\"classification\":\"unclassified\"," +
- "\"endpoint_ids\":[{" +
- "\"password\":\"password\"," +
- "\"id\":\"user\"" +
- "}]" +
- "}," +
- "\"last_mod\":1553608454000," +
- "\"deleted\":false," +
- "\"feedid\":1," +
- "\"name\":\"CSIT_Test2\"" +
- ",\"business_description\":\"Default Feed\"" +
- ",\"publisher\":\"dradmin\"" +
- ",\"links\":{" +
- "\"subscribe\":\"https://dmaap-dr-prov/subscribe/1\"," +
- "\"log\":\"https://dmaap-dr-prov/feedlog/1\"," +
- "\"publish\":\"https://dmaap-dr-prov/publish/1\"," +
- "\"self\":\"https://dmaap-dr-prov/feed/1\"" +
- "}," +
- "\"created_date\":1553608454000" +
- "}]," +
- "\"groups\":[]," +
- "\"parameters\":{" +
- "\"NODES\":[\"dmaap-dr-node\"]," +
- "\"PROV_DOMAIN\":\"\"" +
- "}," +
- "\"egress\":{" +
- "\"1\":1" +
- "}" +
- "}" ;
+ "\"ingress\":[{" +
+ "\"feedid\":1," +
+ "\"subnet\":\"\"," +
+ "\"user\":\"\"," +
+ "\"node\":\"node\"" +
+ "}]," +
+ "\"routing\":[{" +
+ "\"from\":\"172.10.10.10\"," +
+ "\"to\":\"172.10.10.12\"," +
+ "\"via\":\"172.10.10.11\"" +
+ "}]," +
+ "\"subscriptions\":[{" +
+ "\"subid\":1," +
+ "\"suspend\":false," +
+ "\"delivery\":{" +
+ "\"use100\":true," +
+ "\"password\":\"PASSWORD\"," +
+ "\"user\":\"LOGIN\"," +
+ "\"url\":\"http://172.18.0.2:7070\"" +
+ "}," +
+ "\"last_mod\":1553608460000," +
+ "\"subscriber\":\"PMMAPER\"," +
+ "\"feedid\":1," +
+ "\"decompress\":false," +
+ "\"groupid\":1," +
+ "\"metadataOnly\":false," +
+ "\"follow_redirect\":false," +
+ "\"links\":{" +
+ "\"feed\":\"https://dmaap-dr-prov/feed/1\"" +
+ ",\"log\":\"https://dmaap-dr-prov/sublog/1\"" +
+ ",\"self\":\"https://dmaap-dr-prov/subs/1\"" +
+ "}," +
+ "\"created_date\":1553608460000," +
+ "\"privilegedSubscriber\":false" +
+ "}]," +
+ "\"feeds\":[{" +
+ "\"suspend\":false," +
+ "\"groupid\":0," +
+ "\"description\":\"Default feed\"," +
+ "\"version\":\"m1.0\"," +
+ "\"authorization\":{" +
+ "\"endpoint_addrs\":[\"172.10.10.20\"]," +
+ "\"classification\":\"unclassified\"," +
+ "\"endpoint_ids\":[{" +
+ "\"password\":\"password\"," +
+ "\"id\":\"user\"" +
+ "}]" +
+ "}," +
+ "\"last_mod\":1553608454000," +
+ "\"deleted\":false," +
+ "\"feedid\":1," +
+ "\"name\":\"CSIT_Test2\"" +
+ ",\"business_description\":\"Default Feed\"" +
+ ",\"publisher\":\"dradmin\"" +
+ ",\"links\":{" +
+ "\"subscribe\":\"https://dmaap-dr-prov/subscribe/1\"," +
+ "\"log\":\"https://dmaap-dr-prov/feedlog/1\"," +
+ "\"publish\":\"https://dmaap-dr-prov/publish/1\"," +
+ "\"self\":\"https://dmaap-dr-prov/feed/1\"" +
+ "}," +
+ "\"created_date\":1553608454000" +
+ "}]," +
+ "\"groups\":[]," +
+ "\"parameters\":{" +
+ "\"NODES\":[\"dmaap-dr-node\"]," +
+ "\"PROV_DOMAIN\":\"\"" +
+ "}," +
+ "\"egress\":{" +
+ "\"1\":1" +
+ "}" +
+ "}";
Reader r = new InputStreamReader(new ByteArrayInputStream(InternalProvData.getBytes(StandardCharsets.UTF_8)));
ProvData pd = new ProvData(r);
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/RedirManagerTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/RedirManagerTest.java
new file mode 100644
index 00000000..2c8a0e52
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/RedirManagerTest.java
@@ -0,0 +1,73 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+import static org.junit.Assert.assertThat;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.util.Timer;
+import org.hamcrest.core.Is;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+public class RedirManagerTest {
+
+ private RedirManager redirManager;
+ private String redirFilePath = System.getProperty("user.dir") + "/src/test/resources/redir_file";
+
+ @Before
+ public void setUp() {
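+        // RedirManager loads src/test/resources/redir_file, which is pre-seeded with redirects for subs 1 and 2.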
+ Timer timer = new Timer("Node Configuration Timer", true);
+ redirManager = new RedirManager(redirFilePath, 10000L, timer);
+ }
+
+ @Test
+ public void Given_Lookup_On_Valid_Redirect_Returns_Target_URL() {
+ assertThat(redirManager.lookup("1", "http://destination:8443/path/to"), Is.is("http://redirect:8443/path/to"));
+ }
+
+ @Test
+ public void Given_IsRedirected_Called_On_Valid_Sub_Id_Then_Returns_True() {
+ assertThat(redirManager.isRedirected("1"), Is.is(true));
+ }
+
+ @Test
+ public void Given_Redirect_Called_On_Valid_Redirect_New_Redirect_Added() throws IOException {
+        long origFileLength = new File(redirFilePath).length();
+ redirManager.redirect("3", "http://destination3:8443/path/to", "http://redirect3:8443/path/to");
+ assertThat(redirManager.lookup("3", "http://destination3:8443/path/to"), Is.is("http://redirect3:8443/path/to"));
+ new RandomAccessFile(redirFilePath, "rw").setLength(origFileLenght);
+ }
+
+ @Test
+ public void Given_Lookup_On_Invalid_Redirect_Returns_Primary_Target_URL_And_Is_Forgotten() throws IOException {
+ assertThat(redirManager.lookup("2", "http://invalid:8443/path/to"), Is.is("http://invalid:8443/path/to"));
+ Files.write(Paths.get(redirFilePath), "2 http://destination2:8443/path/to http://redirect2:8443/path/to".getBytes(), StandardOpenOption.APPEND);
+ }
+}
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java
index e60f576c..d3793069 100644
--- a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/StatusLogTest.java
@@ -22,6 +22,9 @@
******************************************************************************/
package org.onap.dmaap.datarouter.node;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -31,9 +34,6 @@ import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
@RunWith(PowerMockRunner.class)
@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeConfigManager")
@PrepareForTest(StatusLog.class)
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/TaskListTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/TaskListTest.java
new file mode 100644
index 00000000..311165c6
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/TaskListTest.java
@@ -0,0 +1,44 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+import org.junit.Test;
+
+public class TaskListTest {
+
+ @Test
+ public void Given_New_Task_List_Verify_Add_And_Run() {
+ TaskList taskList = new TaskList();
+ taskList.startRun();
+ taskList.addTask(() -> {
+ });
+ taskList.next();
+ taskList.removeTask(() -> {
+ });
+ }
+
+ @Test
+ public void Given_Empty_Task_List_Verify_Next() {
+ TaskList taskList = new TaskList();
+ taskList.startRun();
+ taskList.next();
+ }
+}
diff --git a/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node b/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node
new file mode 100644
index 00000000..1677aafe
--- /dev/null
+++ b/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node
@@ -0,0 +1 @@
+Hello World!!!!!!
\ No newline at end of file
diff --git a/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.M b/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.M
new file mode 100644
index 00000000..b317b671
--- /dev/null
+++ b/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.M
@@ -0,0 +1,8 @@
+PUT A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz
+X-RequestID A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz
+X-InvocationID ea407097-2de5-452e-9f21-569e6d867deb
+X-TransactionID A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz
+X-ONAP-RequestID A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz
+X-DMAAP-DR-META {"productName":"RnNode","vendorName":"Ericsson","lastEpochMicrosec":"1561550151179","sourceName":"","startEpochMicrosec":"1561550151179","timeZoneOffset":"UTC+00:00","location":"ftps://onap:pano@10.209.63.42:2036/A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz","compression":"gzip","fileFormatType":"org.3GPP.32.435#measCollec","fileFormatVersion":"V10"}
+Content-Type application/octet-stream
+X-DMAAP-DR-RECEIVED 2019-06-26T11:56:38.216Z;from=10.42.54.206;by=10.42.70.78
\ No newline at end of file
diff --git a/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.gz b/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.gz
new file mode 100644
index 00000000..d38bae1f
--- /dev/null
+++ b/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.gz
Binary files differ
diff --git a/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.gz.M b/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.gz.M
new file mode 100644
index 00000000..b317b671
--- /dev/null
+++ b/datarouter-node/src/test/resources/delivery_files/123456789.test-dr-node.gz.M
@@ -0,0 +1,8 @@
+PUT A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz
+X-RequestID A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz
+X-InvocationID ea407097-2de5-452e-9f21-569e6d867deb
+X-TransactionID A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz
+X-ONAP-RequestID A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz
+X-DMAAP-DR-META {"productName":"RnNode","vendorName":"Ericsson","lastEpochMicrosec":"1561550151179","sourceName":"","startEpochMicrosec":"1561550151179","timeZoneOffset":"UTC+00:00","location":"ftps://onap:pano@10.209.63.42:2036/A20190626.1106+0000-1120+0000_excl-eeiwbue-perf-large3-pnf-sim-lw-17.xml.gz","compression":"gzip","fileFormatType":"org.3GPP.32.435#measCollec","fileFormatVersion":"V10"}
+Content-Type application/octet-stream
+X-DMAAP-DR-RECEIVED 2019-06-26T11:56:38.216Z;from=10.42.54.206;by=10.42.70.78
\ No newline at end of file
diff --git a/datarouter-node/src/test/resources/org.onap.dmaap-dr-test-cert.jks b/datarouter-node/src/test/resources/org.onap.dmaap-dr-test-cert.jks
new file mode 100644
index 00000000..2320dc9f
--- /dev/null
+++ b/datarouter-node/src/test/resources/org.onap.dmaap-dr-test-cert.jks
Binary files differ
diff --git a/datarouter-node/src/test/resources/redir_file b/datarouter-node/src/test/resources/redir_file
new file mode 100644
index 00000000..0c72ebe9
--- /dev/null
+++ b/datarouter-node/src/test/resources/redir_file
@@ -0,0 +1,2 @@
+1 http://destination:8443/path/to http://redirect:8443/path/to
+2 http://destination2:8443/path/to http://redirect2:8443/path/to
\ No newline at end of file
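Note: the redir_file fixture added above holds one redirect mapping per line in the form "<id> <destination-url> <redirect-url>". A minimal sketch of how such a fixture could be parsed in a test helper follows; the class name, return shape, and the meaning assigned to each column are assumptions for illustration only, not part of this patch.

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Map;

    public class RedirFileReader {
        /** Parses lines of the form "<id> <destination-url> <redirect-url>" into a map keyed by destination. */
        public static Map<String, String> load(String path) throws IOException {
            Map<String, String> redirects = new HashMap<>();
            for (String line : Files.readAllLines(Paths.get(path))) {
                String[] parts = line.trim().split("\\s+");
                if (parts.length == 3) {
                    // parts[0] is the entry id, parts[1] the original destination, parts[2] the redirect target.
                    redirects.put(parts[1], parts[2]);
                }
            }
            return redirects;
        }
    }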
diff --git a/datarouter-prov/pom.xml b/datarouter-prov/pom.xml
index 56a4b139..cbcfc71f 100755
--- a/datarouter-prov/pom.xml
+++ b/datarouter-prov/pom.xml
@@ -36,7 +36,8 @@
<sitePath>/content/sites/site/${project.groupId}/${project.artifactId}/${project.version}</sitePath>
<docker.location>${basedir}/target/${project.artifactId}</docker.location>
<datarouter.prov.image.name>${docker.image.root}${project.artifactId}</datarouter.prov.image.name>
- <sonar.exclusions>**/src/main/java/org/onap/dmaap/datarouter/reports/**</sonar.exclusions>
+ <sonar.exclusions>**/src/main/java/org/onap/dmaap/datarouter/reports/**,
+ **/src/main/java/org/onap/dmaap/authz/impl/AuthRespSupplementImpl.java</sonar.exclusions>
<sonar.language>java</sonar.language>
<sonar.skip>false</sonar.skip>
</properties>
@@ -430,15 +431,12 @@
<artifactId>cobertura-maven-plugin</artifactId>
</plugin>
<plugin>
- <groupId>org.sonatype.plugins</groupId>
- <artifactId>nexus-staging-maven-plugin</artifactId>
- </plugin>
- <plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/src/main/java/org/onap/dmaap/datarouter/reports/**</exclude>
+ <exclude>**/src/main/java/org/onap/dmaap/authz/impl/AuthRespSupplementImpl.java</exclude>
</excludes>
</configuration>
</plugin>
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java
index f3278332..c7d71996 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java
@@ -44,21 +44,23 @@ public class AuthRespImpl implements AuthorizationResponse {
/** Constructor. This version will not be used in Data Router R1 since we will not have advice and obligations.
*
* @param authorized flag indicating whether the response carried a permit response (<code>true</code>)
- * or something else (<code>false</code>).
+ * or something else (<code>false</code>).
* @param advice list of advice elements returned in the response.
* @param obligations list of obligation elements returned in the response.
*/
- public AuthRespImpl(boolean authorized, List<AuthorizationResponseSupplement> advice, List<AuthorizationResponseSupplement> obligations) {
+ private AuthRespImpl(boolean authorized, List<AuthorizationResponseSupplement> advice,
+ List<AuthorizationResponseSupplement> obligations) {
this.authorized = authorized;
- this.advice = (advice == null ? null : new ArrayList<AuthorizationResponseSupplement> (advice));
- this.obligations = (obligations == null ? null : new ArrayList<AuthorizationResponseSupplement> (obligations));
+ this.advice = (advice == null ? null : new ArrayList<>(advice));
+ this.obligations = (obligations == null ? null : new ArrayList<>(obligations));
}
/** Constructor. Simple version for authorization responses that have no advice and no obligations.
*
- * @param authorized flag indicating whether the response carried a permit (<code>true</code>) or something else (<code>false</code>).
+ * @param authorized flag indicating whether the response carried a permit (<code>true</code>)
+ * or something else (<code>false</code>).
*/
- public AuthRespImpl(boolean authorized) {
+ AuthRespImpl(boolean authorized) {
this(authorized, null, null);
}
@@ -69,25 +71,25 @@ public class AuthRespImpl implements AuthorizationResponse {
*/
@Override
public boolean isAuthorized() {
- return authorized;
+ return authorized;
}
/**
* Returns any advice elements that were included in the authorization response.
*
- * @return A list of objects implementing the <code>AuthorizationResponseSupplement</code> interface, with each object representing an
- * advice element from the authorization response.
+ * @return A list of objects implementing the <code>AuthorizationResponseSupplement</code> interface,
+ * with each object representing an advice element from the authorization response.
*/
@Override
public List<AuthorizationResponseSupplement> getAdvice() {
- return advice;
+ return advice;
}
/**
* Returns any obligation elements that were included in the authorization response.
*
- * @return A list of objects implementing the <code>AuthorizationResponseSupplement</code> interface, with each object representing an
- * obligation element from the authorization response.
+ * @return A list of objects implementing the <code>AuthorizationResponseSupplement</code> interface,
+ * with each object representing an obligation element from the authorization response.
*/
@Override
public List<AuthorizationResponseSupplement> getObligations() {
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java
index d995270e..b61c00e5 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java
@@ -36,17 +36,17 @@ import org.onap.dmaap.datarouter.authz.AuthorizationResponseSupplement;
*/
public class AuthRespSupplementImpl implements AuthorizationResponseSupplement {
- private String id = null;
- private Map<String, String> attributes = null;
+ private String id;
+ private Map<String, String> attributes;
/** Constructor, available within the package.
*
* @param id The identifier for the advice or obligation element
* @param attributes The attributes (name-value pairs) for the advice or obligation element.
*/
- AuthRespSupplementImpl (String id, Map<String, String> attributes) {
+ AuthRespSupplementImpl(String id, Map<String, String> attributes) {
this.id = id;
- this.attributes = new HashMap<String,String>(attributes);
+ this.attributes = new HashMap<>(attributes);
}
/** Return the identifier for the supplementary information element.
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java
index 0357fa74..c248468f 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java
@@ -30,7 +30,6 @@ import java.util.regex.Pattern;
/** Internal representation of an authorization resource (the entity to which access is being requested). Consists
* of a type and an identifier. The constructor takes the request URI from an HTTP request and checks it against
* patterns for the the different resource types. In DR R1, there are four resource types:
- * <ul>
* <li>the feeds collection resource, the target of POST requests to create a new feed and GET requests to list
* the existing feeds. This is the root resource for the DR provisioning system, and it has no explicit id.
* </li>
@@ -53,10 +52,10 @@ public class AuthzResource {
private String id = "";
/* Construct an AuthzResource by matching a request URI against the various patterns */
- public AuthzResource(String rURI) {
- if (rURI != null) {
+ AuthzResource(String requestUri) {
+ if (requestUri != null) {
for (ResourceType t : ResourceType.values()) {
- Matcher m = t.getPattern().matcher(rURI);
+ Matcher m = t.getPattern().matcher(requestUri);
if (m.find(0)) {
this.type = t;
if (m.group("id") != null) {
@@ -83,13 +82,13 @@ public class AuthzResource {
*/
public enum ResourceType {
FEEDS_COLLECTION("((://[^/]+/)|(^/))(?<id>)$"),
- SUBS_COLLECTION ("((://[^/]+/)|(^/{0,1}))subscribe/(?<id>[^/]+)$"),
+ SUBS_COLLECTION("((://[^/]+/)|(^/{0,1}))subscribe/(?<id>[^/]+)$"),
FEED("((://[^/]+/)|(^/{0,1}))feed/(?<id>[^/]+)$"),
SUB("((://[^/]+/)|(^/{0,1}))subs/(?<id>[^/]+)$");
private Pattern uriPattern;
- private ResourceType(String patternString) {
+ ResourceType(String patternString) {
this.uriPattern = Pattern.compile(patternString);
}
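Note: the AuthzResource changes above are style-only; each ResourceType regex still carries a named group "id", and the constructor still takes the first pattern that matches the request URI. A standalone sketch of that matching idea follows; the enum values are copied from the patch, but the demo class and sample URI are illustrative assumptions.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    enum ResourceType {
        FEED("((://[^/]+/)|(^/{0,1}))feed/(?<id>[^/]+)$"),
        SUB("((://[^/]+/)|(^/{0,1}))subs/(?<id>[^/]+)$");

        private final Pattern uriPattern;

        ResourceType(String patternString) {
            this.uriPattern = Pattern.compile(patternString);
        }

        Pattern getPattern() {
            return uriPattern;
        }
    }

    public class UriMatchDemo {
        public static void main(String[] args) {
            String requestUri = "https://dmaap-dr-prov:8443/feed/42";
            for (ResourceType t : ResourceType.values()) {
                Matcher m = t.getPattern().matcher(requestUri);
                if (m.find(0)) {
                    // The named group "id" carries the feed or subscription number when present.
                    System.out.println(t + " -> id=" + m.group("id"));
                    break;
                }
            }
        }
    }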
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java
index 745e339d..595b626c 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java
@@ -23,17 +23,15 @@
package org.onap.dmaap.datarouter.authz.impl;
-import java.util.Map;
-
-import javax.servlet.http.HttpServletRequest;
-
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import java.util.Map;
+import javax.servlet.http.HttpServletRequest;
import org.onap.dmaap.datarouter.authz.AuthorizationResponse;
import org.onap.dmaap.datarouter.authz.Authorizer;
import org.onap.dmaap.datarouter.authz.impl.AuthzResource.ResourceType;
-/** Authorizer for the provisioning API for Data Router R1
+/** Authorizer for the provisioning API for Data Router R1.
*
* @author J. F. Lucas
*
@@ -45,6 +43,7 @@ public class ProvAuthorizer implements Authorizer {
private static final String SUBJECT_HEADER = "X-DMAAP-DR-ON-BEHALF-OF"; // HTTP header carrying requester identity
private static final String SUBJECT_HEADER_GROUP = "X-DMAAP-DR-ON-BEHALF-OF-GROUP"; // HTTP header carrying requester identity by group Rally : US708115
+
/** Constructor. For the moment, do nothing special. Make it a singleton?
*
*/
@@ -63,7 +62,7 @@ public class ProvAuthorizer implements Authorizer {
*/
@Override
public AuthorizationResponse decide(HttpServletRequest request) {
- return this.decide(request, null);
+ return this.decide(request, null);
}
/**
@@ -79,80 +78,66 @@ public class ProvAuthorizer implements Authorizer {
@Override
public AuthorizationResponse decide(HttpServletRequest request,
Map<String, String> additionalAttrs) {
- log.trace ("Entering decide()");
-
+ log.trace("Entering decide()");
boolean decision = false;
-
// Extract interesting parts of the HTTP request
String method = request.getMethod();
AuthzResource resource = new AuthzResource(request.getRequestURI());
- String subject = (request.getHeader(SUBJECT_HEADER)); // identity of the requester
- String subjectgroup = (request.getHeader(SUBJECT_HEADER_GROUP)); // identity of the requester by group Rally : US708115
-
- log.trace("Method: " + method + " -- Type: " + resource.getType() + " -- Id: " + resource.getId() +
- " -- Subject: " + subject);
+ String subject = (request.getHeader(SUBJECT_HEADER));
+ String subjectgroup = (request.getHeader(SUBJECT_HEADER_GROUP));
+ log.trace("Method: " + method + " -- Type: " + resource.getType() + " -- Id: " + resource.getId()
+ + " -- Subject: " + subject);
// Choose authorization method based on the resource type
ResourceType resourceType = resource.getType();
if (resourceType != null) {
-
switch (resourceType) {
-
- case FEEDS_COLLECTION:
- decision = allowFeedsCollectionAccess(resource, method, subject, subjectgroup);
- break;
-
- case SUBS_COLLECTION:
- decision = allowSubsCollectionAccess(resource, method, subject, subjectgroup);
- break;
-
- case FEED:
- decision = allowFeedAccess(resource, method, subject, subjectgroup);
- break;
-
- case SUB:
- decision = allowSubAccess(resource, method, subject, subjectgroup);
- break;
-
- default:
- decision = false;
- break;
+ case FEEDS_COLLECTION:
+ decision = allowFeedsCollectionAccess(method);
+ break;
+ case SUBS_COLLECTION:
+ decision = allowSubsCollectionAccess(method);
+ break;
+ case FEED:
+ decision = allowFeedAccess(resource, method, subject, subjectgroup);
+ break;
+ case SUB:
+ decision = allowSubAccess(resource, method, subject, subjectgroup);
+ break;
+ default:
+ decision = false;
+ break;
}
}
- log.debug("Exit decide(): " + method + "|" + resourceType + "|" + resource.getId() + "|" + subject + " ==> " + decision);
+ log.debug("Exit decide(): " + method + "|" + resourceType + "|" + resource.getId() + "|"
+ + subject + " ==> " + decision);
return new AuthRespImpl(decision);
}
- private boolean allowFeedsCollectionAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
-
+ private boolean allowFeedsCollectionAccess(String method) {
// Allow GET or POST unconditionally
return method != null && ("GET".equalsIgnoreCase(method) || "POST".equalsIgnoreCase(method));
}
- private boolean allowSubsCollectionAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
-
+ private boolean allowSubsCollectionAccess(String method) {
// Allow GET or POST unconditionally
return method != null && ("GET".equalsIgnoreCase(method) || "POST".equalsIgnoreCase(method));
}
- private boolean allowFeedAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
+ private boolean allowFeedAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
boolean decision = false;
-
// Allow GET, PUT, or DELETE if requester (subject) is the owner (publisher) of the feed
- if ( method != null && ("GET".equalsIgnoreCase(method) || "PUT".equalsIgnoreCase(method) ||
- "DELETE".equalsIgnoreCase(method))) {
+ if ( method != null && ("GET".equalsIgnoreCase(method) || "PUT".equalsIgnoreCase(method) || "DELETE".equalsIgnoreCase(method))) {
String owner = provData.getFeedOwner(resource.getId());
decision = (owner != null) && owner.equals(subject);
-
//Verifying by group Rally : US708115
- if(subjectgroup != null) {
- String feedowner = provData.getGroupByFeedGroupId(subject, resource.getId());
- decision = (feedowner != null) && feedowner.equals(subjectgroup);
+ if (subjectgroup != null) {
+ String feedOwner = provData.getGroupByFeedGroupId(subject, resource.getId());
+ decision = (feedOwner != null) && feedOwner.equals(subjectgroup);
}
}
-
return decision;
}
@@ -160,14 +145,13 @@ public class ProvAuthorizer implements Authorizer {
boolean decision = false;
// Allow GET, PUT, or DELETE if requester (subject) is the owner of the subscription (subscriber)
- if (method != null && ("GET".equalsIgnoreCase(method) || "PUT".equalsIgnoreCase(method) ||
- "DELETE".equalsIgnoreCase(method) || "POST".equalsIgnoreCase(method))) {
+ if (method != null && ("GET".equalsIgnoreCase(method) || "PUT".equalsIgnoreCase(method) || "DELETE".equalsIgnoreCase(method) || "POST".equalsIgnoreCase(method))) {
String owner = provData.getSubscriptionOwner(resource.getId());
decision = (owner != null) && owner.equals(subject);
//Verifying by group Rally : US708115
- if(subjectgroup != null) {
+ if (subjectgroup != null) {
String feedowner = provData.getGroupBySubGroupId(subject, resource.getId());
decision = (feedowner != null) && feedowner.equals(subjectgroup);
}
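Note: the ProvAuthorizer refactor above drops the unused parameters from the collection-level checks and keeps the owner-or-group decision for individual feeds. A condensed sketch of that decision logic follows; the OwnerLookup interface stands in for the real ProvDataProvider and is an assumption for illustration.

    public class FeedAccessCheck {

        /** Stand-in for the provisioning data lookups used by the real authorizer. */
        interface OwnerLookup {
            String getFeedOwner(String feedId);
            String getGroupByFeedGroupId(String subject, String feedId);
        }

        static boolean allowFeedAccess(OwnerLookup provData, String feedId, String method,
                                       String subject, String subjectgroup) {
            boolean decision = false;
            // GET, PUT and DELETE are allowed when the requester owns the feed...
            if (method != null && ("GET".equalsIgnoreCase(method) || "PUT".equalsIgnoreCase(method)
                    || "DELETE".equalsIgnoreCase(method))) {
                String owner = provData.getFeedOwner(feedId);
                decision = (owner != null) && owner.equals(subject);
                // ...or, when a group header is present, when the feed's group matches the requester's group.
                if (subjectgroup != null) {
                    String feedOwner = provData.getGroupByFeedGroupId(subject, feedId);
                    decision = (feedOwner != null) && feedOwner.equals(subjectgroup);
                }
            }
            return decision;
        }
    }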
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java
index deb78b16..c6b1cde7 100755
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java
@@ -24,30 +24,33 @@
package org.onap.dmaap.datarouter.provisioning;
+import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID;
import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN;
-
import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS;
import static com.att.eelf.configuration.Configuration.MDC_SERVICE_NAME;
-import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID;
-
-
-import java.io.IOException;
-import java.io.InputStream;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.net.InetAddress;
import java.net.UnknownHostException;
+import java.security.GeneralSecurityException;
import java.security.cert.X509Certificate;
import java.sql.Connection;
import java.sql.SQLException;
-
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.UUID;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
import org.apache.commons.lang3.StringUtils;
+import org.jetbrains.annotations.Nullable;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
@@ -55,21 +58,19 @@ import org.json.JSONTokener;
import org.onap.dmaap.datarouter.authz.Authorizer;
import org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer;
import org.onap.dmaap.datarouter.authz.impl.ProvDataProvider;
-import org.onap.dmaap.datarouter.provisioning.beans.*;
+import org.onap.dmaap.datarouter.provisioning.beans.Deleteable;
+import org.onap.dmaap.datarouter.provisioning.beans.Feed;
+import org.onap.dmaap.datarouter.provisioning.beans.Group;
+import org.onap.dmaap.datarouter.provisioning.beans.Insertable;
+import org.onap.dmaap.datarouter.provisioning.beans.NodeClass;
+import org.onap.dmaap.datarouter.provisioning.beans.Parameters;
+import org.onap.dmaap.datarouter.provisioning.beans.Subscription;
+import org.onap.dmaap.datarouter.provisioning.beans.Updateable;
import org.onap.dmaap.datarouter.provisioning.utils.DB;
import org.onap.dmaap.datarouter.provisioning.utils.PasswordProcessor;
import org.onap.dmaap.datarouter.provisioning.utils.ThrottleFilter;
import org.slf4j.MDC;
-import javax.mail.*;
-import javax.mail.internet.InternetAddress;
-import javax.mail.internet.MimeBodyPart;
-import javax.mail.internet.MimeMessage;
-import javax.mail.internet.MimeMultipart;
-import java.security.GeneralSecurityException;
-import java.util.*;
-import java.util.regex.Pattern;
-
/**
* This is the base class for all Servlets in the provisioning code. It provides standard constants and some common
@@ -94,10 +95,10 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
static final String CREATE_PERMISSION = "create";
static final String EDIT_PERMISSION = "edit";
static final String DELETE_PERMISSION = "delete";
- static final String PUBLISH_PERMISSION = "publish";
- static final String SUSPEND_PERMISSION = "suspend";
- static final String RESTORE_PERMISSION = "restore";
- static final String SUBSCRIBE_PERMISSION = "subscribe";
+ private static final String PUBLISH_PERMISSION = "publish";
+ private static final String SUSPEND_PERMISSION = "suspend";
+ private static final String RESTORE_PERMISSION = "restore";
+ private static final String SUBSCRIBE_PERMISSION = "subscribe";
static final String APPROVE_SUB_PERMISSION = "approveSub";
static final String FEED_BASECONTENT_TYPE = "application/vnd.dmaap-dr.feed";
@@ -113,7 +114,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
//Adding groups functionality, ...1610
static final String GROUP_BASECONTENT_TYPE = "application/vnd.dmaap-dr.group";
static final String GROUP_CONTENT_TYPE = "application/vnd.dmaap-dr.group; version=2.0";
- public static final String GROUPFULL_CONTENT_TYPE = "application/vnd.dmaap-dr.group-full; version=2.0";
+ static final String GROUPFULL_CONTENT_TYPE = "application/vnd.dmaap-dr.group-full; version=2.0";
public static final String GROUPLIST_CONTENT_TYPE = "application/vnd.dmaap-dr.fegrouped-list; version=1.0";
@@ -130,101 +131,123 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
private static final int DEFAULT_POKETIMER2 = 30;
private static final String DEFAULT_DOMAIN = "onap";
private static final String DEFAULT_PROVSRVR_NAME = "dmaap-dr-prov";
- private static final String STATIC_ROUTING_NODES = ""; //Adding new param for static Routing - Rally:US664862-1610
+
+ //Common Errors
+ static final String MISSING_ON_BEHALF = "Missing X-DMAAP-DR-ON-BEHALF-OF header.";
+ static final String MISSING_FEED = "Missing or bad feed number.";
+ static final String POLICY_ENGINE = "Policy Engine disallows access.";
+ static final String UNAUTHORIZED = "Unauthorized.";
+ static final String BAD_SUB = "Missing or bad subscription number.";
+ static final String BAD_JSON = "Badly formed JSON";
+ static final String BAD_URL = "Bad URL.";
+
+ public static final String API = "/api/";
+ static final String LOGS = "/logs/";
+ static final String TEXT_CT = "text/plain";
+ static final String INGRESS = "/ingress/";
+ static final String EGRESS = "/egress/";
+ static final String NETWORK = "/network/";
+ static final String GROUPID = "groupid";
+ public static final String FEEDID = "feedid";
+ static final String FEEDIDS = "feedids";
+ static final String SUBID = "subid";
+ static final String EVENT_TYPE = "eventType";
+ static final String OUTPUT_TYPE = "output_type";
+ static final String START_TIME = "start_time";
+ static final String END_TIME = "end_time";
+ static final String REASON_SQL = "reasonSQL";
+
/**
- * A boolean to trigger one time "provisioning changed" event on startup
+ * A boolean to trigger one time "provisioning changed" event on startup.
*/
private static boolean startmsgFlag = true;
/**
- * This POD should require SSL connections from clients; pulled from the DB (PROV_REQUIRE_SECURE)
+ * This POD should require SSL connections from clients; pulled from the DB (PROV_REQUIRE_SECURE).
*/
private static boolean requireSecure = true;
/**
- * This POD should require signed, recognized certificates from clients; pulled from the DB (PROV_REQUIRE_CERT)
+ * This POD should require signed, recognized certificates from clients; pulled from the DB (PROV_REQUIRE_CERT).
*/
private static boolean requireCert = true;
/**
- * The set of authorized addresses and networks; pulled from the DB (PROV_AUTH_ADDRESSES)
+ * The set of authorized addresses and networks; pulled from the DB (PROV_AUTH_ADDRESSES).
*/
private static Set<String> authorizedAddressesAndNetworks = new HashSet<>();
/**
- * The set of authorized names; pulled from the DB (PROV_AUTH_SUBJECTS)
+ * The set of authorized names; pulled from the DB (PROV_AUTH_SUBJECTS).
*/
private static Set<String> authorizedNames = new HashSet<>();
/**
- * The FQDN of the initially "active" provisioning server in this Data Router ecosystem
+ * The FQDN of the initially "active" provisioning server in this Data Router ecosystem.
*/
private static String initialActivePod;
/**
- * The FQDN of the initially "standby" provisioning server in this Data Router ecosystem
+ * The FQDN of the initially "standby" provisioning server in this Data Router ecosystem.
*/
private static String initialStandbyPod;
/**
- * The FQDN of this provisioning server in this Data Router ecosystem
+ * The FQDN of this provisioning server in this Data Router ecosystem.
*/
private static String thisPod;
/**
- * "Timer 1" - used to determine when to notify nodes of provisioning changes
+ * "Timer 1" - used to determine when to notify nodes of provisioning changes.
*/
private static long pokeTimer1;
/**
- * "Timer 2" - used to determine when to notify nodes of provisioning changes
+ * "Timer 2" - used to determine when to notify nodes of provisioning changes.
*/
private static long pokeTimer2;
/**
- * Array of nodes names and/or FQDNs
+ * Array of nodes names and/or FQDNs.
*/
private static String[] nodes = new String[0];
/**
- * [DATARTR-27] Poke all the DR nodes : Array of nodes names and/or FQDNs
+ * [DATARTR-27] Poke all the DR nodes : Array of nodes names and/or FQDNs.
*/
private static String[] drnodes = new String[0];
/**
- * Array of node IP addresses
+ * Array of node IP addresses.
*/
private static InetAddress[] nodeAddresses = new InetAddress[0];
/**
- * Array of POD IP addresses
+ * Array of POD IP addresses.
*/
private static InetAddress[] podAddresses = new InetAddress[0];
/**
- * The maximum number of feeds allowed; pulled from the DB (PROV_MAXFEED_COUNT)
+ * The maximum number of feeds allowed; pulled from the DB (PROV_MAXFEED_COUNT).
*/
static int maxFeeds = 0;
/**
- * The maximum number of subscriptions allowed; pulled from the DB (PROV_MAXSUB_COUNT)
+ * The maximum number of subscriptions allowed; pulled from the DB (PROV_MAXSUB_COUNT).
*/
static int maxSubs = 0;
/**
- * The current number of feeds in the system
+ * The current number of feeds in the system.
*/
static int activeFeeds = 0;
/**
- * The current number of subscriptions in the system
+ * The current number of subscriptions in the system.
*/
static int activeSubs = 0;
/**
- * The domain used to generate a FQDN from the "bare" node names
+ * The domain used to generate a FQDN from the "bare" node names.
*/
private static String provDomain = "web.att.com";
/**
- * The standard FQDN of the provisioning server in this Data Router ecosystem
+ * The standard FQDN of the provisioning server in this Data Router ecosystem.
*/
private static String provName = "feeds-drtr.web.att.com";
/**
- * The standard FQDN of the ACTIVE provisioning server in this Data Router ecosystem
+ * The standard FQDN of the ACTIVE_POD provisioning server in this Data Router ecosystem.
*/
private static String activeProvName = "feeds-drtr.web.att.com";
- //Adding new param for static Routing - Rally:US664862-1610
- private static String staticRoutingNodes = STATIC_ROUTING_NODES;
-
/**
- * This logger is used to log provisioning events
+ * This logger is used to log provisioning events.
*/
protected static EELFLogger eventlogger;
/**
@@ -232,21 +255,17 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
*/
protected static EELFLogger intlogger;
/**
- * Authorizer - interface to the Policy Engine
+ * Authorizer - interface to the Policy Engine.
*/
protected static Authorizer authz;
/**
- * The Synchronizer used to sync active DB to standby one
+ * The Synchronizer used to sync active DB to standby one.
*/
private static SynchronizerTask synctask = null;
//Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
private InetAddress thishost;
private InetAddress loopback;
- private static Boolean mailSendFlag = false;
-
- private static final String MAILCONFIG_FILE = "mail.properties";
- private static Properties mailprops;
//DMAAP-597 (Tech Dept) REST request source IP auth relaxation to accommodate OOM kubernetes deploy
private static String isAddressAuthEnabled = (new DB()).getProperties()
@@ -259,10 +278,10 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
* Initialize data common to all the provisioning server servlets.
*/
protected BaseServlet() {
- if(eventlogger == null) {
- this.eventlogger = EELFManager.getInstance().getLogger("EventLog");
+ if (eventlogger == null) {
+ eventlogger = EELFManager.getInstance().getLogger("EventLog");
}
- if(intlogger == null) {
+ if (intlogger == null) {
this.intlogger = EELFManager.getInstance().getLogger("InternalLog");
}
if (authz == null) {
@@ -285,9 +304,8 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
try {
thishost = InetAddress.getLocalHost();
loopback = InetAddress.getLoopbackAddress();
- //checkHttpsRelaxation(); //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
} catch (UnknownHostException e) {
- // ignore
+ intlogger.info("BaseServlet.init: " + e.getMessage(), e);
}
}
@@ -304,7 +322,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
}
/**
- * Read the request's input stream and return a JSONObject from it
+ * Read the request's input stream and return a JSONObject from it.
*
* @param req the HTTP request
* @return the JSONObject, or null if the stream cannot be parsed
@@ -323,35 +341,40 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
}
/**
- * This method encrypt/decrypt the key in the JSON passed by user request inside the authorisation header object in request before logging the JSON.
+ * This method encrypt/decrypt the key in the JSON passed by user request inside the authorisation
+ * header object in request before logging the JSON.
*
- * @param jo- the JSON passed in http request.
- * @param maskKey- the key to be masked in the JSON passed.
- * @param action- whether to mask the key or unmask it in a JSON passed.
+ * @param jo the JSON passed in http request.
+ * @param maskKey the key to be masked in the JSON passed.
+ * @param action whether to mask the key or unmask it in a JSON passed.
* @return the JSONObject, or null if the stream cannot be parsed.
*/
- public static JSONObject maskJSON(JSONObject jo, String maskKey, boolean action) {
+ static JSONObject maskJSON(JSONObject jo, String maskKey, boolean action) {
if (!jo.isNull("authorization")) {
- JSONObject j2 = jo.getJSONObject("authorization");
- JSONArray ja = j2.getJSONArray("endpoint_ids");
- for (int i = 0; i < ja.length(); i++) {
- if ((!ja.getJSONObject(i).isNull(maskKey))) {
- String password = ja.getJSONObject(i).get(maskKey).toString();
- try {
- if (action) {
- ja.getJSONObject(i).put(maskKey, PasswordProcessor.encrypt(password));
- } else {
- ja.getJSONObject(i).put(maskKey, PasswordProcessor.decrypt(password));
- }
- } catch (JSONException | GeneralSecurityException e) {
- intlogger.info("Error reading JSON while masking: " + e);
- }
+ JSONArray endpointIds = jo.getJSONObject("authorization").getJSONArray("endpoint_ids");
+ for (int index = 0; index < endpointIds.length(); index++) {
+ if ((!endpointIds.getJSONObject(index).isNull(maskKey))) {
+ String password = endpointIds.getJSONObject(index).get(maskKey).toString();
+ processPassword(maskKey, action, endpointIds, index, password);
}
}
}
return jo;
}
+ private static void processPassword(String maskKey, boolean action, JSONArray endpointIds, int index,
+ String password) {
+ try {
+ if (action) {
+ endpointIds.getJSONObject(index).put(maskKey, PasswordProcessor.encrypt(password));
+ } else {
+ endpointIds.getJSONObject(index).put(maskKey, PasswordProcessor.decrypt(password));
+ }
+ } catch (JSONException | GeneralSecurityException e) {
+ intlogger.info("Error reading JSON while masking: " + e);
+ }
+ }
+
/**
* Check if the remote host is authorized to perform provisioning. Is the request secure? Is it coming from an
* authorized IP address or network (configured via PROV_AUTH_ADDRESSES)? Does it have a valid client certificate
@@ -368,20 +391,9 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
if (requireSecure && !request.isSecure()) {
return "Request must be made over an HTTPS connection.";
}
- // Is remote IP authorized?
- String remote = request.getRemoteAddr();
- try {
- boolean found = false;
- InetAddress ip = InetAddress.getByName(remote);
- for (String addrnet : authorizedAddressesAndNetworks) {
- found |= addressMatchesNetwork(ip, addrnet);
- }
- if (!found) {
- return "Unauthorized address: " + remote;
- }
- } catch (UnknownHostException e) {
- intlogger.error("PROV0051 BaseServlet.isAuthorizedForProvisioning: ", e.getMessage());
- return "Unauthorized address: " + remote;
+ String remoteHostCheck = checkRemoteHostAuthorization(request);
+ if (remoteHostCheck != null) {
+ return remoteHostCheck;
}
// Does remote have a valid certificate?
if (requireCert) {
@@ -400,6 +412,26 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
return null;
}
+ @Nullable
+ private String checkRemoteHostAuthorization(HttpServletRequest request) {
+ // Is remote IP authorized?
+ String remote = request.getRemoteAddr();
+ try {
+ boolean found = false;
+ InetAddress ip = InetAddress.getByName(remote);
+ for (String addrnet : authorizedAddressesAndNetworks) {
+ found |= addressMatchesNetwork(ip, addrnet);
+ }
+ if (!found) {
+ return "Unauthorized address: " + remote;
+ }
+ } catch (UnknownHostException e) {
+ intlogger.error("PROV0051 BaseServlet.isAuthorizedForProvisioning: " + e.getMessage(), e);
+ return "Unauthorized address: " + remote;
+ }
+ return null;
+ }
+
/**
* Check if the remote IP address is authorized to see the /internal URL tree.
*
@@ -413,23 +445,23 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
}
InetAddress ip = InetAddress.getByName(request.getRemoteAddr());
for (InetAddress node : getNodeAddresses()) {
- if (node != null && ip.equals(node)) {
+ if (ip.equals(node)) {
return true;
}
}
for (InetAddress pod : getPodAddresses()) {
- if (pod != null && ip.equals(pod)) {
+ if (ip.equals(pod)) {
return true;
}
}
- if (thishost != null && ip.equals(thishost)) {
+ if (ip.equals(thishost)) {
return true;
}
- if (loopback != null && ip.equals(loopback)) {
+ if (ip.equals(loopback)) {
return true;
}
} catch (UnknownHostException e) {
- intlogger.error("PROV0052 BaseServlet.isAuthorizedForInternal: ", e.getMessage());
+ intlogger.error("PROV0052 BaseServlet.isAuthorizedForInternal: " + e.getMessage(), e);
}
return false;
}
@@ -443,7 +475,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
*/
private static boolean addressMatchesNetwork(InetAddress ip, String s) {
int mlen = -1;
- int n = s.indexOf("/");
+ int n = s.indexOf('/');
if (n >= 0) {
mlen = Integer.parseInt(s.substring(n + 1));
s = s.substring(0, n);
@@ -473,7 +505,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
}
}
} catch (UnknownHostException e) {
- intlogger.error("PROV0053 BaseServlet.addressMatchesNetwork: ", e.getMessage());
+ intlogger.error("PROV0053 BaseServlet.addressMatchesNetwork: " + e.getMessage(), e);
return false;
}
return true;
@@ -503,23 +535,23 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
maxSubs = getInt(map, Parameters.PROV_MAXSUB_COUNT, DEFAULT_MAX_SUBS);
pokeTimer1 = getInt(map, Parameters.PROV_POKETIMER1, DEFAULT_POKETIMER1);
pokeTimer2 = getInt(map, Parameters.PROV_POKETIMER2, DEFAULT_POKETIMER2);
- /**
- * The domain used to generate a FQDN from the "bare" node names
- */
+
+ // The domain used to generate a FQDN from the "bare" node names
provDomain = getString(map, Parameters.PROV_DOMAIN, DEFAULT_DOMAIN);
provName = getString(map, Parameters.PROV_NAME, DEFAULT_PROVSRVR_NAME);
activeProvName = getString(map, Parameters.PROV_ACTIVE_NAME, provName);
initialActivePod = getString(map, Parameters.ACTIVE_POD, "");
initialStandbyPod = getString(map, Parameters.STANDBY_POD, "");
- staticRoutingNodes = getString(map, Parameters.STATIC_ROUTING_NODES,
- ""); //Adding new param for static Routing - Rally:US664862-1610
+
+ //Adding new param for static Routing - Rally:US664862-1610
+ String staticRoutingNodes = getString(map, Parameters.STATIC_ROUTING_NODES, "");
activeFeeds = Feed.countActiveFeeds();
activeSubs = Subscription.countActiveSubscriptions();
try {
thisPod = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
thisPod = "";
- intlogger.warn("PROV0014 Cannot determine the name of this provisioning server.");
+ intlogger.warn("PROV0014 Cannot determine the name of this provisioning server.", e);
}
// Normalize the nodes, and fill in nodeAddresses
@@ -530,7 +562,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
intlogger.debug("PROV0003 DNS lookup: " + nodes[i] + " => " + na[i].toString());
} catch (UnknownHostException e) {
na[i] = null;
- intlogger.warn("PROV0004 Cannot lookup " + nodes[i] + ": " + e.getMessage());
+ intlogger.warn("PROV0004 Cannot lookup " + nodes[i] + ": " + e.getMessage(), e);
}
}
@@ -558,7 +590,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
intlogger.debug("PROV0003 DNS lookup: " + pods[i] + " => " + na[i].toString());
} catch (UnknownHostException e) {
na[i] = null;
- intlogger.warn("PROV0004 Cannot lookup " + pods[i] + ": " + e.getMessage());
+ intlogger.warn("PROV0004 Cannot lookup " + pods[i] + ": " + e.getMessage(), e);
}
}
podAddresses = na;
@@ -572,96 +604,11 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
}
}
-
- /**
- * Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047. Load mail properties.
- *
- * @author vs215k
- **/
- private void loadMailProperties() {
- if (mailprops == null) {
- mailprops = new Properties();
- try (InputStream inStream = getClass().getClassLoader().getResourceAsStream(MAILCONFIG_FILE)) {
- mailprops.load(inStream);
- } catch (IOException e) {
- intlogger.error("PROV9003 Opening properties: " + e.getMessage());
- System.exit(1);
- }
- }
- }
-
- /**
- * Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047. Check if HTTPS Relexaction is enabled
- *
- * @author vs215k
- **/
- private void checkHttpsRelaxation() {
- if (!mailSendFlag) {
- Properties p = (new DB()).getProperties();
- intlogger.info("HTTPS relaxation: " + p.get("org.onap.dmaap.datarouter.provserver.https.relaxation"));
-
- if (p.get("org.onap.dmaap.datarouter.provserver.https.relaxation").equals("true")) {
- try {
- notifyPSTeam(p.get("org.onap.dmaap.datarouter.provserver.https.relax.notify").toString());
- } catch (Exception e) {
- intlogger.warn("Exception: " + e.getMessage());
- }
- }
- mailSendFlag = true;
- }
- }
-
- /**
- * Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
- *
- * @param email - list of email ids to notify if HTTP relexcation is enabled.
- * @author vs215k
- **/
- private void notifyPSTeam(String email) throws Exception {
- loadMailProperties(); //Load HTTPS Relex mail properties.
- String[] emails = email.split(Pattern.quote("|"));
-
- Properties mailproperties = new Properties();
- mailproperties.put("mail.smtp.host", mailprops.get("com.att.dmaap.datarouter.mail.server"));
- mailproperties.put("mail.transport.protocol", mailprops.get("com.att.dmaap.datarouter.mail.protocol"));
-
- Session session = Session.getDefaultInstance(mailproperties, null);
- Multipart mp = new MimeMultipart();
- MimeBodyPart htmlPart = new MimeBodyPart();
-
- try {
-
- Message msg = new MimeMessage(session);
- msg.setFrom(new InternetAddress(mailprops.get("com.att.dmaap.datarouter.mail.from").toString()));
-
- InternetAddress[] addressTo = new InternetAddress[emails.length];
- for (int x = 0; x < emails.length; x++) {
- addressTo[x] = new InternetAddress(emails[x]);
- }
-
- msg.addRecipients(Message.RecipientType.TO, addressTo);
- msg.setSubject(mailprops.get("com.att.dmaap.datarouter.mail.subject").toString());
- htmlPart.setContent(mailprops.get("com.att.dmaap.datarouter.mail.body").toString()
- .replace("[SERVER]", InetAddress.getLocalHost().getHostName()), "text/html");
- mp.addBodyPart(htmlPart);
- msg.setContent(mp);
-
- System.out.println(mailprops.get("com.att.dmaap.datarouter.mail.body").toString()
- .replace("[SERVER]", InetAddress.getLocalHost().getHostName()));
-
- Transport.send(msg);
- intlogger.info("HTTPS relaxation mail is sent to - : " + email);
-
- } catch (MessagingException e) {
- intlogger.error("Invalid email address, unable to send https relaxation mail to - : " + email);
- }
- }
-
public static String getProvName() {
return provName;
}
- public static String getActiveProvName() {
+ static String getActiveProvName() {
return activeProvName;
}
@@ -675,21 +622,11 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
}
/**
- * [DATARTR-27] Poke all the DR nodes
- * Get an array of all node names in the DR network.
- *
- * @return an array of Strings
- */
- public static String[] getDRNodes() {
- return drnodes;
- }
-
- /**
* Get an array of all node InetAddresses in the DR network.
*
* @return an array of InetAddresses
*/
- public static InetAddress[] getNodeAddresses() {
+ private static InetAddress[] getNodeAddresses() {
return nodeAddresses;
}
@@ -712,7 +649,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
}
/**
- * Gets the FQDN of the initially ACTIVE provisioning server (POD). Note: this used to be called isActivePOD(),
+ * Gets the FQDN of the initially ACTIVE_POD provisioning server (POD). Note: this used to be called isActivePOD(),
* however, that is a misnomer, as the active status could shift to the standby POD without these parameters
* changing. Hence, the function names have been changed to more accurately reflect their purpose.
*
@@ -723,7 +660,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
}
/**
- * Gets the FQDN of the initially STANDBY provisioning server (POD). Note: this used to be called isStandbyPOD(),
+ * Gets the FQDN of the initially STANDBY_POD provisioning server (POD). Note: this used to be called isStandbyPOD(),
* however, that is a misnomer, as the standby status could shift to the active POD without these parameters
* changing. Hence, the function names have been changed to more accurately reflect their purpose.
*
@@ -748,7 +685,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
rv = bean.doInsert(conn);
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0005 doInsert: " + e.getMessage());
+ intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e);
} finally {
if (conn != null) {
db.release(conn);
@@ -772,7 +709,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
rv = bean.doUpdate(conn);
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e);
} finally {
if (conn != null) {
db.release(conn);
@@ -796,7 +733,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
rv = bean.doDelete(conn);
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0007 doDelete: " + e.getMessage());
+ intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e);
} finally {
if (conn != null) {
db.release(conn);
@@ -807,7 +744,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
private static boolean getBoolean(Map<String, String> map, String name) {
String s = map.get(name);
- return (s != null) && s.equalsIgnoreCase("true");
+ return "true".equalsIgnoreCase(s);
}
private static String getString(Map<String, String> map, String name, String dflt) {
@@ -847,7 +784,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
*/
public class ContentHeader {
- private String type = "";
+ private String type;
private Map<String, String> map = new HashMap<>();
ContentHeader() {
@@ -863,7 +800,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
return type;
}
- public String getAttribute(String key) {
+ String getAttribute(String key) {
String s = map.get(key);
if (s == null) {
s = "";
@@ -959,7 +896,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
return true;
}
} catch (JSONException e) {
- intlogger.error("JSONException: " + e.getMessage());
+ intlogger.error("JSONException: " + e.getMessage(), e);
}
}
return false;
@@ -969,19 +906,17 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
/*
* @Method - getGroupByFeedGroupId- Rally:US708115
* @Params - User to check in group and feedid which is assigned the group.
- * @return - string value grupid/null
+ * @return - string value groupid/null
*/
@Override
public String getGroupByFeedGroupId(String owner, String feedId) {
try {
- int n = Integer.parseInt(feedId);
- Feed f = Feed.getFeedById(n);
+ Feed f = Feed.getFeedById(Integer.parseInt(feedId));
if (f != null) {
int groupid = f.getGroupid();
if (groupid > 0) {
Group group = Group.getGroupById(groupid);
- assert group != null;
- if (isUserMemberOfGroup(group, owner)) {
+ if (group != null && isUserMemberOfGroup(group, owner)) {
return group.getAuthid();
}
}
@@ -995,7 +930,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
/*
* @Method - getGroupBySubGroupId - Rally:US708115
* @Params - User to check in group and subid which is assigned the group.
- * @return - string value grupid/null
+ * @return - string value groupid/null
*/
@Override
public String getGroupBySubGroupId(String owner, String subId) {
@@ -1006,8 +941,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
int groupid = s.getGroupid();
if (groupid > 0) {
Group group = Group.getGroupById(groupid);
- assert group != null;
- if (isUserMemberOfGroup(group, owner)) {
+ if (group != null && isUserMemberOfGroup(group, owner)) {
return group.getAuthid();
}
}
@@ -1048,7 +982,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
MDC.put(MDC_SERVER_FQDN, InetAddress.getLocalHost().getHostName());
MDC.put(MDC_SERVER_IP_ADDRESS, InetAddress.getLocalHost().getHostAddress());
} catch (Exception e) {
- intlogger.error("Exception: " + e.getMessage());
+ intlogger.error("Exception: " + e.getMessage(), e);
}
}
@@ -1086,12 +1020,12 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
default:
action = "*";
}
- if (aafInstance == null || aafInstance.equals("")) {
+ if (aafInstance == null || "".equals(aafInstance)) {
aafInstance = props.getProperty(AAF_INSTANCE, "org.onap.dmaap-dr.NoInstanceDefined");
}
return type + "|" + aafInstance + "|" + action;
} catch (Exception e) {
- intlogger.error("PROV7005 BaseServlet.getFeedPermission: ", e.getMessage());
+ intlogger.error("PROV7005 BaseServlet.getFeedPermission: " + e.getMessage(), e);
}
return null;
}
@@ -1134,12 +1068,12 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
default:
action = "*";
}
- if (aafInstance == null || aafInstance.equals("")) {
+ if (aafInstance == null || "".equals(aafInstance)) {
aafInstance = props.getProperty(AAF_INSTANCE, "org.onap.dmaap-dr.NoInstanceDefined");
}
return type + "|" + aafInstance + "|" + action;
} catch (Exception e) {
- intlogger.error("PROV7005 BaseServlet.getSubscriberPermission: ", e.getMessage());
+ intlogger.error("PROV7005 BaseServlet.getSubscriberPermission: " + e.getMessage(), e);
}
return null;
}
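Note: among the BaseServlet changes above, maskJSON now delegates the per-entry encrypt/decrypt step to processPassword while keeping the same walk over authorization.endpoint_ids. A self-contained sketch of that masking pattern follows; Base64 stands in for the real PasswordProcessor (not shown in this hunk), so treat the transformation itself as an illustrative assumption.

    import java.util.Base64;
    import org.json.JSONArray;
    import org.json.JSONObject;

    public class MaskDemo {

        /** Masks (action=true) or unmasks (action=false) the given key inside authorization.endpoint_ids. */
        static JSONObject maskJson(JSONObject jo, String maskKey, boolean action) {
            if (!jo.isNull("authorization")) {
                JSONArray endpointIds = jo.getJSONObject("authorization").getJSONArray("endpoint_ids");
                for (int index = 0; index < endpointIds.length(); index++) {
                    if (!endpointIds.getJSONObject(index).isNull(maskKey)) {
                        String password = endpointIds.getJSONObject(index).get(maskKey).toString();
                        String replacement = action
                                ? Base64.getEncoder().encodeToString(password.getBytes())
                                : new String(Base64.getDecoder().decode(password));
                        endpointIds.getJSONObject(index).put(maskKey, replacement);
                    }
                }
            }
            return jo;
        }
    }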
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/DRFeedsServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/DRFeedsServlet.java
index 86e0268d..960d5094 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/DRFeedsServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/DRFeedsServlet.java
@@ -109,8 +109,8 @@ public class DRFeedsServlet extends ProxyServlet {
}
// Note: I think this should be getPathInfo(), but that doesn't work (Jetty bug?)
String path = req.getRequestURI();
- if (path != null && !path.equals("/")) {
- message = "Bad URL.";
+ if (path != null && !"/".equals(path)) {
+ message = BAD_URL;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -120,7 +120,7 @@ public class DRFeedsServlet extends ProxyServlet {
// Check with the Authorizer
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -150,7 +150,7 @@ public class DRFeedsServlet extends ProxyServlet {
try {
resp.getOutputStream().print(feed.asJSONObject(true).toString());
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0111 DRFeedServlet.doGet " + ioe.getMessage(), ioe);
}
}
} else {
@@ -174,7 +174,7 @@ public class DRFeedsServlet extends ProxyServlet {
try {
resp.getOutputStream().print(t);
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0112 DRFeedServlet.doGet " + ioe.getMessage(), ioe);
}
}
} finally {
@@ -236,8 +236,8 @@ public class DRFeedsServlet extends ProxyServlet {
}
// Note: I think this should be getPathInfo(), but that doesn't work (Jetty bug?)
String path = req.getRequestURI();
- if (path != null && !path.equals("/")) {
- message = "Bad URL.";
+ if (path != null && !"/".equals(path)) {
+ message = BAD_URL;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -247,7 +247,7 @@ public class DRFeedsServlet extends ProxyServlet {
// check content type is FEED_CONTENT_TYPE, version 1.0
ContentHeader ch = getContentHeader(req);
String ver = ch.getAttribute("version");
- if (!ch.getType().equals(FEED_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
+ if (!ch.getType().equals(FEED_BASECONTENT_TYPE) || !("1.0".equals(ver) || "2.0".equals(ver))) {
message = "Incorrect content-type";
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
@@ -257,7 +257,7 @@ public class DRFeedsServlet extends ProxyServlet {
}
JSONObject jo = getJSONfromInput(req);
if (jo == null) {
- message = "Badly formed JSON";
+ message = BAD_JSON;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -283,7 +283,7 @@ public class DRFeedsServlet extends ProxyServlet {
message = e.getMessage();
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.error(elr.toString());
+ eventlogger.error(elr.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
return;
}
@@ -295,11 +295,11 @@ public class DRFeedsServlet extends ProxyServlet {
*/
String aafInstance = feed.getAafInstance();
if (Boolean.parseBoolean(isCadiEnabled)) {
- if ((aafInstance == null || aafInstance.equals("") || (aafInstance.equalsIgnoreCase("legacy")) && req.getHeader(EXCLUDE_AAF_HEADER).equalsIgnoreCase("true"))) {
+ if ((aafInstance == null || "".equals(aafInstance) || ("legacy".equalsIgnoreCase(aafInstance)) && "true".equalsIgnoreCase(req.getHeader(EXCLUDE_AAF_HEADER)))) {
// Check with the Authorizer
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -307,7 +307,7 @@ public class DRFeedsServlet extends ProxyServlet {
return;
}
} else {
- if (req.getHeader(EXCLUDE_AAF_HEADER).equalsIgnoreCase("true")) {
+ if ("true".equalsIgnoreCase(req.getHeader(EXCLUDE_AAF_HEADER))) {
message = "DRFeedsServlet.doPost() -Invalid request exclude_AAF should not be true if passing AAF_Instance value= " + aafInstance;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
@@ -329,7 +329,7 @@ public class DRFeedsServlet extends ProxyServlet {
} else {
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -365,7 +365,7 @@ public class DRFeedsServlet extends ProxyServlet {
try {
resp.getOutputStream().print(feed.asLimitedJSONObject().toString());
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0113 DRFeedServlet.doPost " + ioe.getMessage(), ioe);
}
provisioningDataChanged();
} else {
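Note: a recurring change in the servlet hunks above is flipping string comparisons to the constant-first form ("1.0".equals(ver), "true".equalsIgnoreCase(header)) so that a missing header or attribute yields false instead of a NullPointerException. A tiny illustration of the difference, using a hypothetical missing "version" attribute:

    public class NullSafeEquals {
        public static void main(String[] args) {
            String ver = null; // e.g. the Content-Type header carried no "version" attribute

            // Constant-first comparison: evaluates to false when ver is null.
            System.out.println("1.0".equals(ver) || "2.0".equals(ver));

            // Variable-first comparison would throw a NullPointerException here:
            // System.out.println(ver.equals("1.0") || ver.equals("2.0"));
        }
    }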
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/FeedServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/FeedServlet.java
index 3cbaac3c..e1938cd8 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/FeedServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/FeedServlet.java
@@ -82,7 +82,7 @@ public class FeedServlet extends ProxyServlet {
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
+ message = MISSING_ON_BEHALF;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -91,7 +91,7 @@ public class FeedServlet extends ProxyServlet {
}
int feedid = getIdFromPath(req);
if (feedid < 0) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -100,7 +100,7 @@ public class FeedServlet extends ProxyServlet {
}
Feed feed = Feed.getFeedById(feedid);
if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -113,10 +113,10 @@ public class FeedServlet extends ProxyServlet {
* CADI code - check on permissions based on Legacy/AAF users to allow to delete/remove feed
*/
String aafInstance = feed.getAafInstance();
- if (aafInstance == null || aafInstance.equals("") || aafInstance.equalsIgnoreCase("legacy")) {
+ if (aafInstance == null || "".equals(aafInstance) || "legacy".equalsIgnoreCase(aafInstance)) {
AuthorizationResponse aresp = authz.decide(req);
if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -183,7 +183,7 @@ public class FeedServlet extends ProxyServlet {
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
+ message = MISSING_ON_BEHALF;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -192,7 +192,7 @@ public class FeedServlet extends ProxyServlet {
}
int feedid = getIdFromPath(req);
if (feedid < 0) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -201,7 +201,7 @@ public class FeedServlet extends ProxyServlet {
}
Feed feed = Feed.getFeedById(feedid);
if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -211,7 +211,7 @@ public class FeedServlet extends ProxyServlet {
// Check with the Authorizer
AuthorizationResponse aresp = authz.decide(req);
if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -227,7 +227,7 @@ public class FeedServlet extends ProxyServlet {
try {
resp.getOutputStream().print(feed.asJSONObject(true).toString());
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0101 FeedServlet.doGet: " + ioe.getMessage(), ioe);
}
} finally {
eelfLogger.info(EelfMsgs.EXIT);
@@ -259,7 +259,7 @@ public class FeedServlet extends ProxyServlet {
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
+ message = MISSING_ON_BEHALF;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -268,7 +268,7 @@ public class FeedServlet extends ProxyServlet {
}
int feedid = getIdFromPath(req);
if (feedid < 0) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -277,7 +277,7 @@ public class FeedServlet extends ProxyServlet {
}
Feed oldFeed = Feed.getFeedById(feedid);
if (oldFeed == null || oldFeed.isDeleted()) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -287,7 +287,7 @@ public class FeedServlet extends ProxyServlet {
// check content type is FEED_CONTENT_TYPE, version 1.0
ContentHeader ch = getContentHeader(req);
String ver = ch.getAttribute("version");
- if (!ch.getType().equals(FEED_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
+ if (!ch.getType().equals(FEED_BASECONTENT_TYPE) || !("1.0".equals(ver) || "2.0".equals(ver))) {
message = "Incorrect content-type";
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
@@ -297,7 +297,7 @@ public class FeedServlet extends ProxyServlet {
}
JSONObject jo = getJSONfromInput(req);
if (jo == null) {
- message = "Badly formed JSON";
+ message = BAD_JSON;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -313,7 +313,7 @@ public class FeedServlet extends ProxyServlet {
message = e.getMessage();
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.error(elr.toString());
+ eventlogger.error(elr.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
return;
}
@@ -355,11 +355,11 @@ public class FeedServlet extends ProxyServlet {
* CADI code - check on permissions based on Legacy/AAF users to allow feed edit/update/modify
*/
String aafInstance = feed.getAafInstance();
- if (aafInstance == null || aafInstance.equals("") || aafInstance.equalsIgnoreCase("legacy")) {
+ if (aafInstance == null || "".equals(aafInstance) || "legacy".equalsIgnoreCase(aafInstance)) {
// Check with the Authorizer
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -392,7 +392,7 @@ public class FeedServlet extends ProxyServlet {
try {
resp.getOutputStream().print(feed.asLimitedJSONObject().toString());
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0102 FeedServlet.doPut: " + ioe.getMessage(), ioe);
}
@@ -405,7 +405,7 @@ public class FeedServlet extends ProxyServlet {
feed.changeOwnerShip();
}
} catch (JSONException je) {
- eventlogger.error("JSONException" + je.getMessage());
+ eventlogger.error("PROV0103 FeedServlet.doPut: " + je.getMessage(), je);
}
}
/***End of change ownership*/
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/GroupServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/GroupServlet.java
index 188dce61..4432913f 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/GroupServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/GroupServlet.java
@@ -88,31 +88,6 @@ public class GroupServlet extends ProxyServlet {
return;
}
- // Check with the Authorizer
- /*AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.error(elr.toString());
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }*/
-
-
- /*ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(GROUPLIST_CONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
- intlogger.debug("Content-type is: "+req.getHeader("Content-Type"));
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.error(elr.toString());
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }*/
-
-
int groupid = getIdFromPath(req);
if (groupid < 0) {
message = "Missing or bad group number.";
@@ -132,19 +107,9 @@ public class GroupServlet extends ProxyServlet {
try {
resp.getOutputStream().print(gup.asJSONObject().toString());
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0121 GroupServlet.doGet: " + ioe.getMessage(), ioe);
}
- // Display a list of Groups
- /*Collection<Group> list = Group.getGroupById(groupid);
- String t = JSONUtilities.createJSONArray(list);
-
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr.toString());
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(GROUPLIST_CONTENT_TYPE);
- resp.getOutputStream().print(t);*/
}
/**
* PUT on the &lt;GROUPS&gt; -- not supported.
@@ -191,20 +156,11 @@ public class GroupServlet extends ProxyServlet {
sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, message, eventlogger);
return;
}
- // Check with the Authorizer
- /*AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.error(elr.toString());
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }*/
+
// check content type is SUB_CONTENT_TYPE, version 1.0
ContentHeader ch = getContentHeader(req);
String ver = ch.getAttribute("version");
- if (!ch.getType().equals(GROUP_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
+ if (!ch.getType().equals(GROUP_BASECONTENT_TYPE) || !("1.0".equals(ver) || "2.0".equals(ver))) {
message = "Incorrect content-type";
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
@@ -214,7 +170,7 @@ public class GroupServlet extends ProxyServlet {
}
JSONObject jo = getJSONfromInput(req);
if (jo == null) {
- message = "Badly formed JSON";
+ message = BAD_JSON;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -230,7 +186,7 @@ public class GroupServlet extends ProxyServlet {
message = e.getMessage();
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.error(elr.toString());
+ eventlogger.error(elr.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
return;
}
@@ -253,7 +209,7 @@ public class GroupServlet extends ProxyServlet {
try {
resp.getOutputStream().print(gup.asJSONObject().toString());
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0122 GroupServlet.doPut: " + ioe.getMessage(), ioe);
}
provisioningDataChanged();
} else {
@@ -292,34 +248,6 @@ public class GroupServlet extends ProxyServlet {
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
return;
}
- /*int feedid = getIdFromPath(req);
- if (feedid < 0) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr.toString());
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Feed feed = Feed.getFeedById(feedid);
- if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr.toString());
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }*/
- // Check with the Authorizer
- /*AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr.toString());
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }*/
// check content type is SUB_CONTENT_TYPE, version 1.0
ContentHeader ch = getContentHeader(req);
@@ -335,7 +263,7 @@ public class GroupServlet extends ProxyServlet {
}
JSONObject jo = getJSONfromInput(req);
if (jo == null) {
- message = "Badly formed JSON";
+ message = BAD_JSON;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -352,7 +280,7 @@ public class GroupServlet extends ProxyServlet {
message = e.getMessage();
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.error(elr.toString());
+ eventlogger.error(elr.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
return;
}
@@ -379,7 +307,7 @@ public class GroupServlet extends ProxyServlet {
try {
resp.getOutputStream().print(gup.asJSONObject().toString());
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0122 GroupServlet.doPost: " + ioe.getMessage(), ioe);
}
provisioningDataChanged();
} else {
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java
index 56b40e04..8ae9fa20 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java
@@ -156,6 +156,7 @@ import static org.onap.dmaap.datarouter.provisioning.utils.HttpServletUtils.send
@SuppressWarnings("serial")
public class InternalServlet extends ProxyServlet {
+
private static final Object lock = new Object();
private static Integer logseq = 0; // another piece of info to make log spool file names unique
//Adding EELF Logger Rally:US664892
@@ -174,15 +175,15 @@ public class InternalServlet extends ProxyServlet {
eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER), getIdFromPath(req) + "");
EventLogRecord elr = new EventLogRecord(req);
if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
+ elr.setMessage(UNAUTHORIZED);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, "Unauthorized.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, UNAUTHORIZED, eventlogger);
return;
}
String path = req.getPathInfo();
- if (path.startsWith("/api/")) {
+ if (path.startsWith(API)) {
if (isProxyOK(req) && isProxyServer()) {
super.doDelete(req, resp);
return;
@@ -207,7 +208,7 @@ public class InternalServlet extends ProxyServlet {
}
}
}
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Bad URL.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, BAD_URL, eventlogger);
} finally {
eelfLogger.info(EelfMsgs.EXIT);
}
@@ -225,7 +226,7 @@ public class InternalServlet extends ProxyServlet {
eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER), getIdFromPath(req) + "");
String path = req.getPathInfo();
Properties props = (new DB()).getProperties();
- if (path.equals("/halt") && !req.isSecure()) {
+ if ("/halt".equals(path) && !req.isSecure()) {
// request to halt the server - can ONLY come from localhost
String remote = req.getRemoteAddr();
if (remote.equals(props.getProperty("org.onap.dmaap.datarouter.provserver.localhost"))) {
@@ -241,20 +242,20 @@ public class InternalServlet extends ProxyServlet {
EventLogRecord elr = new EventLogRecord(req);
if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
+ elr.setMessage(UNAUTHORIZED);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, "Unauthorized.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, UNAUTHORIZED, eventlogger);
return;
}
- if (path.equals("/fetchProv") && !req.isSecure()) {
+ if ("/fetchProv".equals(path) && !req.isSecure()) {
// if request came from active_pod or standby_pod and it is not us, reload prov data
SynchronizerTask s = SynchronizerTask.getSynchronizer();
s.doFetch();
resp.setStatus(HttpServletResponse.SC_OK);
return;
}
- if (path.equals("/prov")) {
+ if ("/prov".equals(path)) {
if (isProxyOK(req) && isProxyServer()) {
if (super.doGetWithFallback(req, resp)) {
return;
@@ -268,33 +269,33 @@ public class InternalServlet extends ProxyServlet {
try {
resp.getOutputStream().print(p.getProvisioningString());
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0131 InternalServlet.doGet: " + ioe.getMessage(), ioe);
}
return;
}
- if (path.equals("/logs") || path.equals("/logs/")) {
+ if ("/logs".equals(path) || LOGS.equals(path)) {
resp.setStatus(HttpServletResponse.SC_OK);
resp.setContentType("application/json");
try {
resp.getOutputStream().print(generateLogfileList().toString());
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0132 InternalServlet.doGet: " + ioe.getMessage(), ioe);
}
return;
}
- if (path.startsWith("/logs/")) {
+ if (path.startsWith(LOGS)) {
String logdir = props.getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir");
String logfile = path.substring(6);
if (logdir != null && logfile != null && logfile.indexOf('/') < 0) {
File log = new File(logdir + "/" + logfile);
if (log.exists() && log.isFile()) {
resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("text/plain");
+ resp.setContentType(TEXT_CT);
Path logpath = Paths.get(log.getAbsolutePath());
try {
Files.copy(logpath, resp.getOutputStream());
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0133 InternalServlet.doGet: " + ioe.getMessage(), ioe);
}
return;
}
@@ -302,7 +303,7 @@ public class InternalServlet extends ProxyServlet {
sendResponseError(resp, HttpServletResponse.SC_NO_CONTENT, "No file.", eventlogger);
return;
}
- if (path.startsWith("/api/")) {
+ if (path.startsWith(API)) {
if (isProxyOK(req) && isProxyServer()) {
super.doGet(req, resp);
return;
@@ -312,29 +313,29 @@ public class InternalServlet extends ProxyServlet {
Parameters param = Parameters.getParameter(key);
if (param != null) {
resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("text/plain");
+ resp.setContentType(TEXT_CT);
try {
resp.getOutputStream().print(param.getValue() + "\n");
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0134 InternalServlet.doGet: " + ioe.getMessage(), ioe);
}
return;
}
}
}
- if (path.equals("/drlogs") || path.equals("/drlogs/")) {
+ if ("/drlogs".equals(path) || "/drlogs/".equals(path)) {
// Special POD <=> POD API to determine what log file records are loaded here
LogfileLoader lfl = LogfileLoader.getLoader();
resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("text/plain");
+ resp.setContentType(TEXT_CT);
try {
resp.getOutputStream().print(lfl.getBitSet().toString());
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0135 InternalServlet.doGet: " + ioe.getMessage(), ioe);
}
return;
}
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Bad URL.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, BAD_URL, eventlogger);
} finally {
eelfLogger.info(EelfMsgs.EXIT);
}
@@ -352,14 +353,14 @@ public class InternalServlet extends ProxyServlet {
eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER), getIdFromPath(req) + "");
EventLogRecord elr = new EventLogRecord(req);
if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
+ elr.setMessage(UNAUTHORIZED);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, "Unauthorized.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, UNAUTHORIZED, eventlogger);
return;
}
String path = req.getPathInfo();
- if (path.startsWith("/api/")) {
+ if (path.startsWith(API)) {
if (isProxyOK(req) && isProxyServer()) {
super.doPut(req, resp);
return;
@@ -386,7 +387,7 @@ public class InternalServlet extends ProxyServlet {
}
}
}
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Bad URL.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, BAD_URL, eventlogger);
} finally {
eelfLogger.info(EelfMsgs.EXIT);
}
@@ -405,15 +406,15 @@ public class InternalServlet extends ProxyServlet {
eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
EventLogRecord elr = new EventLogRecord(req);
if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
+ elr.setMessage(UNAUTHORIZED);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, "Unauthorized.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, UNAUTHORIZED, eventlogger);
return;
}
String path = req.getPathInfo();
- if (path.startsWith("/api/")) {
+ if (path.startsWith(API)) {
if (isProxyOK(req) && isProxyServer()) {
super.doPost(req, resp);
return;
@@ -441,9 +442,9 @@ public class InternalServlet extends ProxyServlet {
}
}
- if (path.equals("/logs") || path.equals("/logs/")) {
+ if ("/logs".equals(path) || LOGS.equals(path)) {
String ctype = req.getHeader("Content-Type");
- if (ctype == null || !ctype.equals("text/plain")) {
+ if (ctype == null || !TEXT_CT.equals(ctype)) {
elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
elr.setMessage("Bad media type: " + ctype);
resp.setStatus(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
@@ -459,7 +460,7 @@ public class InternalServlet extends ProxyServlet {
}
String encoding = req.getHeader("Content-Encoding");
if (encoding != null) {
- if (encoding.trim().equals("gzip")) {
+ if ("gzip".equals(encoding.trim())) {
spoolname += ".gz";
} else {
elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
@@ -477,12 +478,13 @@ public class InternalServlet extends ProxyServlet {
total += store.getTotalSpace();
avail += store.getUsableSpace();
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0136 InternalServlet.doPost: " + ioe.getMessage(), ioe);
}
}
try {
fs.close();
} catch (Exception e) {
+ intlogger.error("PROV0137 InternalServlet.doPost: " + e.getMessage(), e);
}
if (((avail * 100) / total) < 5) {
elr.setResult(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
@@ -500,15 +502,15 @@ public class InternalServlet extends ProxyServlet {
eventlogger.info(elr.toString());
LogfileLoader.getLoader(); // This starts the logfile loader "task"
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0138 InternalServlet.doPost: " + ioe.getMessage(), ioe);
}
return;
}
- if (path.equals("/drlogs") || path.equals("/drlogs/")) {
+ if ("/drlogs".equals(path) || "/drlogs/".equals(path)) {
// Receive post request and generate log entries
String ctype = req.getHeader("Content-Type");
- if (ctype == null || !ctype.equals("text/plain")) {
+ if (ctype == null || !TEXT_CT.equals(ctype)) {
elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
elr.setMessage("Bad media type: " + ctype);
resp.setStatus(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
@@ -525,17 +527,17 @@ public class InternalServlet extends ProxyServlet {
RLEBitSet bs = new RLEBitSet(bos.toString()); // The set of records to retrieve
elr.setResult(HttpServletResponse.SC_OK);
resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("text/plain");
+ resp.setContentType(TEXT_CT);
LogRecord.printLogRecords(resp.getOutputStream(), bs);
eventlogger.info(elr.toString());
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0139 InternalServlet.doPost: " + ioe.getMessage(), ioe);
}
return;
}
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Bad URL.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, BAD_URL, eventlogger);
eventlogger.error(elr.toString());
} finally {
eelfLogger.info(EelfMsgs.EXIT);
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java
index 77bcbddc..762ab4e5 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java
@@ -68,6 +68,12 @@ public class LogServlet extends BaseServlet {
private static final long TWENTYFOUR_HOURS = (24 * 60 * 60 * 1000L);
private static final String FMT_1 = "yyyy-MM-dd'T'HH:mm:ss'Z'";
private static final String FMT_2 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+ private static final String PUBLISHSQL = "publishSQL";
+ private static final String STATUSSQL = "statusSQL";
+ private static final String RESULTSQL = "resultSQL";
+ private static final String FILENAMESQL = "filenameSQL";
+ private static final String TIMESQL = "timeSQL";
+ private static final String LOG_RECORDSSQL = "select * from LOG_RECORDS where FEEDID = ";
private final boolean isfeedlog;
@@ -226,7 +232,7 @@ public class LogServlet extends BaseServlet {
}
out.print("]");
} catch (IOException ioe) {
- eventlogger.error("IOException: " + ioe.getMessage());
+ eventlogger.error("PROV0141 LogServlet.doGet: " + ioe.getMessage(), ioe);
}
} finally {
eelfLogger.info(EelfMsgs.EXIT);
@@ -275,7 +281,7 @@ public class LogServlet extends BaseServlet {
Map<String, String> map = new HashMap<>();
String s = req.getParameter("type");
if (s != null) {
- if (s.equals("pub") || s.equals("del") || s.equals("exp")) {
+ if ("pub".equals(s) || "del".equals(s) || "exp".equals(s)) {
map.put("type", s);
} else {
map.put("err", "bad type");
@@ -284,11 +290,11 @@ public class LogServlet extends BaseServlet {
} else {
map.put("type", "all");
}
- map.put("publishSQL", "");
- map.put("statusSQL", "");
- map.put("resultSQL", "");
- map.put("reasonSQL", "");
- map.put("filenameSQL", "");
+ map.put(PUBLISHSQL, "");
+ map.put(STATUSSQL, "");
+ map.put(RESULTSQL, "");
+ map.put(REASON_SQL, "");
+ map.put(FILENAMESQL, "");
s = req.getParameter("publishId");
if (s != null) {
@@ -296,22 +302,22 @@ public class LogServlet extends BaseServlet {
map.put("err", "bad publishId");
return map;
}
- map.put("publishSQL", " AND PUBLISH_ID = '"+s+"'");
+ map.put(PUBLISHSQL, " AND PUBLISH_ID = '"+s+"'");
}
s = req.getParameter("filename");
if (s != null) {
- map.put("filenameSQL", " AND FILENAME = '"+s+"'");
+ map.put(FILENAMESQL, " AND FILENAME = '"+s+"'");
}
s = req.getParameter("statusCode");
if (s != null) {
String sql = null;
- if (s.equals("success")) {
+ if ("success".equals(s)) {
sql = " AND STATUS >= 200 AND STATUS < 300";
- } else if (s.equals("redirect")) {
+ } else if ("redirect".equals(s)) {
sql = " AND STATUS >= 300 AND STATUS < 400";
- } else if (s.equals("failure")) {
+ } else if ("failure".equals(s)) {
sql = " AND STATUS >= 400";
} else {
try {
@@ -325,21 +331,21 @@ public class LogServlet extends BaseServlet {
map.put("err", "bad statusCode");
return map;
}
- map.put("statusSQL", sql);
- map.put("resultSQL", sql.replaceAll("STATUS", "RESULT"));
+ map.put(STATUSSQL, sql);
+ map.put(RESULTSQL, sql.replaceAll("STATUS", "RESULT"));
}
s = req.getParameter("expiryReason");
if (s != null) {
map.put("type", "exp");
- if (s.equals("notRetryable")) {
- map.put("reasonSQL", " AND REASON = 'notRetryable'");
- } else if (s.equals("retriesExhausted")) {
- map.put("reasonSQL", " AND REASON = 'retriesExhausted'");
- } else if (s.equals("diskFull")) {
- map.put("reasonSQL", " AND REASON = 'diskFull'");
- } else if (s.equals("other")) {
- map.put("reasonSQL", " AND REASON = 'other'");
+ if ("notRetryable".equals(s)) {
+ map.put(REASON_SQL, " AND REASON = 'notRetryable'");
+ } else if ("retriesExhausted".equals(s)) {
+ map.put(REASON_SQL, " AND REASON = 'retriesExhausted'");
+ } else if ("diskFull".equals(s)) {
+ map.put(REASON_SQL, " AND REASON = 'diskFull'");
+ } else if ("other".equals(s)) {
+ map.put(REASON_SQL, " AND REASON = 'other'");
} else {
map.put("err", "bad expiryReason");
return map;
@@ -364,7 +370,7 @@ public class LogServlet extends BaseServlet {
} else if (etime == 0) {
etime = stime + TWENTYFOUR_HOURS;
}
- map.put("timeSQL", String.format(" AND EVENT_TIME >= %d AND EVENT_TIME <= %d", stime, etime));
+ map.put(TIMESQL, String.format(" AND EVENT_TIME >= %d AND EVENT_TIME <= %d", stime, etime));
return map;
}
private long getTimeFromParam(final String s) {
@@ -381,8 +387,7 @@ public class LogServlet extends BaseServlet {
}
try {
// Also allow a long (in ms); useful for testing
- long n = Long.parseLong(s);
- return n;
+ return Long.parseLong(s);
} catch (NumberFormatException numberFormatException) {
intlogger.error("Exception in getting Time :- "+numberFormatException.getMessage(),numberFormatException);
}
@@ -392,51 +397,51 @@ public class LogServlet extends BaseServlet {
private void getPublishRecordsForFeed(int feedid, RowHandler rh, Map<String, String> map) {
String type = map.get("type");
- if (type.equals("all") || type.equals("pub")) {
- String sql = "select * from LOG_RECORDS where FEEDID = "+feedid
+ if ("all".equals(type) || "pub".equals(type)) {
+ String sql = LOG_RECORDSSQL+feedid
+ " AND TYPE = 'pub'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("statusSQL") + map.get("filenameSQL");
+ + map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(STATUSSQL) + map.get(FILENAMESQL);
getRecordsForSQL(sql, rh);
}
}
private void getDeliveryRecordsForFeed(int feedid, RowHandler rh, Map<String, String> map) {
String type = map.get("type");
- if (type.equals("all") || type.equals("del")) {
- String sql = "select * from LOG_RECORDS where FEEDID = "+feedid
+ if ("all".equals(type) || "del".equals(type)) {
+ String sql = LOG_RECORDSSQL+feedid
+ " AND TYPE = 'del'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("resultSQL");
+ + map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(RESULTSQL);
getRecordsForSQL(sql, rh);
}
}
private void getDeliveryRecordsForSubscription(int subid, RowHandler rh, Map<String, String> map) {
String type = map.get("type");
- if (type.equals("all") || type.equals("del")) {
+ if ("all".equals(type) || "del".equals(type)) {
String sql = "select * from LOG_RECORDS where DELIVERY_SUBID = "+subid
+ " AND TYPE = 'del'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("resultSQL");
+ + map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(RESULTSQL);
getRecordsForSQL(sql, rh);
}
}
private void getExpiryRecordsForFeed(int feedid, RowHandler rh, Map<String, String> map) {
String type = map.get("type");
- if (type.equals("all") || type.equals("exp")) {
- String st = map.get("statusSQL");
+ if ("all".equals(type) || "exp".equals(type)) {
+ String st = map.get(STATUSSQL);
if (st == null || st.length() == 0) {
- String sql = "select * from LOG_RECORDS where FEEDID = "+feedid
+ String sql = LOG_RECORDSSQL+feedid
+ " AND TYPE = 'exp'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("reasonSQL");
+ + map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(REASON_SQL);
getRecordsForSQL(sql, rh);
}
}
}
private void getExpiryRecordsForSubscription(int subid, RowHandler rh, Map<String, String> map) {
String type = map.get("type");
- if (type.equals("all") || type.equals("exp")) {
- String st = map.get("statusSQL");
+ if ("all".equals(type) || "exp".equals(type)) {
+ String st = map.get(STATUSSQL);
if (st == null || st.length() == 0) {
String sql = "select * from LOG_RECORDS where DELIVERY_SUBID = "+subid
+ " AND TYPE = 'exp'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("reasonSQL");
+ + map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(REASON_SQL);
getRecordsForSQL(sql, rh);
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java
index a0d8664f..7c693bd2 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java
@@ -102,7 +102,7 @@ public class Main {
try {
in = getClass().getClassLoader().getResourceAsStream("drProvCadi.properties");
} catch (Exception e) {
- intlogger.error("Exception in Main.getCadiProps() method ", e.getMessage());
+ intlogger.error("Exception in Main.getCadiProps(): " + e.getMessage(), e);
}
return in;
}
@@ -247,8 +247,8 @@ public class Main {
Inner obj = new Main().new Inner();
InputStream in = obj.getCadiProps();
cadiProperties.load(in);
- } catch (IOException e1) {
- intlogger.error("PROV0001 Exception loading CADI properties", e1.getMessage());
+ } catch (IOException ioe) {
+ intlogger.error("PROV0001 Exception loading CADI properties: " + ioe.getMessage(), ioe);
}
cadiProperties.setProperty("aaf_locate_url", provProperties.getProperty("org.onap.dmaap.datarouter.provserver.cadi.aaf.url", "https://aaf-onap-test.osaaf.org:8095"));
intlogger.info("PROV0001 aaf_url set to - " + cadiProperties.getProperty("aaf_url"));
@@ -281,7 +281,7 @@ public class Main {
server.start();
intlogger.info("Prov Server started-" + server.getState());
} catch (Exception e) {
- intlogger.info("Jetty failed to start. Reporting will we unavailable", e.getMessage());
+ intlogger.info("Jetty failed to start. Reporting will we unavailable: " + e.getMessage(), e);
}
server.join();
intlogger.info("PROV0001 **** AT&T Data Router Provisioning Server halted.");
@@ -302,7 +302,7 @@ public class Main {
Thread.sleep(5000L);
System.exit(0);
} catch (Exception e) {
- intlogger.error("Exception in Main.shutdown() method " + e.getMessage());
+ intlogger.error("Exception in Main.shutdown(): " + e.getMessage(), e);
}
});
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java
index b9d5e7a6..6cb8520d 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java
@@ -66,6 +66,7 @@ public class Poker extends TimerTask {
private static final String POKE_URL_TEMPLATE = "http://%s/internal/fetchProv";
private static final Object lock = new Object();
+ private static final String CARRIAGE_RETURN = "\n],\n";
/**
* This is a singleton -- there is only one Poker object in the server
@@ -77,6 +78,7 @@ public class Poker extends TimerTask {
private EELFLogger logger;
private String provString;
+
private Poker() {
timer1 = timer2 = 0;
Timer rolex = new Timer();
@@ -84,8 +86,8 @@ public class Poker extends TimerTask {
try {
thisPod = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
- thisPod = "*UNKNOWN*"; // not a major problem
- logger.info("UnknownHostException: Setting thisPod to \"*UNKNOWN*\"");
+ thisPod = "*UNKNOWN_POD*"; // not a major problem
+ logger.info("UnknownHostException: Setting thisPod to \"*UNKNOWN_POD*\"", e);
}
provString = buildProvisioningString();
@@ -195,10 +197,10 @@ public class Poker extends TimerTask {
} catch (MalformedURLException e) {
logger.warn(
"PROV0013 MalformedURLException Error poking node at " + nodeUrl + " : " + e
- .getMessage());
+ .getMessage(), e);
} catch (IOException e) {
logger.warn("PROV0013 IOException Error poking node at " + nodeUrl + " : " + e
- .getMessage());
+ .getMessage(), e);
}
};
r.run();
@@ -215,7 +217,7 @@ public class Poker extends TimerTask {
sb.append(f.asJSONObject().toString());
pfx = ",\n";
}
- sb.append("\n],\n");
+ sb.append(CARRIAGE_RETURN);
//Append groups to the string - Rally:US708115 - 1610
pfx = "\n";
@@ -225,7 +227,7 @@ public class Poker extends TimerTask {
sb.append(s.asJSONObject().toString());
pfx = ",\n";
}
- sb.append("\n],\n");
+ sb.append(CARRIAGE_RETURN);
// Append Subscriptions to the string
pfx = "\n";
@@ -237,13 +239,13 @@ public class Poker extends TimerTask {
}
pfx = ",\n";
}
- sb.append("\n],\n");
+ sb.append(CARRIAGE_RETURN);
// Append Parameters to the string
pfx = "\n";
sb.append("\"parameters\": {");
Map<String, String> props = Parameters.getParameters();
- Set<String> ivals = new HashSet<String>();
+ Set<String> ivals = new HashSet<>();
String intv = props.get("_INT_VALUES");
if (intv != null) {
ivals.addAll(Arrays.asList(intv.split("\\|")));
@@ -280,7 +282,7 @@ public class Poker extends TimerTask {
sb.append(in.asJSONObject().toString());
pfx = ",\n";
}
- sb.append("\n],\n");
+ sb.append(CARRIAGE_RETURN);
pfx = "\n";
sb.append("\"egress\": {");
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java
index 67a74de3..75423602 100755
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java
@@ -101,7 +101,7 @@ public class ProxyServlet extends BaseServlet {
sch = new Scheme("https", 443, socketFactory);
inited = true;
} catch (Exception e) {
- intlogger.error("ProxyServlet: " + e.getMessage());
+ intlogger.error("ProxyServlet.init: " + e.getMessage(), e);
}
intlogger.info("ProxyServlet: inited = " + inited);
}
@@ -111,7 +111,7 @@ public class ProxyServlet extends BaseServlet {
try (FileInputStream instream = new FileInputStream(new File(store))) {
ks.load(instream, pass.toCharArray());
} catch (FileNotFoundException fileNotFoundException) {
- intlogger.error("ProxyServlet: " + fileNotFoundException.getMessage());
+ intlogger.error("ProxyServlet.readStore: " + fileNotFoundException.getMessage(), fileNotFoundException);
} catch (Exception x) {
intlogger.error("READING TRUSTSTORE: " + x);
}
@@ -130,7 +130,7 @@ public class ProxyServlet extends BaseServlet {
if (t != null) {
t = t.replaceAll("&amp;", "&");
for (String s : t.split("&")) {
- if (s.equals("noproxy") || s.startsWith("noproxy=")) {
+ if ("noproxy".equals(s) || s.startsWith("noproxy=")) {
return false;
}
}
@@ -146,7 +146,7 @@ public class ProxyServlet extends BaseServlet {
*/
public boolean isProxyServer() {
SynchronizerTask st = SynchronizerTask.getSynchronizer();
- return st.getState() == SynchronizerTask.STANDBY;
+ return st.getPodState() == SynchronizerTask.STANDBY_POD;
}
/**
@@ -211,7 +211,7 @@ public class ProxyServlet extends BaseServlet {
rv = true;
} catch (IOException e) {
- intlogger.error("ProxyServlet: " + e.getMessage());
+ intlogger.error("ProxyServlet.doGetWithFallback: " + e.getMessage(), e);
} finally {
proxy.releaseConnection();
httpclient.getConnectionManager().shutdown();
@@ -234,7 +234,7 @@ public class ProxyServlet extends BaseServlet {
// Copy request headers and request body
copyRequestHeaders(req, proxy);
- if (method.equals("POST") || method.equals("PUT")) {
+ if ("POST".equals(method) || "PUT".equals(method)) {
BasicHttpEntity body = new BasicHttpEntity();
body.setContent(req.getInputStream());
body.setContentLength(-1); // -1 = unknown
@@ -250,7 +250,7 @@ public class ProxyServlet extends BaseServlet {
copyResponseHeaders(pxyResponse, resp);
copyEntityContent(pxyResponse, resp);
} catch (IOException e) {
- intlogger.warn("ProxyServlet: " + e.getMessage());
+ intlogger.warn("ProxyServlet.doProxy: " + e.getMessage(), e);
sendResponseError(resp, HttpServletResponse.SC_SERVICE_UNAVAILABLE, "", intlogger);
} finally {
proxy.releaseConnection();
@@ -279,7 +279,7 @@ public class ProxyServlet extends BaseServlet {
List<String> list = Collections.list(from.getHeaderNames());
for (String name : list) {
// Proxy code will add this one
- if (!name.equalsIgnoreCase("Content-Length")) {
+ if (!"Content-Length".equalsIgnoreCase(name)) {
to.addHeader(name, from.getHeader(name));
}
}
@@ -288,7 +288,7 @@ public class ProxyServlet extends BaseServlet {
private void copyResponseHeaders(HttpResponse from, HttpServletResponse to) {
for (Header hdr : from.getAllHeaders()) {
// Don't copy Date: our Jetty will add another Date header
- if (!hdr.getName().equals("Date")) {
+ if (!"Date".equals(hdr.getName())) {
to.addHeader(hdr.getName(), hdr.getValue());
}
}
@@ -300,7 +300,7 @@ public class ProxyServlet extends BaseServlet {
try (InputStream in = entity.getContent()) {
IOUtils.copy(in, resp.getOutputStream());
} catch (Exception e) {
- intlogger.error("Exception: " + e.getMessage());
+ intlogger.error("ProxyServlet.copyEntityContent: " + e.getMessage(), e);
}
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java
index 76a983f8..2ef5087f 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java
@@ -169,7 +169,7 @@ public class PublishServlet extends BaseServlet {
}
}
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("PROV0151 PublishServlet.redirect: " + ioe.getMessage(), ioe);
}
}
@@ -235,6 +235,7 @@ public class PublishServlet extends BaseServlet {
}
return -1;
} catch (NumberFormatException | JSONException e) {
+ intlogger.debug("PROV0152 PublishServlet.checkPath: " + e.getMessage(), e);
return -1;
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/RouteServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/RouteServlet.java
index 4dd422a0..383798fb 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/RouteServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/RouteServlet.java
@@ -129,6 +129,7 @@ import static org.onap.dmaap.datarouter.provisioning.utils.HttpServletUtils.send
*/
@SuppressWarnings("serial")
public class RouteServlet extends ProxyServlet {
+
/**
* DELETE route table entries by deleting part of the route table tree.
*/
@@ -136,10 +137,10 @@ public class RouteServlet extends ProxyServlet {
public void doDelete(HttpServletRequest req, HttpServletResponse resp) {
EventLogRecord elr = new EventLogRecord(req);
if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
+ elr.setMessage(UNAUTHORIZED);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, "Unauthorized.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, UNAUTHORIZED, eventlogger);
return;
}
if (isProxyOK(req) && isProxyServer()) {
@@ -150,7 +151,7 @@ public class RouteServlet extends ProxyServlet {
String path = req.getPathInfo();
String[] parts = path.substring(1).split("/");
Deleteable[] d = null;
- if (parts[0].equals("ingress")) {
+ if ("ingress".equals(parts[0])) {
if (parts.length == 4) {
// /internal/route/ingress/<feed>/<user>/<subnet>
try {
@@ -179,7 +180,7 @@ public class RouteServlet extends ProxyServlet {
sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Invalid number of arguments in 'delete ingress' command.", eventlogger);
return;
}
- } else if (parts[0].equals("egress")) {
+ } else if ("egress".equals(parts[0])) {
if (parts.length == 2) {
// /internal/route/egress/<sub>
try {
@@ -198,7 +199,7 @@ public class RouteServlet extends ProxyServlet {
sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Invalid number of arguments in 'delete egress' command.", eventlogger);
return;
}
- } else if (parts[0].equals("network")) {
+ } else if ("network".equals(parts[0])) {
if (parts.length == 3) {
// /internal/route/network/<from>/<to>
try {//
@@ -208,7 +209,9 @@ public class RouteServlet extends ProxyServlet {
);
d = new Deleteable[] { nr };
} catch (IllegalArgumentException e) {
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "The specified network route does not exist.", eventlogger);
+ String message = "The specified network route does not exist.";
+ eventlogger.error(message, e);
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, message, eventlogger);
return;
}
} else {
@@ -217,7 +220,7 @@ public class RouteServlet extends ProxyServlet {
}
}
if (d == null) {
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Bad URL.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, BAD_URL, eventlogger);
return;
}
boolean rv = true;
@@ -244,10 +247,10 @@ public class RouteServlet extends ProxyServlet {
public void doGet(HttpServletRequest req, HttpServletResponse resp) {
EventLogRecord elr = new EventLogRecord(req);
if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
+ elr.setMessage(UNAUTHORIZED);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, "Unauthorized.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, UNAUTHORIZED, eventlogger);
return;
}
if (isProxyOK(req) && isProxyServer()) {
@@ -258,14 +261,14 @@ public class RouteServlet extends ProxyServlet {
String path = req.getPathInfo();
if (!path.endsWith("/"))
path += "/";
- if (!path.equals("/") && !path.equals("/ingress/") && !path.equals("/egress/") && !path.equals("/network/")) {
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Bad URL.", eventlogger);
+ if (!"/".equals(path) && !INGRESS.equals(path) && !EGRESS.equals(path) && !NETWORK.equals(path)) {
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, BAD_URL, eventlogger);
return;
}
StringBuilder sb = new StringBuilder("{\n");
String px2 = "";
- if (path.equals("/") || path.equals("/ingress/")) {
+ if ("/".equals(path) || INGRESS.equals(path)) {
String pfx = "\n";
sb.append("\"ingress\": [");
for (IngressRoute in : IngressRoute.getAllIngressRoutes()) {
@@ -277,7 +280,7 @@ public class RouteServlet extends ProxyServlet {
px2 = ",\n";
}
- if (path.equals("/") || path.equals("/egress/")) {
+ if ("/".equals(path) || EGRESS.equals(path)) {
String pfx = "\n";
sb.append(px2);
sb.append("\"egress\": {");
@@ -289,7 +292,7 @@ public class RouteServlet extends ProxyServlet {
try {
sb.append("\"").append(jx.getString(key)).append("\"");
} catch (JSONException je) {
- eventlogger.error("JSONException" + je.getMessage());
+ eventlogger.error("PROV0161 RouteServlet.doGet: " + je.getMessage(), je);
}
pfx = ",\n";
}
@@ -298,7 +301,7 @@ public class RouteServlet extends ProxyServlet {
px2 = ",\n";
}
- if (path.equals("/") || path.equals("/network/")) {
+ if ("/".equals(path) || NETWORK.equals(path)) {
String pfx = "\n";
sb.append(px2);
sb.append("\"routing\": [");
@@ -315,7 +318,7 @@ public class RouteServlet extends ProxyServlet {
try {
resp.getOutputStream().print(sb.toString());
} catch (IOException ioe) {
- eventlogger.error("IOException" + ioe.getMessage());
+ eventlogger.error("PROV0162 RouteServlet.doGet: " + ioe.getMessage(), ioe);
}
}
/**
@@ -325,13 +328,13 @@ public class RouteServlet extends ProxyServlet {
public void doPut(HttpServletRequest req, HttpServletResponse resp) {
EventLogRecord elr = new EventLogRecord(req);
if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
+ elr.setMessage(UNAUTHORIZED);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, "Unauthorized.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, UNAUTHORIZED, eventlogger);
return;
}
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Bad URL.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, BAD_URL, eventlogger);
}
/**
* POST - modify existing route table entries in the route table tree specified by the URL path.
@@ -340,10 +343,10 @@ public class RouteServlet extends ProxyServlet {
public void doPost(HttpServletRequest req, HttpServletResponse resp) {
EventLogRecord elr = new EventLogRecord(req);
if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
+ elr.setMessage(UNAUTHORIZED);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, "Unauthorized.", eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_FORBIDDEN, UNAUTHORIZED, eventlogger);
return;
}
if (isProxyOK(req) && isProxyServer()) {
@@ -352,7 +355,7 @@ public class RouteServlet extends ProxyServlet {
}
String path = req.getPathInfo();
Insertable[] ins = null;
- if (path.startsWith("/ingress/")) {
+ if (path.startsWith(INGRESS)) {
// /internal/route/ingress/?feed=%s&amp;user=%s&amp;subnet=%s&amp;nodepatt=%s
try {
// Although it probably doesn't make sense, you can install two identical routes in the IRT
@@ -368,11 +371,11 @@ public class RouteServlet extends ProxyServlet {
int seq = (t != null) ? Integer.parseInt(t) : (IngressRoute.getMaxSequence() + 100);
ins = new Insertable[] { new IngressRoute(seq, feedid, user, subnet, NodeClass.lookupNodeNames(nodepatt)) };
} catch (Exception e) {
- intlogger.info(e.toString());
+ intlogger.info(e.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments in 'add ingress' command.", intlogger);
return;
}
- } else if (path.startsWith("/egress/")) {
+ } else if (path.startsWith(EGRESS)) {
// /internal/route/egress/?sub=%s&amp;node=%s
try {
int subid = Integer.parseInt(req.getParameter("sub"));
@@ -384,11 +387,11 @@ public class RouteServlet extends ProxyServlet {
String node = NodeClass.normalizeNodename(req.getParameter("node"));
ins = new Insertable[] { new EgressRoute(subid, node) };
} catch (Exception e) {
- intlogger.info(e.toString());
+ intlogger.info(e.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments in 'add egress' command.", intlogger);
return;
}
- } else if (path.startsWith("/network/")) {
+ } else if (path.startsWith(NETWORK)) {
// /internal/route/network/?from=%s&amp;to=%s&amp;via=%s
try {
String nfrom = req.getParameter("from");
@@ -410,13 +413,13 @@ public class RouteServlet extends ProxyServlet {
}
ins = new Insertable[] { nr };
} catch (IllegalArgumentException e) {
- intlogger.info(e.toString());
+ intlogger.info(e.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments in 'add network' command.", intlogger);
return;
}
}
if (ins == null) {
- sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, "Bad URL.", intlogger);
+ sendResponseError(resp, HttpServletResponse.SC_NOT_FOUND, BAD_URL, intlogger);
return;
}
boolean rv = true;
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java
index 34ba5d34..9cbce0a8 100755
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java
@@ -57,8 +57,9 @@ import static org.onap.dmaap.datarouter.provisioning.utils.HttpServletUtils.send
public class StatisticsServlet extends BaseServlet {
private static final long TWENTYFOUR_HOURS = (24 * 60 * 60 * 1000L);
- private static final String fmt1 = "yyyy-MM-dd'T'HH:mm:ss'Z'";
- private static final String fmt2 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+ private static final String FMT1 = "yyyy-MM-dd'T'HH:mm:ss'Z'";
+ private static final String FMT2 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+
/**
@@ -92,97 +93,83 @@ public class StatisticsServlet extends BaseServlet {
resp.setContentType(LOGLIST_CONTENT_TYPE);
String outputType = "json";
- String feedids = null;
- if (req.getParameter("feedid") == null && req.getParameter("groupid") == null) {
+ if (req.getParameter(FEEDID) == null && req.getParameter(GROUPID) == null) {
try {
resp.getOutputStream().print("Invalid request, Feedid or Group ID is required.");
} catch (IOException ioe) {
- eventlogger.error("IOException: " + ioe.getMessage());
+ eventlogger.error("PROV0171 StatisticsServlet.doGet: " + ioe.getMessage(), ioe);
}
}
- if (req.getParameter("feedid") != null && req.getParameter("groupid") == null) {
- map.put("feedids", req.getParameter("feedid").replace("|", ",").toString());
+ if (req.getParameter(FEEDID) != null && req.getParameter(GROUPID) == null) {
+ map.put(FEEDIDS, req.getParameter(FEEDID).replace("|", ","));
}
- if (req.getParameter("groupid") != null && req.getParameter("feedid") == null) {
+ if (req.getParameter(GROUPID) != null && req.getParameter(FEEDID) == null) {
StringBuffer groupid1 = new StringBuffer();
try {
- System.out.println("feeedidsssssssss");
- groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter("groupid")));
- System.out.println("feeedids" + req.getParameter("groupid"));
-
- map.put("feedids", groupid1.toString());
- System.out.println("groupid1" + groupid1.toString());
-
-
+ groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter(GROUPID)));
+ map.put(FEEDIDS, groupid1.toString());
} catch (NumberFormatException | SQLException e) {
- eventlogger.error(e.getMessage());
+ eventlogger.error("PROV0172 StatisticsServlet.doGet: " + e.getMessage(), e);
}
}
- if (req.getParameter("groupid") != null && req.getParameter("feedid") != null) {
+ if (req.getParameter(GROUPID) != null && req.getParameter(FEEDID) != null) {
StringBuffer groupid1 = new StringBuffer();
try {
- System.out.println("both r not null");
- groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter("groupid")));
- System.out.println("feeedids" + req.getParameter("groupid"));
+ groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter(GROUPID)));
groupid1.append(",");
- groupid1.append(req.getParameter("feedid").replace("|", ",").toString());
-
- map.put("feedids", groupid1.toString());
-
- System.out.println("groupid1" + groupid1.toString());
-
-
+ groupid1.append(req.getParameter(FEEDID).replace("|", ","));
+ map.put(FEEDIDS, groupid1.toString());
} catch (NumberFormatException | SQLException e) {
- eventlogger.error(e.getMessage());
+ eventlogger.error("PROV0173 StatisticsServlet.doGet: " + e.getMessage(), e);
}
}
- if (req.getParameter("subid") != null && req.getParameter("feedid") != null) {
+ if (req.getParameter(SUBID) != null && req.getParameter(FEEDID) != null) {
StringBuffer subidstr = new StringBuffer();
subidstr.append("and e.DELIVERY_SUBID in(");
- subidstr.append(req.getParameter("subid").replace("|", ",").toString());
+ subidstr.append(req.getParameter(SUBID).replace("|", ","));
subidstr.append(")");
- map.put("subid", subidstr.toString());
+ map.put(SUBID, subidstr.toString());
}
- if (req.getParameter("subid") != null && req.getParameter("groupid") != null) {
+ if (req.getParameter(SUBID) != null && req.getParameter(GROUPID) != null) {
StringBuffer subidstr = new StringBuffer();
subidstr.append("and e.DELIVERY_SUBID in(");
- subidstr.append(req.getParameter("subid").replace("|", ",").toString());
+ subidstr.append(req.getParameter(SUBID).replace("|", ","));
subidstr.append(")");
- map.put("subid", subidstr.toString());
+ map.put(SUBID, subidstr.toString());
}
if (req.getParameter("type") != null) {
- map.put("eventType", req.getParameter("type").replace("|", ",").toString());
+ map.put(EVENT_TYPE, req.getParameter("type").replace("|", ","));
}
- if (req.getParameter("output_type") != null) {
- map.put("output_type", req.getParameter("output_type").toString());
+ if (req.getParameter(OUTPUT_TYPE) != null) {
+ map.put(OUTPUT_TYPE, req.getParameter(OUTPUT_TYPE));
}
- if (req.getParameter("start_time") != null) {
- map.put("start_time", req.getParameter("start_time").toString());
+ if (req.getParameter(START_TIME) != null) {
+ map.put(START_TIME, req.getParameter(START_TIME));
}
- if (req.getParameter("end_time") != null) {
- map.put("end_time", req.getParameter("end_time").toString());
+ if (req.getParameter(END_TIME) != null) {
+ map.put(END_TIME, req.getParameter(END_TIME));
}
if (req.getParameter("time") != null) {
- map.put("start_time", req.getParameter("time").toString());
- map.put("end_time", null);
+ map.put(START_TIME, req.getParameter("time"));
+ map.put(END_TIME, null);
}
- if (req.getParameter("output_type") != null) {
- outputType = req.getParameter("output_type");
+ if (req.getParameter(OUTPUT_TYPE) != null) {
+ outputType = req.getParameter(OUTPUT_TYPE);
}
try {
this.getRecordsForSQL(map, outputType, resp.getOutputStream(), resp);
} catch (IOException ioe) {
- eventlogger.error("IOException: " + ioe.getMessage());
+ eventlogger.error("PROV0174 StatisticsServlet.doGet: " + ioe.getMessage(), ioe);
}
}
@@ -238,7 +225,7 @@ public class StatisticsServlet extends BaseServlet {
*/
public void rsToJson(ResultSet rs, ServletOutputStream out) throws IOException, SQLException {
- String fields[] = {"FEEDNAME", "FEEDID", "FILES_PUBLISHED", "PUBLISH_LENGTH", "FILES_DELIVERED",
+ String[] fields = {"FEEDNAME", "FEEDID", "FILES_PUBLISHED", "PUBLISH_LENGTH", "FILES_DELIVERED",
"DELIVERED_LENGTH", "SUBSCRIBER_URL", "SUBID", "PUBLISH_TIME", "DELIVERY_TIME",
"AverageDelay"};
StringBuffer line = new StringBuffer();
@@ -271,7 +258,6 @@ public class StatisticsServlet extends BaseServlet {
DB db = null;
Connection conn = null;
- //PreparedStatement prepareStatement = null;
ResultSet resultSet = null;
String sqlGoupid = null;
StringBuffer feedIds = new StringBuffer();
@@ -291,7 +277,7 @@ public class StatisticsServlet extends BaseServlet {
System.out.println("feedIds" + feedIds.toString());
}
} catch (SQLException e) {
- eventlogger.error(e.getMessage());
+ eventlogger.error("PROV0175 StatisticsServlet.getFeedIdsByGroupId: " + e.getMessage(), e);
} finally {
try {
if (resultSet != null) {
@@ -302,7 +288,7 @@ public class StatisticsServlet extends BaseServlet {
db.release(conn);
}
} catch (Exception e) {
- eventlogger.error(e.getMessage());
+ eventlogger.error("PROV0176 StatisticsServlet.getFeedIdsByGroupId: " + e.getMessage(), e);
}
}
return feedIds;
@@ -322,23 +308,23 @@ public class StatisticsServlet extends BaseServlet {
String start_time = null;
String end_time = null;
String subid = " ";
- if (map.get("eventType") != null) {
- eventType = (String) map.get("eventType");
+ if (map.get(EVENT_TYPE) != null) {
+ eventType = map.get(EVENT_TYPE);
}
- if (map.get("feedids") != null) {
- feedids = (String) map.get("feedids");
+ if (map.get(FEEDIDS) != null) {
+ feedids = map.get(FEEDIDS);
}
- if (map.get("start_time") != null) {
- start_time = (String) map.get("start_time");
+ if (map.get(START_TIME) != null) {
+ start_time = map.get(START_TIME);
}
- if (map.get("end_time") != null) {
- end_time = (String) map.get("end_time");
+ if (map.get(END_TIME) != null) {
+ end_time = map.get(END_TIME);
}
if ("all".equalsIgnoreCase(eventType)) {
eventType = "PUB','DEL, EXP, PBF";
}
- if (map.get("subid") != null) {
- subid = (String) map.get("subid");
+ if (map.get(SUBID) != null) {
+ subid = map.get(SUBID);
}
eventlogger.info("Generating sql query to get Statistics resultset. ");
@@ -425,10 +411,10 @@ public class StatisticsServlet extends BaseServlet {
}
private Map<String, String> buildMapFromRequest(HttpServletRequest req) {
- Map<String, String> map = new HashMap<String, String>();
+ Map<String, String> map = new HashMap<>();
String s = req.getParameter("type");
if (s != null) {
- if (s.equals("pub") || s.equals("del") || s.equals("exp")) {
+ if ("pub".equals(s) || "del".equals(s) || "exp".equals(s)) {
map.put("type", s);
} else {
map.put("err", "bad type");
@@ -440,7 +426,7 @@ public class StatisticsServlet extends BaseServlet {
map.put("publishSQL", "");
map.put("statusSQL", "");
map.put("resultSQL", "");
- map.put("reasonSQL", "");
+ map.put(REASON_SQL, "");
s = req.getParameter("publishId");
if (s != null) {
@@ -454,11 +440,11 @@ public class StatisticsServlet extends BaseServlet {
s = req.getParameter("statusCode");
if (s != null) {
String sql = null;
- if (s.equals("success")) {
+ if ("success".equals(s)) {
sql = " AND STATUS >= 200 AND STATUS < 300";
- } else if (s.equals("redirect")) {
+ } else if ("redirect".equals(s)) {
sql = " AND STATUS >= 300 AND STATUS < 400";
- } else if (s.equals("failure")) {
+ } else if ("failure".equals(s)) {
sql = " AND STATUS >= 400";
} else {
try {
@@ -480,14 +466,14 @@ public class StatisticsServlet extends BaseServlet {
s = req.getParameter("expiryReason");
if (s != null) {
map.put("type", "exp");
- if (s.equals("notRetryable")) {
- map.put("reasonSQL", " AND REASON = 'notRetryable'");
- } else if (s.equals("retriesExhausted")) {
- map.put("reasonSQL", " AND REASON = 'retriesExhausted'");
- } else if (s.equals("diskFull")) {
- map.put("reasonSQL", " AND REASON = 'diskFull'");
- } else if (s.equals("other")) {
- map.put("reasonSQL", " AND REASON = 'other'");
+ if ("notRetryable".equals(s)) {
+ map.put(REASON_SQL, " AND REASON = 'notRetryable'");
+ } else if ("retriesExhausted".equals(s)) {
+ map.put(REASON_SQL, " AND REASON = 'retriesExhausted'");
+ } else if ("diskFull".equals(s)) {
+ map.put(REASON_SQL, " AND REASON = 'diskFull'");
+ } else if ("other".equals("other")) {
+ map.put(REASON_SQL, " AND REASON = 'other'");
} else {
map.put("err", "bad expiryReason");
return map;
@@ -522,7 +508,7 @@ public class StatisticsServlet extends BaseServlet {
}
try {
// First, look for an RFC 3339 date
- String fmt = (s.indexOf('.') > 0) ? fmt2 : fmt1;
+ String fmt = (s.indexOf('.') > 0) ? FMT2 : FMT1;
SimpleDateFormat sdf = new SimpleDateFormat(fmt);
Date d = sdf.parse(s);
return d.getTime();
@@ -530,8 +516,7 @@ public class StatisticsServlet extends BaseServlet {
}
try {
// Also allow a long (in ms); useful for testing
- long n = Long.parseLong(s);
- return n;
+ return Long.parseLong(s);
} catch (NumberFormatException e) {
}
intlogger.info("Error parsing time=" + s);
@@ -548,7 +533,7 @@ public class StatisticsServlet extends BaseServlet {
DB db = new DB();
try (Connection conn = db.getConnection()) {
try (ResultSet rs = conn.prepareStatement(filterQuery).executeQuery()) {
- if (outputType.equals("csv")) {
+ if ("csv".equals(outputType)) {
resp.setContentType("application/octet-stream");
Date date = new Date();
SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-YYYY HH:mm:ss");
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscribeServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscribeServlet.java
index 69451a3b..8e70e693 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscribeServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscribeServlet.java
@@ -112,7 +112,7 @@ public class SubscribeServlet extends ProxyServlet {
}
int feedid = getIdFromPath(req);
if (feedid < 0) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -121,7 +121,7 @@ public class SubscribeServlet extends ProxyServlet {
}
Feed feed = Feed.getFeedById(feedid);
if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -140,7 +140,7 @@ public class SubscribeServlet extends ProxyServlet {
try {
resp.getOutputStream().print(t);
} catch (IOException ioe) {
- eventlogger.error("IOException: " + ioe.getMessage());
+ eventlogger.error("PROV0181 SubscribeServlet.doGet: " + ioe.getMessage(), ioe);
}
} finally {
eelfLogger.info(EelfMsgs.EXIT);
@@ -201,7 +201,7 @@ public class SubscribeServlet extends ProxyServlet {
}
int feedid = getIdFromPath(req);
if (feedid < 0) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -210,7 +210,7 @@ public class SubscribeServlet extends ProxyServlet {
}
Feed feed = Feed.getFeedById(feedid);
if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
+ message = MISSING_FEED;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -220,7 +220,7 @@ public class SubscribeServlet extends ProxyServlet {
// check content type is SUB_CONTENT_TYPE, version 1.0
ContentHeader ch = getContentHeader(req);
String ver = ch.getAttribute("version");
- if (!ch.getType().equals(SUB_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
+ if (!ch.getType().equals(SUB_BASECONTENT_TYPE) || !("1.0".equals(ver) || "2.0".equals(ver))) {
intlogger.debug("Content-type is: " + req.getHeader("Content-Type"));
message = "Incorrect content-type";
elr.setMessage(message);
@@ -231,7 +231,7 @@ public class SubscribeServlet extends ProxyServlet {
}
JSONObject jo = getJSONfromInput(req);
if (jo == null) {
- message = "Badly formed JSON";
+ message = BAD_JSON;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -258,7 +258,7 @@ public class SubscribeServlet extends ProxyServlet {
message = e.getMessage();
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.error(elr.toString());
+ eventlogger.error(elr.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
return;
}
@@ -271,14 +271,14 @@ public class SubscribeServlet extends ProxyServlet {
*/
String feedAafInstance = feed.getAafInstance();
String subAafInstance = sub.getAafInstance();
- boolean subAafLegacyEmptyOrNull = (subAafInstance == null || subAafInstance.equals("") || subAafInstance.equalsIgnoreCase("legacy"));
+ boolean subAafLegacyEmptyOrNull = (subAafInstance == null || "".equals(subAafInstance) || "legacy".equalsIgnoreCase(subAafInstance));
// This extra check added to verify AAF feed with AAF subscriber having empty aaf instance check
- if (feedAafInstance == null || feedAafInstance.equals("") || feedAafInstance.equalsIgnoreCase("legacy")) {
+ if (feedAafInstance == null || "".equals(feedAafInstance) || "legacy".equalsIgnoreCase(feedAafInstance)) {
if (subAafLegacyEmptyOrNull) {
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -299,7 +299,7 @@ public class SubscribeServlet extends ProxyServlet {
if (subAafLegacyEmptyOrNull) {
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -341,7 +341,7 @@ public class SubscribeServlet extends ProxyServlet {
try {
resp.getOutputStream().print(sub.asLimitedJSONObject().toString());
} catch (IOException ioe) {
- eventlogger.error("IOException: " + ioe.getMessage());
+ eventlogger.error("PROV0182 SubscribeServlet.doPost: " + ioe.getMessage(), ioe);
}
provisioningDataChanged();
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java
index 63ff84de..125c50d8 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java
@@ -63,6 +63,10 @@ public class SubscriptionServlet extends ProxyServlet {
private static EELFLogger eelfLogger = EELFManager.getInstance()
.getLogger(SubscriptionServlet.class);
+
+
+
+
/**
* DELETE on the &lt;subscriptionUrl&gt; -- delete a subscription. See the <i>Deleting a Subscription</i> section in
* the <b>Provisioning API</b> document for details on how this method should be invoked.
@@ -88,7 +92,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing " + BEHALF_HEADER + " header.";
+ message = MISSING_ON_BEHALF;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -97,7 +101,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
int subid = getIdFromPath(req);
if (subid < 0) {
- message = "Missing or bad subscription number.";
+ message = BAD_SUB;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -106,7 +110,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
Subscription sub = Subscription.getSubscriptionById(subid);
if (sub == null) {
- message = "Missing or bad subscription number.";
+ message = BAD_SUB;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -119,10 +123,10 @@ public class SubscriptionServlet extends ProxyServlet {
* CADI code - check on permissions based on Legacy/AAF users to allow to delete/remove subscription
*/
String aafInstance = sub.getAafInstance();
- if (aafInstance == null || aafInstance.equals("") || aafInstance.equalsIgnoreCase("legacy")) {
+ if (aafInstance == null || "".equals(aafInstance) || "legacy".equalsIgnoreCase(aafInstance)) {
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -189,7 +193,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing " + BEHALF_HEADER + " header.";
+ message = MISSING_ON_BEHALF;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -198,7 +202,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
int subid = getIdFromPath(req);
if (subid < 0) {
- message = "Missing or bad subscription number.";
+ message = BAD_SUB;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -207,7 +211,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
Subscription sub = Subscription.getSubscriptionById(subid);
if (sub == null) {
- message = "Missing or bad subscription number.";
+ message = BAD_SUB;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -217,7 +221,7 @@ public class SubscriptionServlet extends ProxyServlet {
// Check with the Authorizer
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -233,7 +237,7 @@ public class SubscriptionServlet extends ProxyServlet {
try {
resp.getOutputStream().print(sub.asJSONObject(true).toString());
} catch (IOException ioe) {
- eventlogger.error("IOException: " + ioe.getMessage());
+ eventlogger.error("PROV0191 SubscriptionServlet.doGet: " + ioe.getMessage(), ioe);
}
} finally {
eelfLogger.info(EelfMsgs.EXIT);
@@ -265,7 +269,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing " + BEHALF_HEADER + " header.";
+ message = MISSING_ON_BEHALF;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -274,7 +278,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
int subid = getIdFromPath(req);
if (subid < 0) {
- message = "Missing or bad subscription number.";
+ message = BAD_SUB;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -283,7 +287,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
Subscription oldsub = Subscription.getSubscriptionById(subid);
if (oldsub == null) {
- message = "Missing or bad subscription number.";
+ message = BAD_SUB;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_NOT_FOUND);
eventlogger.error(elr.toString());
@@ -293,7 +297,7 @@ public class SubscriptionServlet extends ProxyServlet {
// check content type is SUB_CONTENT_TYPE, version 1.0
ContentHeader ch = getContentHeader(req);
String ver = ch.getAttribute("version");
- if (!ch.getType().equals(SUB_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
+ if (!ch.getType().equals(SUB_BASECONTENT_TYPE) || !("1.0".equals(ver) || "2.0".equals(ver))) {
message = "Incorrect content-type";
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
@@ -303,7 +307,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
JSONObject jo = getJSONfromInput(req);
if (jo == null) {
- message = "Badly formed JSON";
+ message = BAD_JSON;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -320,7 +324,7 @@ public class SubscriptionServlet extends ProxyServlet {
message = e.getMessage();
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.error(elr.toString());
+ eventlogger.error(elr.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
return;
}
@@ -331,10 +335,10 @@ public class SubscriptionServlet extends ProxyServlet {
* CADI code - check on permissions based on Legacy/AAF users to allow to delete/remove subscription
*/
String aafInstance = sub.getAafInstance();
- if (aafInstance == null || aafInstance.equals("") || aafInstance.equalsIgnoreCase("legacy")) {
+ if (aafInstance == null || "".equals(aafInstance) || "legacy".equalsIgnoreCase(aafInstance)) {
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -380,7 +384,7 @@ public class SubscriptionServlet extends ProxyServlet {
try {
resp.getOutputStream().print(sub.asLimitedJSONObject().toString());
} catch (IOException ioe) {
- eventlogger.error("IOException: " + ioe.getMessage());
+ eventlogger.error("PROV0192 SubscriptionServlet.doPut: " + ioe.getMessage(), ioe);
}
/**Change Owner ship of Subscriber Adding for group feature:Rally US708115*/
@@ -392,7 +396,7 @@ public class SubscriptionServlet extends ProxyServlet {
sub.changeOwnerShip();
}
} catch (JSONException je) {
- eventlogger.error("JSONException: " + je.getMessage());
+ eventlogger.error("PROV0193 SubscriptionServlet.doPut: " + je.getMessage(), je);
}
}
/***End of change ownership*/
@@ -435,7 +439,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing " + BEHALF_HEADER + " header.";
+ message = MISSING_ON_BEHALF;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -444,7 +448,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
final int subid = getIdFromPath(req);
if (subid < 0 || Subscription.getSubscriptionById(subid) == null) {
- message = "Missing or bad subscription number.";
+ message = BAD_SUB;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -454,7 +458,7 @@ public class SubscriptionServlet extends ProxyServlet {
// check content type is SUBCNTRL_CONTENT_TYPE, version 1.0
ContentHeader ch = getContentHeader(req);
String ver = ch.getAttribute("version");
- if (!ch.getType().equals(SUBCNTRL_CONTENT_TYPE) || !ver.equals("1.0")) {
+ if (!ch.getType().equals(SUBCNTRL_CONTENT_TYPE) || !"1.0".equals(ver)) {
message = "Incorrect content-type";
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
@@ -465,7 +469,7 @@ public class SubscriptionServlet extends ProxyServlet {
// Check with the Authorizer
AuthorizationResponse aresp = authz.decide(req);
if (!aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
+ message = POLICY_ENGINE;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_FORBIDDEN);
eventlogger.error(elr.toString());
@@ -474,7 +478,7 @@ public class SubscriptionServlet extends ProxyServlet {
}
JSONObject jo = getJSONfromInput(req);
if (jo == null) {
- message = "Badly formed JSON";
+ message = BAD_JSON;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
@@ -496,10 +500,10 @@ public class SubscriptionServlet extends ProxyServlet {
eventlogger.info(elr.toString());
resp.setStatus(HttpServletResponse.SC_ACCEPTED);
} catch (JSONException e) {
- message = "Badly formed JSON";
+ message = BAD_JSON;
elr.setMessage(message);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.error(elr.toString());
+ eventlogger.error(elr.toString(), e);
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
}
} finally {
@@ -514,7 +518,7 @@ public class SubscriptionServlet extends ProxyServlet {
public class SubscriberNotifyThread extends Thread {
public static final String URL_TEMPLATE = "http://%s/internal/resetSubscription/%d";
- private List<String> urls = new Vector<String>();
+ private List<String> urls = new Vector<>();
public SubscriberNotifyThread() {
setName("SubscriberNotifyThread");
@@ -527,7 +531,9 @@ public class SubscriptionServlet extends ProxyServlet {
}
}
+ @Override
public void run() {
+
try {
while (!urls.isEmpty()) {
String u = urls.remove(0);
@@ -538,11 +544,11 @@ public class SubscriptionServlet extends ProxyServlet {
conn.getContentLength(); // Force the GET through
conn.disconnect();
} catch (IOException e) {
- intlogger.info("IOException Error accessing URL: " + u + ": " + e.getMessage());
+ intlogger.info("PROV0194 Error accessing URL: " + u + ": " + e.getMessage(), e);
}
}
} catch (Exception e) {
- intlogger.warn("Caught exception in SubscriberNotifyThread: " + e.getMessage());
+ intlogger.warn("PROV0195 Caught exception in SubscriberNotifyThread: " + e.getMessage(), e);
}
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java
index 3097a9db..8c5a49a4 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java
@@ -24,6 +24,11 @@
package org.onap.dmaap.datarouter.provisioning;
+import static org.onap.dmaap.datarouter.provisioning.BaseServlet.TEXT_CT;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
@@ -50,8 +55,6 @@ import java.util.TreeSet;
import javax.servlet.http.HttpServletResponse;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
@@ -85,7 +88,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities;
* <li>Checking DNS once per minute to see which POD the DNS CNAME points to. The CNAME will point to
* the active (master) POD.</li>
* <li>On non-master (standby) PODs, fetches provisioning data and logs in order to keep MariaDB in sync.</li>
- * <li>Providing information to other parts of the system as to the current role (ACTIVE, STANDBY, UNKNOWN)
+ * <li>Providing information to other parts of the system as to the current role (ACTIVE_POD, STANDBY_POD, UNKNOWN_POD)
* of this POD.</li>
* </ol>
* <p>For this to work correctly, the following code needs to be placed at the beginning of main().</p>
@@ -99,167 +102,152 @@ import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities;
public class SynchronizerTask extends TimerTask {
/**
- * This is a singleton -- there is only one SynchronizerTask object in the server
+ * This is a singleton -- there is only one SynchronizerTask object in the server.
*/
private static SynchronizerTask synctask;
/**
- * This POD is unknown -- not on the list of PODs
+ * This POD is unknown -- not on the list of PODs.
*/
- public static final int UNKNOWN = 0;
+ public static final int UNKNOWN_POD = 0;
/**
- * This POD is active -- on the list of PODs, and the DNS CNAME points to us
+ * This POD is active -- on the list of PODs, and the DNS CNAME points to us.
*/
- public static final int ACTIVE = 1;
+ public static final int ACTIVE_POD = 1;
/**
- * This POD is standby -- on the list of PODs, and the DNS CNAME does not point to us
+ * This POD is standby -- on the list of PODs, and the DNS CNAME does not point to us.
*/
- public static final int STANDBY = 2;
- private static final String[] stnames = {"UNKNOWN", "ACTIVE", "STANDBY"};
+ public static final int STANDBY_POD = 2;
+
+ private static final String[] stnames = {"UNKNOWN_POD", "ACTIVE_POD", "STANDBY_POD"};
private static final long ONE_HOUR = 60 * 60 * 1000L;
+ private long nextMsg = 0; // only display the "Current podState" msg every 5 mins.
+
private final EELFLogger logger;
private final Timer rolex;
private final String spooldir;
- private int state;
+ private int podState;
private boolean doFetch;
private long nextsynctime;
private AbstractHttpClient httpclient = null;
- /**
- * Get the singleton SynchronizerTask object.
- *
- * @return the SynchronizerTask
- */
- public static synchronized SynchronizerTask getSynchronizer() {
- if (synctask == null) {
- synctask = new SynchronizerTask();
- }
- return synctask;
- }
-
@SuppressWarnings("deprecation")
private SynchronizerTask() {
logger = EELFManager.getInstance().getLogger("InternalLog");
rolex = new Timer();
spooldir = (new DB()).getProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir");
- state = UNKNOWN;
+ podState = UNKNOWN_POD;
doFetch = true; // start off with a fetch
nextsynctime = 0;
- logger.info("PROV5000: Sync task starting, server state is UNKNOWN");
+ logger.info("PROV5000: Sync task starting, server podState is UNKNOWN_POD");
try {
Properties props = (new DB()).getProperties();
String type = props.getProperty(Main.KEYSTORE_TYPE_PROPERTY, "jks");
String store = props.getProperty(Main.KEYSTORE_PATH_PROPERTY);
String pass = props.getProperty(Main.KEYSTORE_PASS_PROPERTY);
KeyStore keyStore = KeyStore.getInstance(type);
- try(FileInputStream instream = new FileInputStream(new File(store))) {
+ try (FileInputStream instream = new FileInputStream(new File(store))) {
keyStore.load(instream, pass.toCharArray());
}
- store = props.getProperty(Main.TRUSTSTORE_PATH_PROPERTY);
- pass = props.getProperty(Main.TRUSTSTORE_PASS_PROPERTY);
- KeyStore trustStore = null;
- if (store != null && store.length() > 0) {
- trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
- try(FileInputStream instream = new FileInputStream(new File(store))){
- trustStore.load(instream, pass.toCharArray());
+ store = props.getProperty(Main.TRUSTSTORE_PATH_PROPERTY);
+ pass = props.getProperty(Main.TRUSTSTORE_PASS_PROPERTY);
+ KeyStore trustStore = null;
+ if (store != null && store.length() > 0) {
+ trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
+ try (FileInputStream instream = new FileInputStream(new File(store))) {
+ trustStore.load(instream, pass.toCharArray());
- }
}
+ }
// We are connecting with the node name, but the certificate will have the CNAME
// So we need to accept a non-matching certificate name
- String keystorepass = props.getProperty(
- Main.KEYSTORE_PASS_PROPERTY); //itrack.web.att.com/browse/DATARTR-6 for changing hard coded passphase ref
- try(AbstractHttpClient hc = new DefaultHttpClient()) {
- SSLSocketFactory socketFactory =
- (trustStore == null)
- ? new SSLSocketFactory(keyStore, keystorepass)
- : new SSLSocketFactory(keyStore, keystorepass, trustStore);
- socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
- Scheme sch = new Scheme("https", 443, socketFactory);
- hc.getConnectionManager().getSchemeRegistry().register(sch);
- httpclient = hc;
- }
- // Run once every 5 seconds to check DNS, etc.
- long interval = 0;
- try {
- String s = props.getProperty("org.onap.dmaap.datarouter.provserver.sync_interval", "5000");
- interval = Long.parseLong(s);
- } catch (NumberFormatException e) {
- interval = 5000L;
+ String keystorepass = props.getProperty(Main.KEYSTORE_PASS_PROPERTY);
+ try (AbstractHttpClient hc = new DefaultHttpClient()) {
+ SSLSocketFactory socketFactory =
+ (trustStore == null)
+ ? new SSLSocketFactory(keyStore, keystorepass)
+ : new SSLSocketFactory(keyStore, keystorepass, trustStore);
+ socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
+ Scheme sch = new Scheme("https", 443, socketFactory);
+ hc.getConnectionManager().getSchemeRegistry().register(sch);
+ httpclient = hc;
}
- rolex.scheduleAtFixedRate(this, 0L, interval);
+ setSynchTimer(props);
} catch (Exception e) {
logger.warn("PROV5005: Problem starting the synchronizer: " + e);
}
}
+ private void setSynchTimer(Properties props) {
+ // Run once every 5 seconds to check DNS, etc.
+ long interval;
+ try {
+ String s = props.getProperty("org.onap.dmaap.datarouter.provserver.sync_interval", "5000");
+ interval = Long.parseLong(s);
+ } catch (NumberFormatException e) {
+ interval = 5000L;
+ }
+ rolex.scheduleAtFixedRate(this, 0L, interval);
+ }
+
+ /**
+ * Get the singleton SynchronizerTask object.
+ *
+ * @return the SynchronizerTask
+ */
+ public static synchronized SynchronizerTask getSynchronizer() {
+ if (synctask == null) {
+ synctask = new SynchronizerTask();
+ }
+ return synctask;
+ }
+
/**
- * What is the state of this POD?
+ * What is the podState of this POD?.
*
- * @return one of ACTIVE, STANDBY, UNKNOWN
+ * @return one of ACTIVE_POD, STANDBY_POD, UNKNOWN_POD
*/
- public int getState() {
- return state;
+ public int getPodState() {
+ return podState;
}
/**
- * Is this the active POD?
+ * Is this the active POD?.
*
* @return true if we are active (the master), false otherwise
*/
public boolean isActive() {
- return state == ACTIVE;
+ return podState == ACTIVE_POD;
}
/**
* This method is used to signal that another POD (the active POD) has sent us a /fetchProv request, and that we
* should re-synchronize with the master.
*/
- public void doFetch() {
+ void doFetch() {
doFetch = true;
}
/**
* Runs once a minute in order to <ol>
* <li>lookup DNS names,</li>
- * <li>determine the state of this POD,</li>
- * <li>if this is a standby POD, and the fetch flag is set, perform a fetch of state from the active POD.</li>
+ * <li>determine the podState of this POD,</li>
+ * <li>if this is a standby POD, and the fetch flag is set, perform a fetch of podState from the active POD.</li>
* <li>if this is a standby POD, check if there are any new log records to be replicated.</li>
- * </ol>
+ * </ol>.
*/
@Override
public void run() {
try {
- state = lookupState();
- if (state == STANDBY) {
+ podState = lookupState();
+ if (podState == STANDBY_POD) {
// Only copy provisioning data FROM the active server TO the standby
if (doFetch || (System.currentTimeMillis() >= nextsynctime)) {
- logger.debug("Initiating a sync...");
- JSONObject jo = readProvisioningJSON();
- if (jo != null) {
- doFetch = false;
- syncFeeds(jo.getJSONArray("feeds"));
- syncSubs(jo.getJSONArray("subscriptions"));
- syncGroups(jo.getJSONArray("groups")); //Rally:US708115 - 1610
- syncParams(jo.getJSONObject("parameters"));
- // The following will not be present in a version=1.0 provfeed
- JSONArray ja = jo.optJSONArray("ingress");
- if (ja != null) {
- syncIngressRoutes(ja);
- }
- JSONObject j2 = jo.optJSONObject("egress");
- if (j2 != null) {
- syncEgressRoutes(j2);
- }
- ja = jo.optJSONArray("routing");
- if (ja != null) {
- syncNetworkRoutes(ja);
- }
- }
+ syncProvisioningData();
logger.info("PROV5013: Sync completed.");
nextsynctime = System.currentTimeMillis() + ONE_HOUR;
}
@@ -278,7 +266,7 @@ public class SynchronizerTask extends TimerTask {
remote.andNot(local);
if (!remote.isEmpty()) {
logger.debug(" Replicating logs: " + remote);
- replicateDRLogs(remote);
+ replicateDataRouterLogs(remote);
}
}
} catch (Exception e) {
@@ -286,14 +274,39 @@ public class SynchronizerTask extends TimerTask {
}
}
+ private void syncProvisioningData() {
+ logger.debug("Initiating a sync...");
+ JSONObject jo = readProvisioningJson();
+ if (jo != null) {
+ doFetch = false;
+ syncFeeds(jo.getJSONArray("feeds"));
+ syncSubs(jo.getJSONArray("subscriptions"));
+ syncGroups(jo.getJSONArray("groups")); //Rally:US708115 - 1610
+ syncParams(jo.getJSONObject("parameters"));
+ // The following will not be present in a version=1.0 provfeed
+ JSONArray ja = jo.optJSONArray("ingress");
+ if (ja != null) {
+ syncIngressRoutes(ja);
+ }
+ JSONObject j2 = jo.optJSONObject("egress");
+ if (j2 != null) {
+ syncEgressRoutes(j2);
+ }
+ ja = jo.optJSONArray("routing");
+ if (ja != null) {
+ syncNetworkRoutes(ja);
+ }
+ }
+ }
+
/**
- * This method is used to lookup the CNAME that points to the active server. It returns 0 (UNKNOWN), 1(ACTIVE), or 2
- * (STANDBY) to indicate the state of this server.
+ * This method is used to look up the CNAME that points to the active server.
+ * It returns 0 (UNKNOWN_POD), 1 (ACTIVE_POD), or 2 (STANDBY_POD) to indicate the podState of this server.
*
- * @return the current state
+ * @return the current podState
*/
- private int lookupState() {
- int newstate = UNKNOWN;
+ int lookupState() {
+ int newPodState = UNKNOWN_POD;
try {
InetAddress myaddr = InetAddress.getLocalHost();
if (logger.isTraceEnabled()) {
@@ -303,27 +316,25 @@ public class SynchronizerTask extends TimerTask {
Set<String> pods = new TreeSet<>(Arrays.asList(BaseServlet.getPods()));
if (pods.contains(thisPod)) {
InetAddress pserver = InetAddress.getByName(BaseServlet.getActiveProvName());
- newstate = myaddr.equals(pserver) ? ACTIVE : STANDBY;
+ newPodState = myaddr.equals(pserver) ? ACTIVE_POD : STANDBY_POD;
if (logger.isDebugEnabled() && System.currentTimeMillis() >= nextMsg) {
- logger.debug("Active POD = " + pserver + ", Current state is " + stnames[newstate]);
+ logger.debug("Active POD = " + pserver + ", Current podState is " + stnames[newPodState]);
nextMsg = System.currentTimeMillis() + (5 * 60 * 1000L);
}
} else {
logger.warn("PROV5003: My name (" + thisPod + ") is missing from the list of provisioning servers.");
}
} catch (UnknownHostException e) {
- logger.warn("PROV5002: Cannot determine the name of this provisioning server.");
+ logger.warn("PROV5002: Cannot determine the name of this provisioning server.", e);
}
- if (newstate != state) {
- logger
- .info(String.format("PROV5001: Server state changed from %s to %s", stnames[state], stnames[newstate]));
+ if (newPodState != podState) {
+ logger.info(String.format("PROV5001: Server podState changed from %s to %s",
+ stnames[podState], stnames[newPodState]));
}
- return newstate;
+ return newPodState;
}
- private static long nextMsg = 0; // only display the "Current state" msg every 5 mins.
-
/**
* Synchronize the Feeds in the JSONArray, with the Feeds in the DB.
*/
@@ -334,7 +345,7 @@ public class SynchronizerTask extends TimerTask {
Feed f = new Feed(ja.getJSONObject(n));
coll.add(f);
} catch (Exception e) {
- logger.warn("PROV5004: Invalid object in feed: " + ja.optJSONObject(n));
+ logger.warn("PROV5004: Invalid object in feed: " + ja.optJSONObject(n), e);
}
}
if (sync(coll, Feed.getAllFeeds())) {
@@ -355,7 +366,7 @@ public class SynchronizerTask extends TimerTask {
Subscription s = new Subscription(j);
coll.add(s);
} catch (Exception e) {
- logger.warn("PROV5004: Invalid object in subscription: " + ja.optJSONObject(n));
+ logger.warn("PROV5004: Invalid object in subscription: " + ja.optJSONObject(n), e);
}
}
if (sync(coll, Subscription.getAllSubscriptions())) {
@@ -373,7 +384,7 @@ public class SynchronizerTask extends TimerTask {
Group g = new Group(ja.getJSONObject(n));
coll.add(g);
} catch (Exception e) {
- logger.warn("PROV5004: Invalid object in subscription: " + ja.optJSONObject(n));
+ logger.warn("PROV5004: Invalid object in group: " + ja.optJSONObject(n), e);
}
}
if (sync(coll, Group.getAllgroups())) {
@@ -392,9 +403,11 @@ public class SynchronizerTask extends TimerTask {
try {
v = jo.getString(k);
} catch (JSONException e) {
+ logger.warn("PROV5004: Invalid object in parameters: " + jo.optJSONObject(k), e);
try {
v = "" + jo.getInt(k);
} catch (JSONException e1) {
+ logger.warn("PROV5004: Invalid object in parameters: " + jo.optInt(k), e1);
JSONArray ja = jo.getJSONArray(k);
for (int i = 0; i < ja.length(); i++) {
if (i > 0) {
@@ -436,9 +449,9 @@ public class SynchronizerTask extends TimerTask {
EgressRoute er = new EgressRoute(sub, node);
coll.add(er);
} catch (NumberFormatException e) {
- logger.warn("PROV5004: Invalid subid in egress routes: " + key);
+ logger.warn("PROV5004: Invalid subid in egress routes: " + key, e);
} catch (IllegalArgumentException e) {
- logger.warn("PROV5004: Invalid node name in egress routes: " + key);
+ logger.warn("PROV5004: Invalid node name in egress routes: " + key, e);
}
}
if (sync(coll, EgressRoute.getAllEgressRoutes())) {
@@ -453,7 +466,7 @@ public class SynchronizerTask extends TimerTask {
NetworkRoute nr = new NetworkRoute(ja.getJSONObject(n));
coll.add(nr);
} catch (JSONException e) {
- logger.warn("PROV5004: Invalid object in network routes: " + ja.optJSONObject(n));
+ logger.warn("PROV5004: Invalid object in network routes: " + ja.optJSONObject(n), e);
}
}
if (sync(coll, NetworkRoute.getAllNetworkRoutes())) {
@@ -475,29 +488,11 @@ public class SynchronizerTask extends TimerTask {
Syncable newobj = newmap.get(n);
Syncable oldobj = oldmap.get(n);
if (oldobj == null) {
- if (logger.isDebugEnabled()) {
- logger.debug(" Inserting record: " + newobj);
- }
- newobj.doInsert(conn);
- changes = true;
+ changes = insertRecord(conn, newobj);
} else if (newobj == null) {
- if (logger.isDebugEnabled()) {
- logger.debug(" Deleting record: " + oldobj);
- }
- oldobj.doDelete(conn);
- changes = true;
+ changes = deleteRecord(conn, oldobj);
} else if (!newobj.equals(oldobj)) {
- if (logger.isDebugEnabled()) {
- logger.debug(" Updating record: " + newobj);
- }
- newobj.doUpdate(conn);
-
- /**Rally US708115
- * Change Ownership of FEED - 1610, Syncronised with secondary DB.
- * */
- checkChnageOwner(newobj, oldobj);
-
- changes = true;
+ changes = updateRecord(conn, newobj, oldobj);
}
}
db.release(conn);
@@ -507,6 +502,30 @@ public class SynchronizerTask extends TimerTask {
return changes;
}
+ private boolean updateRecord(Connection conn, Syncable newobj, Syncable oldobj) {
+ if (logger.isDebugEnabled()) {
+ logger.debug(" Updating record: " + newobj);
+ }
+ boolean changes = newobj.doUpdate(conn);
+ checkChangeOwner(newobj, oldobj);
+
+ return changes;
+ }
+
+ private boolean deleteRecord(Connection conn, Syncable oldobj) {
+ if (logger.isDebugEnabled()) {
+ logger.debug(" Deleting record: " + oldobj);
+ }
+ return oldobj.doDelete(conn);
+ }
+
+ private boolean insertRecord(Connection conn, Syncable newobj) {
+ if (logger.isDebugEnabled()) {
+ logger.debug(" Inserting record: " + newobj);
+ }
+ return newobj.doInsert(conn);
+ }
+
private Map<String, Syncable> getMap(Collection<? extends Syncable> c) {
Map<String, Syncable> map = new HashMap<>();
for (Syncable v : c) {
@@ -515,18 +534,18 @@ public class SynchronizerTask extends TimerTask {
return map;
}
- /**Change owner of FEED/SUBSCRIPTION*/
/**
+ * Change owner of FEED/SUBSCRIPTION.
* Rally US708115 Change Ownership of FEED - 1610
*/
- private void checkChnageOwner(Syncable newobj, Syncable oldobj) {
+ private void checkChangeOwner(Syncable newobj, Syncable oldobj) {
if (newobj instanceof Feed) {
Feed oldfeed = (Feed) oldobj;
Feed newfeed = (Feed) newobj;
if (!oldfeed.getPublisher().equals(newfeed.getPublisher())) {
- logger.info("PROV5013 - Previous publisher: " + oldfeed.getPublisher() + ": New publisher-" + newfeed
- .getPublisher());
+ logger.info("PROV5013 - Previous publisher: "
+ + oldfeed.getPublisher() + ": New publisher-" + newfeed.getPublisher());
oldfeed.setPublisher(newfeed.getPublisher());
oldfeed.changeOwnerShip();
}
@@ -535,8 +554,8 @@ public class SynchronizerTask extends TimerTask {
Subscription newsub = (Subscription) newobj;
if (!oldsub.getSubscriber().equals(newsub.getSubscriber())) {
- logger.info("PROV5013 - Previous subscriber: " + oldsub.getSubscriber() + ": New subscriber-" + newsub
- .getSubscriber());
+ logger.info("PROV5013 - Previous subscriber: "
+ + oldsub.getSubscriber() + ": New subscriber-" + newsub.getSubscriber());
oldsub.setSubscriber(newsub.getSubscriber());
oldsub.changeOwnerShip();
}
@@ -549,26 +568,26 @@ public class SynchronizerTask extends TimerTask {
*
* @return the provisioning data (as a JONObject)
*/
- private synchronized JSONObject readProvisioningJSON() {
+ private synchronized JSONObject readProvisioningJson() {
String url = URLUtilities.generatePeerProvURL();
HttpGet get = new HttpGet(url);
try {
HttpResponse response = httpclient.execute(get);
int code = response.getStatusLine().getStatusCode();
if (code != HttpServletResponse.SC_OK) {
- logger.warn("PROV5010: readProvisioningJSON failed, bad error code: " + code);
+ logger.warn("PROV5010: readProvisioningJson failed, bad error code: " + code);
return null;
}
HttpEntity entity = response.getEntity();
String ctype = entity.getContentType().getValue().trim();
- if (!ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE1) && !ctype
- .equals(BaseServlet.PROVFULL_CONTENT_TYPE2)) {
- logger.warn("PROV5011: readProvisioningJSON failed, bad content type: " + ctype);
+ if (!ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE1)
+ && !ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE2)) {
+ logger.warn("PROV5011: readProvisioningJson failed, bad content type: " + ctype);
return null;
}
return new JSONObject(new JSONTokener(entity.getContent()));
} catch (Exception e) {
- logger.warn("PROV5012: readProvisioningJSON failed, exception: " + e);
+ logger.warn("PROV5012: readProvisioningJson failed, exception: " + e);
return null;
} finally {
get.releaseConnection();
@@ -581,18 +600,18 @@ public class SynchronizerTask extends TimerTask {
*
* @return the bitset
*/
- private RLEBitSet readRemoteLoglist() {
+ RLEBitSet readRemoteLoglist() {
RLEBitSet bs = new RLEBitSet();
String url = URLUtilities.generatePeerLogsURL();
//Fixing if only one Prov is configured, not to give exception to fill logs, return empty bitset.
- if (url.equals("")) {
+ if ("".equals(url)) {
return bs;
}
//End of fix.
HttpGet get = new HttpGet(url);
- try {
+ try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
HttpResponse response = httpclient.execute(get);
int code = response.getStatusLine().getStatusCode();
if (code != HttpServletResponse.SC_OK) {
@@ -601,13 +620,12 @@ public class SynchronizerTask extends TimerTask {
}
HttpEntity entity = response.getEntity();
String ctype = entity.getContentType().getValue().trim();
- if (!ctype.equals("text/plain")) {
+ if (!TEXT_CT.equals(ctype)) {
logger.warn("PROV5011: readRemoteLoglist failed, bad content type: " + ctype);
return bs;
}
InputStream is = entity.getContent();
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- int ch = 0;
+ int ch;
while ((ch = is.read()) >= 0) {
bos.write(ch);
}
@@ -628,12 +646,12 @@ public class SynchronizerTask extends TimerTask {
*
* @param bs the bitset (an RELBitSet) of log records to fetch
*/
- private void replicateDRLogs(RLEBitSet bs) {
+ void replicateDataRouterLogs(RLEBitSet bs) {
String url = URLUtilities.generatePeerLogsURL();
HttpPost post = new HttpPost(url);
try {
String t = bs.toString();
- HttpEntity body = new ByteArrayEntity(t.getBytes(), ContentType.create("text/plain"));
+ HttpEntity body = new ByteArrayEntity(t.getBytes(), ContentType.create(TEXT_CT));
post.setEntity(body);
if (logger.isDebugEnabled()) {
logger.debug("Requesting records: " + t);
@@ -642,13 +660,13 @@ public class SynchronizerTask extends TimerTask {
HttpResponse response = httpclient.execute(post);
int code = response.getStatusLine().getStatusCode();
if (code != HttpServletResponse.SC_OK) {
- logger.warn("PROV5010: replicateDRLogs failed, bad error code: " + code);
+ logger.warn("PROV5010: replicateDataRouterLogs failed, bad error code: " + code);
return;
}
HttpEntity entity = response.getEntity();
String ctype = entity.getContentType().getValue().trim();
- if (!ctype.equals("text/plain")) {
- logger.warn("PROV5011: replicateDRLogs failed, bad content type: " + ctype);
+ if (!TEXT_CT.equals(ctype)) {
+ logger.warn("PROV5011: replicateDataRouterLogs failed, bad content type: " + ctype);
return;
}
@@ -659,7 +677,7 @@ public class SynchronizerTask extends TimerTask {
Files.move(tmppath, donepath, StandardCopyOption.REPLACE_EXISTING);
logger.info("Approximately " + bs.cardinality() + " records replicated.");
} catch (Exception e) {
- logger.warn("PROV5012: replicateDRLogs failed, exception: " + e);
+ logger.warn("PROV5012: replicateDataRouterLogs failed, exception: " + e);
} finally {
post.releaseConnection();
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/DeliveryExtraRecord.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/DeliveryExtraRecord.java
index 2a959f3d..0e5342a6 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/DeliveryExtraRecord.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/DeliveryExtraRecord.java
@@ -44,12 +44,7 @@ public class DeliveryExtraRecord extends BaseLogRecord {
this.subid = Integer.parseInt(pp[4]);
this.contentLength2 = Long.parseLong(pp[6]);
}
- public DeliveryExtraRecord(ResultSet rs) throws SQLException {
- super(rs);
- // Note: because this record should be "rare" these fields are mapped to unconventional fields in the DB
- this.subid = rs.getInt("DELIVERY_SUBID");
- this.contentLength2 = rs.getInt("CONTENT_LENGTH_2");
- }
+
@Override
public void load(PreparedStatement ps) throws SQLException {
ps.setString(1, "dlx"); // field 1: type
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java
index 12135969..e766e704 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java
@@ -47,6 +47,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB;
public class EgressRoute extends NodeClass implements Comparable<EgressRoute> {
private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
+ private static final String SQLEXCEPTION = "SQLException: ";
private final int subid;
private final int nodeid;
@@ -57,7 +58,7 @@ public class EgressRoute extends NodeClass implements Comparable<EgressRoute> {
* @return the sorted set
*/
public static SortedSet<EgressRoute> getAllEgressRoutes() {
- SortedSet<EgressRoute> set = new TreeSet<EgressRoute>();
+ SortedSet<EgressRoute> set = new TreeSet<>();
try {
DB db = new DB();
@SuppressWarnings("resource")
@@ -74,7 +75,7 @@ public class EgressRoute extends NodeClass implements Comparable<EgressRoute> {
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0008 EgressRoute.getAllEgressRoutes: " + e.getMessage(), e);
}
return set;
}
@@ -104,20 +105,20 @@ public class EgressRoute extends NodeClass implements Comparable<EgressRoute> {
ps.close();
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0009 EgressRoute.getEgressRoute: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return v;
}
- public EgressRoute(int subid, int nodeid) throws IllegalArgumentException {
+ public EgressRoute(int subid, int nodeid) {
this.subid = subid;
this.nodeid = nodeid;
// Note: unlike for Feeds, it subscriptions can be removed from the tables, so it is
@@ -126,7 +127,7 @@ public class EgressRoute extends NodeClass implements Comparable<EgressRoute> {
// throw new IllegalArgumentException("No such subscription: "+subid);
}
- public EgressRoute(int subid, String node) throws IllegalArgumentException {
+ public EgressRoute(int subid, String node) {
this(subid, lookupNodeName(node));
}
@@ -141,15 +142,14 @@ public class EgressRoute extends NodeClass implements Comparable<EgressRoute> {
ps.execute();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0007 doDelete: " + e.getMessage());
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0007 doDelete: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -169,14 +169,14 @@ public class EgressRoute extends NodeClass implements Comparable<EgressRoute> {
ps.close();
rv = true;
} catch (SQLException e) {
- intlogger.warn("PROV0005 doInsert: " + e.getMessage());
+ intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -194,14 +194,14 @@ public class EgressRoute extends NodeClass implements Comparable<EgressRoute> {
ps.executeUpdate();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java
index 506821e2..1cfd0f6b 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java
@@ -47,6 +47,7 @@ import java.util.*;
public class Feed extends Syncable {
private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
private static int next_feedid = getMaxFeedID() + 1;
+ private static final String SQLEXCEPTION = "SQLException: ";
private int feedid;
private int groupid; //New field is added - Groups feature Rally:US708115 - 1610
@@ -85,7 +86,7 @@ public class Feed extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.warn("PROV0024 Feed.isFeedValid: ", e.getMessage());
+ intlogger.warn("PROV0024 Feed.isFeedValid: " + e.getMessage(), e);
}
return count != 0;
}
@@ -135,7 +136,7 @@ public class Feed extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.warn("PROV0025 Feed.countActiveFeeds: ", e.getMessage());
+ intlogger.warn("PROV0025 Feed.countActiveFeeds: " + e.getMessage(), e);
}
return count;
}
@@ -155,7 +156,7 @@ public class Feed extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.warn("PROV0026 Feed.getMaxFeedID: ", e.getMessage());
+ intlogger.warn("PROV0026 Feed.getMaxFeedID: " + e.getMessage(), e);
}
return max;
}
@@ -201,7 +202,7 @@ public class Feed extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.warn("PROV0027 Feed.getAllFeeds: ", e.getMessage());
+ intlogger.warn("PROV0027 Feed.getAllFeeds: " + e.getMessage(), e);
}
return map.values();
}
@@ -235,7 +236,7 @@ public class Feed extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.warn("PROV0028 Feed.getFilteredFeedUrlList: ", e.getMessage());
+ intlogger.warn("PROV0028 Feed.getFilteredFeedUrlList: " + e.getMessage(), e);
}
return list;
}
@@ -272,7 +273,7 @@ public class Feed extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.warn("PROV0029 Feed.getFeedBySQL: ", e.getMessage());
+ intlogger.warn("PROV0029 Feed.getFeedBySQL: " + e.getMessage(), e);
}
return feed;
}
@@ -338,6 +339,7 @@ public class Feed extends Syncable {
try {
this.version = jo.getString("version");
} catch (JSONException e) {
+ intlogger.warn("PROV0023 Feed.Feed: " + e.getMessage(), e);
this.version = null;
}
if(version != null && version.length() > 20)
@@ -379,11 +381,10 @@ public class Feed extends Syncable {
JSONObject jol = jo.optJSONObject("links");
this.links = (jol == null) ? (new FeedLinks()) : (new FeedLinks(jol));
} catch (InvalidObjectException e) {
- intlogger.warn("PROV0030 Feed.Feed: ", e.getMessage());
throw e;
} catch (Exception e) {
- intlogger.error("PROV0031 Feed.Feed: invalid JSON: "+e);
- throw new InvalidObjectException("invalid JSON: " + e.getMessage());
+ intlogger.warn("Invalid JSON: " + e.getMessage(), e);
+ throw new InvalidObjectException("Invalid JSON: " + e.getMessage());
}
}
@@ -496,14 +497,6 @@ public class Feed extends Syncable {
this.suspended = suspended;
}
- public Date getLast_mod() {
- return last_mod;
- }
-
- public Date getCreated_date() {
- return created_date;
- }
-
@Override
public JSONObject asJSONObject() {
JSONObject jo = new JSONObject();
@@ -555,15 +548,14 @@ public class Feed extends Syncable {
ps.execute();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0007 doDelete: " + e.getMessage());
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0007 doDelete: " + e.getMessage(), e);
} finally {
try {
if(ps!=null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -624,8 +616,7 @@ public class Feed extends Syncable {
}
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0005 doInsert: " + e.getMessage());
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0005 doInsert: " + e.getMessage(), e);
}
return rv;
}
@@ -705,13 +696,13 @@ public class Feed extends Syncable {
ps.close();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e);
} finally {
try {
if (ps != null)
ps.close();
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -737,14 +728,14 @@ public class Feed extends Syncable {
ps.close();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0008 changeOwnerShip: " + e.getMessage(), e);
} finally {
try {
if(ps!=null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/FeedEndpointID.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/FeedEndpointID.java
index cd482c61..384d9f64 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/FeedEndpointID.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/FeedEndpointID.java
@@ -39,10 +39,6 @@ public class FeedEndpointID implements JSONable {
private String id;
private String password;
- public FeedEndpointID() {
- this("", "");
- }
-
public FeedEndpointID(String id, String password) {
this.id = id;
this.password = password;
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
index 8904765a..da682d74 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
@@ -46,6 +46,7 @@ public class Group extends Syncable {
private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
private static int next_groupid = getMaxGroupID() + 1;
+ private static final String SQLEXCEPTION = "SQLException: ";
private int groupid;
private String authid;
@@ -107,7 +108,7 @@ public class Group extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0009 getGroupsForSQL: " + e.getMessage(), e);
}
return list;
}
@@ -127,59 +128,11 @@ public class Group extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.info("getMaxSubID: " + e.getMessage());
+ intlogger.info("PROV0001 getMaxSubID: " + e.getMessage(), e);
}
return max;
}
- public static Collection<String> getGroupsByClassfication(String classfication) {
- List<String> list = new ArrayList<>();
- String sql = "select * from GROUPS where classification = ?";
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- try (PreparedStatement stmt = conn.prepareStatement(sql)) {
- stmt.setString(1, classfication);
- try (ResultSet rs = stmt.executeQuery()) {
- while (rs.next()) {
- int groupid = rs.getInt("groupid");
-
- }
- }
- }
- db.release(conn);
- } catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
- }
- return list;
- }
-
- /**
- * Return a count of the number of active subscriptions in the DB.
- *
- * @return the count
- */
- public static int countActiveSubscriptions() {
- int count = 0;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- try (Statement stmt = conn.createStatement()) {
- try (ResultSet rs = stmt.executeQuery("select count(*) from SUBSCRIPTIONS")) {
- if (rs.next()) {
- count = rs.getInt(1);
- }
- }
- }
- db.release(conn);
- } catch (SQLException e) {
- intlogger.warn("PROV0008 countActiveSubscriptions: " + e.getMessage());
- }
- return count;
- }
-
public Group() {
this("", "", "");
}
@@ -229,7 +182,8 @@ public class Group extends Syncable {
} catch (InvalidObjectException e) {
throw e;
} catch (Exception e) {
- throw new InvalidObjectException("invalid JSON: " + e.getMessage());
+ intlogger.warn("Invalid JSON: " + e.getMessage(), e);
+ throw new InvalidObjectException("Invalid JSON: " + e.getMessage());
}
}
@@ -249,14 +203,6 @@ public class Group extends Syncable {
Group.intlogger = intlogger;
}
- public static int getNext_groupid() {
- return next_groupid;
- }
-
- public static void setNext_groupid(int next_groupid) {
- Group.next_groupid = next_groupid;
- }
-
public String getAuthid() {
return authid;
}
@@ -293,19 +239,6 @@ public class Group extends Syncable {
return members;
}
- public void setMembers(String members) {
- this.members = members;
- }
-
- public Date getLast_mod() {
- return last_mod;
- }
-
- public void setLast_mod(Date last_mod) {
- this.last_mod = last_mod;
- }
-
-
@Override
public JSONObject asJSONObject() {
JSONObject jo = new JSONObject();
@@ -346,14 +279,14 @@ public class Group extends Syncable {
ps.close();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0005 doInsert: " + e.getMessage());
+ intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -375,14 +308,14 @@ public class Group extends Syncable {
ps.executeUpdate();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -399,14 +332,14 @@ public class Group extends Syncable {
ps.execute();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0007 doDelete: " + e.getMessage());
+ intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java
index bbed37a3..329e77fa 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java
@@ -55,6 +55,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB;
public class IngressRoute extends NodeClass implements Comparable<IngressRoute> {
private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
+ private static final String SQLEXCEPTION = "SQLException: ";
private final int seq;
private final int feedid;
private final String userid;
@@ -102,7 +103,7 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0001 getAllIngressRoutesForSQL: " + e.getMessage(), e);
}
return set;
}
@@ -140,7 +141,7 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0002 getMax: " + e.getMessage(), e);
}
return rv;
}
@@ -175,51 +176,19 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
ps.close();
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error("PROV0003 getIngressRoute: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return v;
}
- /**
- * Get a collection of all Ingress Routes with a particular sequence number.
- *
- * @param seq the sequence number to look for
- * @return the collection (may be empty).
- */
- public static Collection<IngressRoute> getIngressRoute(int seq) {
- Collection<IngressRoute> rv = new ArrayList<IngressRoute>();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- String sql = "select FEEDID, USERID, SUBNET, NODESET from INGRESS_ROUTES where SEQUENCE = ?";
- try (PreparedStatement ps = conn.prepareStatement(sql)) {
- ps.setInt(1, seq);
- try (ResultSet rs = ps.executeQuery()) {
- while (rs.next()) {
- int feedid = rs.getInt("FEEDID");
- String user = rs.getString("USERID");
- String subnet = rs.getString("SUBNET");
- int nodeset = rs.getInt("NODESET");
- rv.add(new IngressRoute(seq, feedid, user, subnet, nodeset));
- }
- }
- }
- db.release(conn);
- } catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
- }
- return rv;
- }
-
public IngressRoute(int seq, int feedid, String user, String subnet, Collection<String> nodes)
throws IllegalArgumentException {
this(seq, feedid, user, subnet);
@@ -307,6 +276,7 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
SubnetMatcher sm = new SubnetMatcher(subnet);
return sm.matches(inet.getAddress());
} catch (UnknownHostException e) {
+ intlogger.error("PROV0008 matches: " + e.getMessage(), e);
return false;
}
}
@@ -337,6 +307,7 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
len = sn.length;
valid = true;
} catch (UnknownHostException e) {
+ intlogger.error("PROV0008 SubnetMatcher: " + e.getMessage(), e);
len = 0;
valid = false;
}
@@ -347,6 +318,7 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
sn = InetAddress.getByName(subnet.substring(0, i)).getAddress();
valid = true;
} catch (UnknownHostException e) {
+ intlogger.error("PROV0008 SubnetMatcher: " + e.getMessage(), e);
valid = false;
}
len = n / 8;
@@ -390,7 +362,7 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
}
private Collection<String> readNodes() {
- Collection<String> set = new TreeSet<String>();
+ Collection<String> set = new TreeSet<>();
try {
DB db = new DB();
@SuppressWarnings("resource")
@@ -407,7 +379,7 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
return set;
}
@@ -434,14 +406,14 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
ps.execute();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0007 doDelete: " + e.getMessage());
+ intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -477,14 +449,14 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
ps.close();
rv = true;
} catch (SQLException e) {
- intlogger.warn("PROV0005 doInsert: " + e.getMessage());
+ intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -524,14 +496,10 @@ public class IngressRoute extends NodeClass implements Comparable<IngressRoute>
@Override
public boolean equals(Object obj) {
- try {
- if (!(obj instanceof IngressRoute)) {
- return false;
- }
- return this.compareTo((IngressRoute) obj) == 0;
- } catch (NullPointerException e) {
+ if (!(obj instanceof IngressRoute)) {
return false;
}
+ return this.compareTo((IngressRoute) obj) == 0;
}
@Override
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java
index 86d1faea..cbddbf43 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java
@@ -77,7 +77,7 @@ public class LogRecord extends BaseLogRecord {
}
}
} catch (SQLException e) {
- intlogger.error("SQLException: " + e.getMessage());
+ intlogger.error("PROV0001 printLogRecords: " + e.getMessage(), e);
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java
index 94eeec4a..6ac05445 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java
@@ -47,6 +47,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB;
public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute> {
private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
+ private static final String SQLEXCEPTION = "SQLException: ";
private final int fromnode;
private final int tonode;
private final int vianode;
@@ -58,7 +59,7 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
* @return the sorted set
*/
public static SortedSet<NetworkRoute> getAllNetworkRoutes() {
- SortedSet<NetworkRoute> set = new TreeSet<NetworkRoute>();
+ SortedSet<NetworkRoute> set = new TreeSet<>();
try {
DB db = new DB();
@SuppressWarnings("resource")
@@ -75,30 +76,30 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
return set;
}
- public NetworkRoute(String fromnode, String tonode) throws IllegalArgumentException {
+ public NetworkRoute(String fromnode, String tonode) {
this.fromnode = lookupNodeName(fromnode);
this.tonode = lookupNodeName(tonode);
this.vianode = -1;
}
- public NetworkRoute(String fromnode, String tonode, String vianode) throws IllegalArgumentException {
+ public NetworkRoute(String fromnode, String tonode, String vianode) {
this.fromnode = lookupNodeName(fromnode);
this.tonode = lookupNodeName(tonode);
this.vianode = lookupNodeName(vianode);
}
- public NetworkRoute(JSONObject jo) throws IllegalArgumentException {
+ public NetworkRoute(JSONObject jo) {
this.fromnode = lookupNodeName(jo.getString("from"));
this.tonode = lookupNodeName(jo.getString("to"));
this.vianode = lookupNodeName(jo.getString("via"));
}
- public NetworkRoute(int fromnode, int tonode, int vianode) throws IllegalArgumentException {
+ public NetworkRoute(int fromnode, int tonode, int vianode) {
this.fromnode = fromnode;
this.tonode = tonode;
this.vianode = vianode;
@@ -112,10 +113,6 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
return tonode;
}
- public int getVianode() {
- return vianode;
- }
-
@Override
public boolean doDelete(Connection c) {
boolean rv = true;
@@ -128,14 +125,14 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
ps.execute();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0007 doDelete: " + e.getMessage());
+ intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -157,14 +154,14 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
ps.close();
rv = true;
} catch (SQLException e) {
- intlogger.warn("PROV0005 doInsert: " + e.getMessage());
+ intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
}
@@ -184,14 +181,14 @@ public class NetworkRoute extends NodeClass implements Comparable<NetworkRoute>
ps.executeUpdate();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java
index 08914868..9e7071bb 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java
@@ -57,6 +57,7 @@ public class Parameters extends Syncable {
public static final String PROV_POKETIMER2 = "PROV_POKETIMER2";
public static final String PROV_SPECIAL_SUBNET = "PROV_SPECIAL_SUBNET";
public static final String PROV_LOG_RETENTION = "PROV_LOG_RETENTION";
+ public static final String DEFAULT_LOG_RETENTION = "DEFAULT_LOG_RETENTION";
public static final String NODES = "NODES";
public static final String ACTIVE_POD = "ACTIVE_POD";
public static final String STANDBY_POD = "STANDBY_POD";
@@ -69,6 +70,7 @@ public class Parameters extends Syncable {
public static final String STATIC_ROUTING_NODES = "STATIC_ROUTING_NODES"; //Adding new param for static Routing - Rally:US664862-1610
private static EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
+ private static final String SQLEXCEPTION = "SQLException: ";
private String keyname;
private String value;
@@ -103,7 +105,7 @@ public class Parameters extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+            intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
return coll;
}
@@ -131,7 +133,7 @@ public class Parameters extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+            intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
return v;
}
@@ -154,10 +156,6 @@ public class Parameters extends Syncable {
return keyname;
}
- public void setKeyname(String keyname) {
- this.keyname = keyname;
- }
-
public String getValue() {
return value;
}
@@ -187,14 +185,14 @@ public class Parameters extends Syncable {
ps.execute();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0005 doInsert: " + e.getMessage());
+ intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+                intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -213,14 +211,14 @@ public class Parameters extends Syncable {
ps.executeUpdate();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0006 doUpdate: " + e.getMessage(),e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -238,14 +236,14 @@ public class Parameters extends Syncable {
ps.execute();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0007 doDelete: " + e.getMessage());
+ intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error("SQLException " + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/PubFailRecord.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/PubFailRecord.java
index 88d48826..a38d8bd2 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/PubFailRecord.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/PubFailRecord.java
@@ -24,7 +24,6 @@
package org.onap.dmaap.datarouter.provisioning.beans;
import java.sql.PreparedStatement;
-import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.text.ParseException;
@@ -49,15 +48,6 @@ public class PubFailRecord extends BaseLogRecord {
this.error = pp[11];
}
- public PubFailRecord(ResultSet rs) throws SQLException {
- super(rs);
- // Note: because this record should be "rare" these fields are mapped to unconventional fields in the DB
- this.contentLengthReceived = rs.getLong("CONTENT_LENGTH_2");
- this.sourceIP = rs.getString("REMOTE_ADDR");
- this.user = rs.getString("USER");
- this.error = rs.getString("FEED_FILEID");
- }
-
public long getContentLengthReceived() {
return contentLengthReceived;
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/SubDelivery.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/SubDelivery.java
index 98981a30..5a3457b9 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/SubDelivery.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/SubDelivery.java
@@ -43,10 +43,6 @@ public class SubDelivery implements JSONable {
private String password;
private boolean use100;
- public SubDelivery() {
- this("", "", "", false);
- }
-
public SubDelivery(String url, String user, String password, boolean use100) {
this.url = url;
this.user = user;
@@ -90,10 +86,6 @@ public class SubDelivery implements JSONable {
return use100;
}
- public void setUse100(boolean use100) {
- this.use100 = use100;
- }
-
@Override
public JSONObject asJSONObject() {
JSONObject jo = new JSONObject();
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java
index 442a7642..1cb1f2bb 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java
@@ -23,6 +23,8 @@
package org.onap.dmaap.datarouter.provisioning.beans;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.io.InvalidObjectException;
import java.sql.Connection;
import java.sql.PreparedStatement;
@@ -34,9 +36,6 @@ import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Properties;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
import org.json.JSONObject;
import org.onap.dmaap.datarouter.provisioning.utils.DB;
import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities;
@@ -117,7 +116,7 @@ public class Subscription extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error(e.toString());
+ intlogger.error("PROV0001 getSubscriptionsForSQL: " + e.toString(), e);
}
return list;
}
@@ -137,7 +136,7 @@ public class Subscription extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.info("getMaxSubID: " + e.getMessage());
+ intlogger.info("getMaxSubID: " + e.getMessage(), e);
}
return max;
}
@@ -161,7 +160,7 @@ public class Subscription extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.error(SQLEXCEPTION + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
return list;
}
@@ -186,7 +185,7 @@ public class Subscription extends Syncable {
}
db.release(conn);
} catch (SQLException e) {
- intlogger.warn("PROV0008 countActiveSubscriptions: " + e.getMessage());
+ intlogger.warn("PROV0008 countActiveSubscriptions: " + e.getMessage(), e);
}
return count;
}
@@ -274,7 +273,8 @@ public class Subscription extends Syncable {
} catch (InvalidObjectException e) {
throw e;
} catch (Exception e) {
- throw new InvalidObjectException("invalid JSON: " + e.getMessage());
+ intlogger.warn("Invalid JSON: " + e.getMessage(), e);
+ throw new InvalidObjectException("Invalid JSON: " + e.getMessage());
}
}
@@ -474,14 +474,14 @@ public class Subscription extends Syncable {
ps.close();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0005 doInsert: " + e.getMessage());
+ intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error(SQLEXCEPTION + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -508,14 +508,14 @@ public class Subscription extends Syncable {
ps.executeUpdate();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error(SQLEXCEPTION + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -541,14 +541,14 @@ public class Subscription extends Syncable {
ps.close();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0006 doUpdate: " + e.getMessage());
+ intlogger.warn("PROV0006 doUpdate: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error(SQLEXCEPTION + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -566,14 +566,14 @@ public class Subscription extends Syncable {
ps.execute();
} catch (SQLException e) {
rv = false;
- intlogger.warn("PROV0007 doDelete: " + e.getMessage());
+ intlogger.warn("PROV0007 doDelete: " + e.getMessage(), e);
} finally {
try {
if (ps != null) {
ps.close();
}
} catch (SQLException e) {
- intlogger.error(SQLEXCEPTION + e.getMessage());
+ intlogger.error(SQLEXCEPTION + e.getMessage(), e);
}
}
return rv;
@@ -596,8 +596,8 @@ public class Subscription extends Syncable {
if (feedid != os.feedid) {
return false;
}
- if (groupid != os.groupid) //New field is added - Groups feature Rally:US708115 - 1610
- {
+ if (groupid != os.groupid) {
+ //New field is added - Groups feature Rally:US708115 - 1610
return false;
}
if (!delivery.equals(os.delivery)) {
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java
index d29876fb..7700a583 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java
@@ -84,7 +84,7 @@ public class DB {
HTTP_PORT = (String) props.get("org.onap.dmaap.datarouter.provserver.http.port");
Class.forName(DB_DRIVER);
} catch (IOException e) {
- intlogger.error("PROV9003 Opening properties: " + e.getMessage());
+ intlogger.error("PROV9003 Opening properties: " + e.getMessage(), e);
System.exit(1);
} catch (ClassNotFoundException e) {
intlogger.error("PROV9004 cannot find the DB driver: " + e);
@@ -115,6 +115,7 @@ public class DB {
try {
connection = queue.remove();
} catch (NoSuchElementException nseEx) {
+ intlogger.error("PROV9006 No connection on queue: " + nseEx.getMessage(), nseEx);
int n = 0;
do {
// Try up to 3 times to get a connection
@@ -194,8 +195,7 @@ public class DB {
runInitScript(connection, 1);
}
} catch (SQLException e) {
- intlogger
- .error("PROV9000: The database credentials are not working: " + e.getMessage());
+ intlogger.error("PROV9000: The database credentials are not working: " + e.getMessage(), e);
return false;
} finally {
if (connection != null) {
@@ -223,7 +223,7 @@ public class DB {
rs.close();
}
} catch (SQLException e) {
- intlogger.error("PROV9010: Failed to get TABLE data from DB: " + e.getMessage());
+ intlogger.error("PROV9010: Failed to get TABLE data from DB: " + e.getMessage(), e);
}
return tables;
}
@@ -264,7 +264,7 @@ public class DB {
lineReader.close();
strBuilder.setLength(0);
} catch (Exception e) {
- intlogger.error("PROV9002 Error when initializing table: " + e.getMessage());
+ intlogger.error("PROV9002 Error when initializing table: " + e.getMessage(), e);
System.exit(1);
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRProvCadiFilter.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRProvCadiFilter.java
index 46cfabec..d5521ba8 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRProvCadiFilter.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRProvCadiFilter.java
@@ -211,7 +211,7 @@ public class DRProvCadiFilter extends CadiFilter {
}
} catch (Exception e) {
- intlogger.error("PROV0073 DRProvCadiFilter.isAAFFeed: ", e.getMessage());
+ intlogger.error("PROV0073 DRProvCadiFilter.isAAFFeed: " + e.getMessage(), e);
return false;
}
return false;
@@ -241,7 +241,7 @@ public class DRProvCadiFilter extends CadiFilter {
intlogger.debug(message);
}
} catch (Exception e) {
- intlogger.error("PROV0073 DRProvCadiFilter.isAAFSubscriber: ", e.getMessage());
+ intlogger.error("PROV0073 DRProvCadiFilter.isAAFSubscriber: " + e.getMessage(), e);
return false;
}
return false;
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java
index af8bd6d3..a593c8a7 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java
@@ -24,6 +24,8 @@
package org.onap.dmaap.datarouter.provisioning.utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
@@ -97,6 +99,7 @@ public class DRRouteCLI {
public static final String ENV_VAR = "PROVSRVR";
public static final String PROMPT = "dr-route> ";
public static final String DEFAULT_TRUSTSTORE_PATH = /* $JAVA_HOME + */ "/jre/lib/security/cacerts";
+ private static final EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
private final String server;
private int width = 120; // screen width (for list)
@@ -130,12 +133,13 @@ public class DRRouteCLI {
try {
trustStore.load(instream, truststore_pw.toCharArray());
} catch (Exception x) {
- System.err.println("Problem reading truststore: " + x);
+ intlogger.error("Problem reading truststore: " + x.getMessage(), x);
throw x;
} finally {
try {
instream.close();
} catch (Exception ignore) {
+ intlogger.error("Ignore error closing input stream: " + ignore.getMessage(), ignore);
}
}
}
@@ -397,6 +401,7 @@ public class DRRouteCLI {
printErrorText(entity);
}
} catch (Exception e) {
+ intlogger.error("PROV0006 doDelete: " + e.getMessage(), e);
} finally {
meth.releaseConnection();
}
@@ -416,7 +421,7 @@ public class DRRouteCLI {
printErrorText(entity);
}
} catch (Exception e) {
- System.err.println(e);
+ intlogger.error("PROV0005 doGet: " + e.getMessage(), e);
} finally {
meth.releaseConnection();
}
@@ -438,6 +443,7 @@ public class DRRouteCLI {
printErrorText(entity);
}
} catch (Exception e) {
+ intlogger.error("PROV0009 doPost: " + e.getMessage(), e);
} finally {
meth.releaseConnection();
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/HttpServletUtils.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/HttpServletUtils.java
index 3aa3bd28..f59dc919 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/HttpServletUtils.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/HttpServletUtils.java
@@ -20,19 +20,24 @@
* * ECOMP is a trademark and service mark of AT&T Intellectual Property.
* *
******************************************************************************/
-package org.onap.dmaap.datarouter.provisioning.utils;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
+package org.onap.dmaap.datarouter.provisioning.utils;
import com.att.eelf.configuration.EELFLogger;
+import java.io.IOException;
+import javax.servlet.http.HttpServletResponse;
public class HttpServletUtils {
+
+ private HttpServletUtils(){
+
+ }
+
public static void sendResponseError(HttpServletResponse response, int errorCode, String message, EELFLogger intlogger) {
try {
response.sendError(errorCode, message);
} catch (IOException ioe) {
- intlogger.error("IOException" + ioe.getMessage());
+ intlogger.error("IOException" + ioe.getMessage(), ioe);
}
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/JSONUtilities.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/JSONUtilities.java
index 915aa610..2d4e22b3 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/JSONUtilities.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/JSONUtilities.java
@@ -24,6 +24,8 @@
package org.onap.dmaap.datarouter.provisioning.utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
@@ -35,6 +37,8 @@ import java.util.Collection;
* @version $Id: JSONUtilities.java,v 1.1 2013/04/26 21:00:26 eby Exp $
*/
public class JSONUtilities {
+
+ private static final EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
/**
* Does the String <i>v</i> represent a valid Internet address (with or without a
* mask length appended).
@@ -56,6 +60,7 @@ public class JSONUtilities {
}
return true;
} catch (UnknownHostException e) {
+ intlogger.error("PROV0001: " + e.getMessage(), e);
return false;
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java
index 1518859a..1140a1ce 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java
@@ -23,6 +23,8 @@ package org.onap.dmaap.datarouter.provisioning.utils;
* *
******************************************************************************/
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
@@ -104,6 +106,8 @@ public class LOGJSONObject {
*/
private static Map<String, Object> keyPool = new LinkedHashMap<String, Object>(keyPoolSize);
+ private static final EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");
+
/**
* JSONObject.NULL is equivalent to the value that JavaScript calls null,
* whilst Java's null is equivalent to the value that JavaScript calls
@@ -182,13 +186,11 @@ public class LOGJSONObject {
}
}
-
/**
* The map where the JSONObject's properties are kept.
*/
private final Map<String, Object> map;
-
/**
* It is sometimes more convenient and less ambiguous to have a
* <code>NULL</code> object than to use Java's <code>null</code> value.
@@ -197,15 +199,13 @@ public class LOGJSONObject {
*/
public static final Object NULL = new Null();
-
/**
* Construct an empty JSONObject.
*/
public LOGJSONObject() {
- this.map = new LinkedHashMap<String, Object>();
+ this.map = new LinkedHashMap<>();
}
-
/**
* Construct a JSONObject from a subset of another JSONObject.
* An array of strings is used to identify the keys that should be copied.
@@ -213,8 +213,6 @@ public class LOGJSONObject {
*
* @param jo A JSONObject.
* @param names An array of strings.
- * @throws JSONException
- * @throws JSONException If a value is a non-finite number or if a name is duplicated.
*/
public LOGJSONObject(LOGJSONObject jo, String[] names) {
this();
@@ -222,11 +220,11 @@ public class LOGJSONObject {
try {
this.putOnce(names[i], jo.opt(names[i]));
} catch (Exception ignore) {
+ intlogger.error("PROV0001 LOGJSONObject: " + ignore.getMessage(), ignore);
}
}
}
-
/**
* Construct a JSONObject from a JSONTokener.
*
@@ -234,7 +232,7 @@ public class LOGJSONObject {
* @throws JSONException If there is a syntax error in the source string
* or a duplicated key.
*/
- public LOGJSONObject(JSONTokener x) throws JSONException {
+ public LOGJSONObject(JSONTokener x) {
this();
char c;
String key;
@@ -284,7 +282,6 @@ public class LOGJSONObject {
}
}
-
/**
* Construct a JSONObject from a Map.
*
@@ -306,7 +303,6 @@ public class LOGJSONObject {
}
}
-
/**
* Construct a JSONObject from an Object using bean getters.
* It reflects on all of the public methods of the object.
@@ -331,7 +327,6 @@ public class LOGJSONObject {
this.populateMap(bean);
}
-
/**
* Construct a JSONObject from an Object, using reflection to find the
* public members. The resulting JSONObject's keys will be the strings
@@ -356,7 +351,6 @@ public class LOGJSONObject {
}
}
-
/**
* Construct a JSONObject from a source JSON text string.
* This is the most commonly used JSONObject constructor.
@@ -371,7 +365,6 @@ public class LOGJSONObject {
this(new JSONTokener(source));
}
-
/**
* Construct a JSONObject from a ResourceBundle.
*
@@ -412,7 +405,6 @@ public class LOGJSONObject {
}
}
-
/**
* Accumulate values under a key. It is similar to the put method except
* that if there is already an object stored under the key then a
@@ -448,7 +440,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Append values to the array under a key. If the key does not exist in the
* JSONObject, then the key is put in the JSONObject with its value being a
@@ -475,7 +466,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Produce a string from a double. The string "null" will be returned if
* the number is not finite.
@@ -503,7 +493,6 @@ public class LOGJSONObject {
return string;
}
-
/**
* Get the value object associated with a key.
*
@@ -523,7 +512,6 @@ public class LOGJSONObject {
return object;
}
-
/**
* Get the boolean value associated with a key.
*
@@ -546,7 +534,6 @@ public class LOGJSONObject {
"] is not a Boolean.");
}
-
/**
* Get the double value associated with a key.
*
@@ -555,19 +542,18 @@ public class LOGJSONObject {
* @throws JSONException if the key is not found or
* if the value is not a Number object and cannot be converted to a number.
*/
- public double getDouble(String key) throws JSONException {
+ public double getDouble(String key) {
Object object = this.get(key);
try {
return object instanceof Number
? ((Number) object).doubleValue()
: Double.parseDouble((String) object);
} catch (Exception e) {
- throw new JSONException("JSONObject[" + quote(key) +
- "] is not a number.");
+ intlogger.error("JSONObject[" + quote(key) + "] is not a number.", e);
+ throw new JSONException("JSONObject[" + quote(key) + "] is not a number.");
}
}
-
/**
* Get the int value associated with a key.
*
@@ -576,19 +562,18 @@ public class LOGJSONObject {
* @throws JSONException if the key is not found or if the value cannot
* be converted to an integer.
*/
- public int getInt(String key) throws JSONException {
+ public int getInt(String key) {
Object object = this.get(key);
try {
return object instanceof Number
? ((Number) object).intValue()
: Integer.parseInt((String) object);
} catch (Exception e) {
- throw new JSONException("JSONObject[" + quote(key) +
- "] is not an int.");
+ intlogger.error("JSONObject[" + quote(key) + "] is not an int.", e);
+ throw new JSONException("JSONObject[" + quote(key) + "] is not an int.");
}
}
-
/**
* Get the JSONArray value associated with a key.
*
@@ -606,7 +591,6 @@ public class LOGJSONObject {
"] is not a JSONArray.");
}
-
/**
* Get the JSONObject value associated with a key.
*
@@ -624,7 +608,6 @@ public class LOGJSONObject {
"] is not a JSONObject.");
}
-
/**
* Get the long value associated with a key.
*
@@ -640,12 +623,11 @@ public class LOGJSONObject {
? ((Number) object).longValue()
: Long.parseLong((String) object);
} catch (Exception e) {
- throw new JSONException("JSONObject[" + quote(key) +
- "] is not a long.");
+ intlogger.error("JSONObject[" + quote(key) + "] is not a long.", e);
+ throw new JSONException("JSONObject[" + quote(key) + "] is not a long.");
}
}
-
/**
* Get an array of field names from a JSONObject.
*
@@ -666,30 +648,6 @@ public class LOGJSONObject {
return names;
}
-
- /**
- * Get an array of field names from an Object.
- *
- * @return An array of field names, or null if there are no names.
- */
- public static String[] getNames(Object object) {
- if (object == null) {
- return null;
- }
- Class<? extends Object> klass = object.getClass();
- Field[] fields = klass.getFields();
- int length = fields.length;
- if (length == 0) {
- return null;
- }
- String[] names = new String[length];
- for (int i = 0; i < length; i += 1) {
- names[i] = fields[i].getName();
- }
- return names;
- }
-
-
/**
* Get the string associated with a key.
*
@@ -697,7 +655,7 @@ public class LOGJSONObject {
* @return A string which is the value.
* @throws JSONException if there is no string value for the key.
*/
- public String getString(String key) throws JSONException {
+ public String getString(String key) {
Object object = this.get(key);
if (object instanceof String) {
return (String) object;
@@ -706,7 +664,6 @@ public class LOGJSONObject {
"] not a string.");
}
-
/**
* Determine if the JSONObject contains a specific key.
*
@@ -717,7 +674,6 @@ public class LOGJSONObject {
return this.map.containsKey(key);
}
-
/**
* Increment a property of a JSONObject. If there is no such property,
* create one with a value of 1. If there is such a property, and if
@@ -728,7 +684,7 @@ public class LOGJSONObject {
* @throws JSONException If there is already a property with this name
* that is not an Integer, Long, Double, or Float.
*/
- public LOGJSONObject increment(String key) throws JSONException {
+ public LOGJSONObject increment(String key) {
Object value = this.opt(key);
if (value == null) {
this.put(key, 1);
@@ -746,20 +702,6 @@ public class LOGJSONObject {
return this;
}
-
- /**
- * Determine if the value associated with the key is null or if there is
- * no value.
- *
- * @param key A key string.
- * @return true if there is no value associated with the key or if
- * the value is the JSONObject.NULL object.
- */
- public boolean isNull(String key) {
- return LOGJSONObject.NULL.equals(this.opt(key));
- }
-
-
/**
* Get an enumeration of the keys of the JSONObject.
*
@@ -769,7 +711,6 @@ public class LOGJSONObject {
return this.keySet().iterator();
}
-
/**
* Get a set of keys of the JSONObject.
*
@@ -779,7 +720,6 @@ public class LOGJSONObject {
return this.map.keySet();
}
-
/**
* Get the number of keys stored in the JSONObject.
*
@@ -789,7 +729,6 @@ public class LOGJSONObject {
return this.map.size();
}
-
/**
* Produce a JSONArray containing the names of the elements of this
* JSONObject.
@@ -835,7 +774,6 @@ public class LOGJSONObject {
return string;
}
-
/**
* Get an optional value associated with a key.
*
@@ -846,20 +784,6 @@ public class LOGJSONObject {
return key == null ? null : this.map.get(key);
}
-
- /**
- * Get an optional boolean associated with a key.
- * It returns false if there is no such key, or if the value is not
- * Boolean.TRUE or the String "true".
- *
- * @param key A key string.
- * @return The truth.
- */
- public boolean optBoolean(String key) {
- return this.optBoolean(key, false);
- }
-
-
/**
* Get an optional boolean associated with a key.
* It returns the defaultValue if there is no such key, or if it is not
@@ -873,25 +797,11 @@ public class LOGJSONObject {
try {
return this.getBoolean(key);
} catch (Exception e) {
+ intlogger.trace("Using defaultValue: " + defaultValue, e);
return defaultValue;
}
}
-
- /**
- * Get an optional double associated with a key,
- * or NaN if there is no such key or if its value is not a number.
- * If the value is a string, an attempt will be made to evaluate it as
- * a number.
- *
- * @param key A string which is the key.
- * @return An object which is the value.
- */
- public double optDouble(String key) {
- return this.optDouble(key, Double.NaN);
- }
-
-
/**
* Get an optional double associated with a key, or the
* defaultValue if there is no such key or if its value is not a number.
@@ -906,25 +816,11 @@ public class LOGJSONObject {
try {
return this.getDouble(key);
} catch (Exception e) {
+ intlogger.trace("Using defaultValue: " + defaultValue, e);
return defaultValue;
}
}
-
- /**
- * Get an optional int value associated with a key,
- * or zero if there is no such key or if the value is not a number.
- * If the value is a string, an attempt will be made to evaluate it as
- * a number.
- *
- * @param key A key string.
- * @return An object which is the value.
- */
- public int optInt(String key) {
- return this.optInt(key, 0);
- }
-
-
/**
* Get an optional int value associated with a key,
* or the default if there is no such key or if the value is not a number.
@@ -939,25 +835,11 @@ public class LOGJSONObject {
try {
return this.getInt(key);
} catch (Exception e) {
+ intlogger.trace("Using defaultValue: " + defaultValue, e);
return defaultValue;
}
}
-
- /**
- * Get an optional JSONArray associated with a key.
- * It returns null if there is no such key, or if its value is not a
- * JSONArray.
- *
- * @param key A key string.
- * @return A JSONArray which is the value.
- */
- public JSONArray optJSONArray(String key) {
- Object o = this.opt(key);
- return o instanceof JSONArray ? (JSONArray) o : null;
- }
-
-
/**
* Get an optional JSONObject associated with a key.
* It returns null if there is no such key, or if its value is not a
@@ -971,21 +853,6 @@ public class LOGJSONObject {
return object instanceof LOGJSONObject ? (LOGJSONObject) object : null;
}
-
- /**
- * Get an optional long value associated with a key,
- * or zero if there is no such key or if the value is not a number.
- * If the value is a string, an attempt will be made to evaluate it as
- * a number.
- *
- * @param key A key string.
- * @return An object which is the value.
- */
- public long optLong(String key) {
- return this.optLong(key, 0);
- }
-
-
/**
* Get an optional long value associated with a key,
* or the default if there is no such key or if the value is not a number.
@@ -1004,20 +871,6 @@ public class LOGJSONObject {
}
}
-
- /**
- * Get an optional string associated with a key.
- * It returns an empty string if there is no such key. If the value is not
- * a string and is not null, then it is converted to a string.
- *
- * @param key A key string.
- * @return A string which is the value.
- */
- public String optString(String key) {
- return this.optString(key, "");
- }
-
-
/**
* Get an optional string associated with a key.
* It returns the defaultValue if there is no such key.
@@ -1031,7 +884,6 @@ public class LOGJSONObject {
return NULL.equals(object) ? defaultValue : object.toString();
}
-
private void populateMap(Object bean) {
Class<? extends Object> klass = bean.getClass();
@@ -1075,11 +927,11 @@ public class LOGJSONObject {
}
}
} catch (Exception ignore) {
+ intlogger.trace("populateMap: " + ignore.getMessage(), ignore);
}
}
}
-
/**
* Put a key/boolean pair in the JSONObject.
*
@@ -1093,7 +945,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Put a key/value pair in the JSONObject, where the value will be a
* JSONArray which is produced from a Collection.
@@ -1108,7 +959,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Put a key/double pair in the JSONObject.
*
@@ -1122,7 +972,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Put a key/int pair in the JSONObject.
*
@@ -1136,7 +985,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Put a key/long pair in the JSONObject.
*
@@ -1150,7 +998,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Put a key/value pair in the JSONObject, where the value will be a
* JSONObject which is produced from a Map.
@@ -1165,7 +1012,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Put a key/value pair in the JSONObject. If the value is null,
* then the key will be removed from the JSONObject if it is present.
@@ -1201,7 +1047,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Put a key/value pair in the JSONObject, but only if the key and the
* value are both non-null, and only if there is not already a member
@@ -1222,7 +1067,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Put a key/value pair in the JSONObject, but only if the
* key and the value are both non-null.
@@ -1241,7 +1085,6 @@ public class LOGJSONObject {
return this;
}
-
/**
* Produce a string in double quotes with backslash sequences in all the
* right places. A backslash will be inserted within </, producing <\/,
@@ -1256,8 +1099,8 @@ public class LOGJSONObject {
synchronized (sw.getBuffer()) {
try {
return quote(string, sw).toString();
- } catch (IOException ignored) {
- // will never happen - we are writing to a string writer
+ } catch (IOException e) {
+ intlogger.trace("Ignore Exception message: ", e);
return "";
}
}
@@ -1380,20 +1223,20 @@ public class LOGJSONObject {
return myLong;
}
}
- } catch (Exception ignore) {
+ } catch (Exception e) {
+ intlogger.trace("Ignore Exception message: ", e);
}
}
return string;
}
-
/**
* Throw an exception if the object is a NaN or infinite number.
*
* @param o The object to test.
* @throws JSONException If o is a non-finite number.
*/
- public static void testValidity(Object o) throws JSONException {
+ public static void testValidity(Object o) {
if (o != null) {
if (o instanceof Double) {
if (((Double) o).isInfinite() || ((Double) o).isNaN()) {
@@ -1409,7 +1252,6 @@ public class LOGJSONObject {
}
}
-
/**
* Produce a JSONArray containing the values of the members of this
* JSONObject.
@@ -1446,11 +1288,11 @@ public class LOGJSONObject {
try {
return this.toString(0);
} catch (Exception e) {
+ intlogger.trace("Exception: ", e);
return "";
}
}
-
/**
* Make a prettyprinted JSON text of this JSONObject.
* <p>
@@ -1579,25 +1421,11 @@ public class LOGJSONObject {
}
return new LOGJSONObject(object);
} catch (Exception exception) {
+ intlogger.trace("Exception: ", exception);
return null;
}
}
-
- /**
- * Write the contents of the JSONObject as JSON text to a writer.
- * For compactness, no whitespace is added.
- * <p>
- * Warning: This method assumes that the data structure is acyclical.
- *
- * @return The writer.
- * @throws JSONException
- */
- public Writer write(Writer writer) throws JSONException {
- return this.write(writer, 0, 0);
- }
-
-
@SuppressWarnings("unchecked")
static final Writer writeValue(Writer writer, Object value,
int indentFactor, int indent) throws JSONException, IOException {
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
index 82231884..c78a5b10 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
@@ -1,501 +1,498 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package org.onap.dmaap.datarouter.provisioning.utils;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.LineNumberReader;
-import java.io.Reader;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.TreeSet;
-import java.util.zip.GZIPInputStream;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.onap.dmaap.datarouter.provisioning.BaseServlet;
-import org.onap.dmaap.datarouter.provisioning.beans.DeliveryExtraRecord;
-import org.onap.dmaap.datarouter.provisioning.beans.DeliveryRecord;
-import org.onap.dmaap.datarouter.provisioning.beans.ExpiryRecord;
-import org.onap.dmaap.datarouter.provisioning.beans.Loadable;
-import org.onap.dmaap.datarouter.provisioning.beans.LogRecord;
-import org.onap.dmaap.datarouter.provisioning.beans.Parameters;
-import org.onap.dmaap.datarouter.provisioning.beans.PubFailRecord;
-import org.onap.dmaap.datarouter.provisioning.beans.PublishRecord;
-
-/**
- * This class provides methods that run in a separate thread, in order to process logfiles uploaded into the spooldir.
- * These logfiles are loaded into the MariaDB LOG_RECORDS table. In a running provisioning server, there should only be
- * two places where records can be loaded into this table; here, and in the method DB.retroFit4() which may be run at
- * startup to load the old (1.0) style log tables into LOG_RECORDS;
- * <p>This method maintains an {@link RLEBitSet} which can be used to easily see what records are presently in the
- * database.
- * This bit set is used to synchronize between provisioning servers.</p>
- *
- * @author Robert Eby
- * @version $Id: LogfileLoader.java,v 1.22 2014/03/12 19:45:41 eby Exp $
- */
-public class LogfileLoader extends Thread {
- /**
- * Default number of log records to keep when pruning. Keep 10M by default.
- */
- public static final long DEFAULT_LOG_RETENTION = 10000000L;
- /**
- * NOT USED: Percentage of free space required before old records are removed.
- */
- public static final int REQUIRED_FREE_PCT = 20;
-
- /**
- * This is a singleton -- there is only one LogfileLoader object in the server
- */
- private static LogfileLoader logfileLoader;
-
- /**
- * Get the singleton LogfileLoader object, and start it if it is not running.
- *
- * @return the LogfileLoader
- */
- public static synchronized LogfileLoader getLoader() {
- if (logfileLoader == null)
- logfileLoader = new LogfileLoader();
- if (!logfileLoader.isAlive())
- logfileLoader.start();
- return logfileLoader;
- }
-
- /**
- * The PreparedStatement which is loaded by a <i>Loadable</i>.
- */
- public static final String INSERT_SQL = "insert into LOG_RECORDS values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
- /**
- * Each server can assign this many IDs
- */
- private static final long SET_SIZE = (1L << 56);
-
- private final EELFLogger logger;
- private final DB db;
- private final String spooldir;
- private final long set_start;
- private final long set_end;
- private RLEBitSet seq_set;
- private long nextid;
- private boolean idle;
-
- private LogfileLoader() {
- this.logger = EELFManager.getInstance().getLogger("InternalLog");
- this.db = new DB();
- this.spooldir = db.getProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir");
- this.set_start = getIdRange();
- this.set_end = set_start + SET_SIZE - 1;
- this.seq_set = new RLEBitSet();
- this.nextid = 0;
- this.idle = false;
-
- // This is a potentially lengthy operation, so has been moved to run()
- //initializeNextid();
- this.setDaemon(true);
- this.setName("LogfileLoader");
- }
-
- private long getIdRange() {
- long n;
- if (BaseServlet.isInitialActivePOD())
- n = 0;
- else if (BaseServlet.isInitialStandbyPOD())
- n = SET_SIZE;
- else
- n = SET_SIZE * 2;
- String r = String.format("[%X .. %X]", n, n + SET_SIZE - 1);
- logger.debug("This server shall assign RECORD_IDs in the range " + r);
- return n;
- }
-
- /**
- * Return the bit set representing the record ID's that are loaded in this database.
- *
- * @return the bit set
- */
- public RLEBitSet getBitSet() {
- return seq_set;
- }
-
- /**
- * True if the LogfileLoader is currently waiting for work.
- *
- * @return true if idle
- */
- public boolean isIdle() {
- return idle;
- }
-
- /**
- * Run continuously to look for new logfiles in the spool directory and import them into the DB.
- * The spool is checked once per second. If free space on the MariaDB filesystem falls below
- * REQUIRED_FREE_PCT (normally 20%) then the oldest logfile entries are removed and the LOG_RECORDS
- * table is compacted until free space rises above the threshold.
- */
- @Override
- public void run() {
- initializeNextid(); // moved from the constructor
- while (true) {
- try {
- File dirfile = new File(spooldir);
- while (true) {
- // process IN files
- File[] infiles = dirfile.listFiles(new FilenameFilter() {
- @Override
- public boolean accept(File dir, String name) {
- return name.startsWith("IN.");
- }
- });
-
- if (infiles.length == 0) {
- idle = true;
- try {
- Thread.sleep(1000L);
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- }
- idle = false;
- } else {
- // Remove old rows
- if (pruneRecords()) {
- // Removed at least some entries, recompute the bit map
- initializeNextid();
- }
-
- // Process incoming logfiles
- for (File f : infiles) {
- if (logger.isDebugEnabled())
- logger.debug("PROV8001 Starting " + f + " ...");
- long time = System.currentTimeMillis();
- int[] n = process(f);
- time = System.currentTimeMillis() - time;
- logger.info(String
- .format("PROV8000 Processed %s in %d ms; %d of %d records.",
- f.toString(), time, n[0], n[1]));
- f.delete();
- }
- }
- }
- } catch (Exception e) {
- logger.warn("PROV0020: Caught exception in LogfileLoader: " + e);
- }
- }
- }
-
- boolean pruneRecords() {
- boolean did1 = false;
- long count = countRecords();
- long threshold = DEFAULT_LOG_RETENTION;
- Parameters param = Parameters.getParameter(Parameters.PROV_LOG_RETENTION);
- if (param != null) {
- try {
- long n = Long.parseLong(param.getValue());
- // This check is to prevent inadvertent errors from wiping the table out
- if (n > 1000000L)
- threshold = n;
- } catch (NumberFormatException e) {
- // ignore
- }
- }
- logger.debug("Pruning LOG_RECORD table: records in DB=" + count + ", threshold=" + threshold);
- if (count > threshold) {
- count -= threshold; // we need to remove this many records;
- Map<Long, Long> hist = getHistogram(); // histogram of records per day
- // Determine the cutoff point to remove the needed number of records
- long sum = 0;
- long cutoff = 0;
- for (Long day : new TreeSet<Long>(hist.keySet())) {
- sum += hist.get(day);
- cutoff = day;
- if (sum >= count)
- break;
- }
- cutoff++;
- cutoff *= 86400000L; // convert day to ms
- logger.debug(" Pruning records older than=" + (cutoff / 86400000L) + " (" + new Date(cutoff) + ")");
-
- Connection conn = null;
- try {
- // Limit to a million at a time to avoid typing up the DB for too long.
- conn = db.getConnection();
- try(PreparedStatement ps = conn.prepareStatement("DELETE from LOG_RECORDS where EVENT_TIME < ? limit 1000000")) {
- ps.setLong(1, cutoff);
- while (count > 0) {
- if (!ps.execute()) {
- int dcount = ps.getUpdateCount();
- count -= dcount;
- logger.debug(" " + dcount + " rows deleted.");
- did1 |= (dcount != 0);
- if (dcount == 0)
- count = 0; // prevent inf. loops
- } else {
- count = 0; // shouldn't happen!
- }
- }
- }
- try(Statement stmt = conn.createStatement()) {
- stmt.execute("OPTIMIZE TABLE LOG_RECORDS");
- }
- } catch (SQLException e) {
- System.err.println(e);
- logger.error(e.toString());
- } finally {
- db.release(conn);
- }
- }
- return did1;
- }
-
- long countRecords() {
- long count = 0;
- Connection conn = null;
- try {
- conn = db.getConnection();
- try(Statement stmt = conn.createStatement()) {
- try(ResultSet rs = stmt.executeQuery("SELECT COUNT(*) as COUNT from LOG_RECORDS")) {
- if (rs.next()) {
- count = rs.getLong("COUNT");
- }
- }
- }
- } catch (SQLException e) {
- System.err.println(e);
- logger.error(e.toString());
- } finally {
- db.release(conn);
- }
- return count;
- }
-
- Map<Long, Long> getHistogram() {
- Map<Long, Long> map = new HashMap<Long, Long>();
- Connection conn = null;
- try {
- logger.debug(" LOG_RECORD table histogram...");
- conn = db.getConnection();
- try(Statement stmt = conn.createStatement()) {
- try(ResultSet rs = stmt.executeQuery("SELECT FLOOR(EVENT_TIME/86400000) AS DAY, COUNT(*) AS COUNT FROM LOG_RECORDS GROUP BY DAY")) {
- while (rs.next()) {
- long day = rs.getLong("DAY");
- long cnt = rs.getLong("COUNT");
- map.put(day, cnt);
- logger.debug(" " + day + " " + cnt);
- }
- }
- }
- } catch (SQLException e) {
- System.err.println(e);
- logger.error(e.toString());
- } finally {
- db.release(conn);
- }
- return map;
- }
-
- private void initializeNextid() {
- Connection conn = null;
- try {
- conn = db.getConnection();
- RLEBitSet nbs = new RLEBitSet();
- try(Statement stmt = conn.createStatement()) {
- // Build a bitset of all records in the LOG_RECORDS table
- // We need to run this SELECT in stages, because otherwise we run out of memory!
- final long stepsize = 6000000L;
- boolean go_again = true;
- for (long i = 0; go_again; i += stepsize) {
- String sql = String.format("select RECORD_ID from LOG_RECORDS LIMIT %d,%d", i, stepsize);
- try (ResultSet rs = stmt.executeQuery(sql)) {
- go_again = false;
- while (rs.next()) {
- long n = rs.getLong("RECORD_ID");
- nbs.set(n);
- go_again = true;
- }
- }
- }
- }
- seq_set = nbs;
- // Compare with the range for this server
- // Determine the next ID for this set of record IDs
- RLEBitSet tbs = (RLEBitSet) nbs.clone();
- RLEBitSet idset = new RLEBitSet();
- idset.set(set_start, set_start + SET_SIZE);
- tbs.and(idset);
- long t = tbs.length();
- nextid = (t == 0) ? set_start : (t - 1);
- if (nextid >= set_start + SET_SIZE) {
- // Handle wraparound, when the IDs reach the end of our "range"
- Long[] last = null;
- Iterator<Long[]> li = tbs.getRangeIterator();
- while (li.hasNext()) {
- last = li.next();
- }
- if (last != null) {
- tbs.clear(last[0], last[1] + 1);
- t = tbs.length();
- nextid = (t == 0) ? set_start : (t - 1);
- }
- }
- logger.debug(String.format("initializeNextid, next ID is %d (%x)", nextid, nextid));
- } catch (SQLException e) {
- System.err.println(e);
- logger.error(e.toString());
- } finally {
- db.release(conn);
- }
- }
-
- @SuppressWarnings("resource")
- int[] process(File f) {
- int ok = 0, total = 0;
- try {
- Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(INSERT_SQL);
- Reader r = f.getPath().endsWith(".gz")
- ? new InputStreamReader(new GZIPInputStream(new FileInputStream(f)))
- : new FileReader(f);
- try(LineNumberReader in = new LineNumberReader(r)) {
- String line;
- while ((line = in.readLine()) != null) {
- try {
- for (Loadable rec : buildRecords(line)) {
- rec.load(ps);
- if (rec instanceof LogRecord) {
- LogRecord lr = ((LogRecord) rec);
- if (!seq_set.get(lr.getRecordId())) {
- ps.executeUpdate();
- seq_set.set(lr.getRecordId());
- } else
- logger.debug("Duplicate record ignored: " + lr.getRecordId());
- } else {
- if (++nextid > set_end)
- nextid = set_start;
- ps.setLong(18, nextid);
- ps.executeUpdate();
- seq_set.set(nextid);
- }
- ps.clearParameters();
- ok++;
- }
- } catch (SQLException e) {
- logger.warn("PROV8003 Invalid value in record: " + line);
- logger.debug(e.toString());
- } catch (NumberFormatException e) {
- logger.warn("PROV8004 Invalid number in record: " + line);
- logger.debug(e.toString());
- } catch (ParseException e) {
- logger.warn("PROV8005 Invalid date in record: " + line);
- logger.debug(e.toString());
- } catch (Exception e) {
- logger.warn("PROV8006 Invalid pattern in record: " + line);
- logger.debug(e.toString());
- }
- total++;
- }
- }
- ps.close();
- db.release(conn);
- conn = null;
- } catch (FileNotFoundException e) {
- logger.warn("PROV8007 Exception reading " + f + ": " + e);
- } catch (IOException e) {
- logger.warn("PROV8007 Exception reading " + f + ": " + e);
- } catch (SQLException e) {
- logger.warn("PROV8007 Exception reading " + f + ": " + e);
- }
- return new int[]{ok, total};
- }
-
- Loadable[] buildRecords(String line) throws ParseException {
- String[] pp = line.split("\\|");
- if (pp != null && pp.length >= 7) {
- String rtype = pp[1].toUpperCase();
- if (rtype.equals("PUB") && pp.length == 11) {
- // Fields are: date|PUB|pubid|feedid|requrl|method|ctype|clen|srcip|user|status
- return new Loadable[]{new PublishRecord(pp)};
- }
- if (rtype.equals("DEL") && pp.length == 12) {
- // Fields are: date|DEL|pubid|feedid|subid|requrl|method|ctype|clen|user|status|xpubid
- String[] subs = pp[4].split("\\s+");
- if (subs != null) {
- Loadable[] rv = new Loadable[subs.length];
- for (int i = 0; i < subs.length; i++) {
- // create a new record for each individual sub
- pp[4] = subs[i];
- rv[i] = new DeliveryRecord(pp);
- }
- return rv;
- }
- }
- if (rtype.equals("EXP") && pp.length == 11) {
- // Fields are: date|EXP|pubid|feedid|subid|requrl|method|ctype|clen|reason|attempts
- ExpiryRecord e = new ExpiryRecord(pp);
- if (e.getReason().equals("other"))
- logger.info("Invalid reason '" + pp[9] + "' changed to 'other' for record: " + e.getPublishId());
- return new Loadable[]{e};
- }
- if (rtype.equals("PBF") && pp.length == 12) {
- // Fields are: date|PBF|pubid|feedid|requrl|method|ctype|clen-expected|clen-received|srcip|user|error
- return new Loadable[]{new PubFailRecord(pp)};
- }
- if (rtype.equals("DLX") && pp.length == 7) {
- // Fields are: date|DLX|pubid|feedid|subid|clen-tosend|clen-sent
- return new Loadable[]{new DeliveryExtraRecord(pp)};
- }
- if (rtype.equals("LOG") && (pp.length == 19 || pp.length == 20)) {
- // Fields are: date|LOG|pubid|feedid|requrl|method|ctype|clen|type|feedFileid|remoteAddr|user|status|subid|fileid|result|attempts|reason|record_id
- return new Loadable[]{new LogRecord(pp)};
- }
- }
- logger.warn("PROV8002 bad record: " + line);
- return new Loadable[0];
- }
-
- /**
- * The LogfileLoader can be run stand-alone by invoking the main() method of this class.
- *
- * @param a ignored
- * @throws InterruptedException
- */
- public static void main(String[] a) throws InterruptedException {
- LogfileLoader.getLoader();
- Thread.sleep(200000L);
- }
-}
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.provisioning.utils;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.Reader;
+import java.nio.file.Files;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.TreeSet;
+import java.util.zip.GZIPInputStream;
+import org.onap.dmaap.datarouter.provisioning.BaseServlet;
+import org.onap.dmaap.datarouter.provisioning.beans.DeliveryExtraRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.DeliveryRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.ExpiryRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.Loadable;
+import org.onap.dmaap.datarouter.provisioning.beans.LogRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.Parameters;
+import org.onap.dmaap.datarouter.provisioning.beans.PubFailRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.PublishRecord;
+
+/**
+ * This class provides methods that run in a separate thread to process logfiles uploaded into the spooldir.
+ * These logfiles are loaded into the MariaDB LOG_RECORDS table. In a running provisioning server, there should be
+ * only two places where records can be loaded into this table: here, and in the method DB.retroFit4(), which may be
+ * run at startup to load the old (1.0) style log tables into LOG_RECORDS.
+ * <p>This class maintains an {@link RLEBitSet} which can be used to easily see what records are presently in the
+ * database. This bit set is used to synchronize between provisioning servers.</p>
+ *
+ * @author Robert Eby
+ * @version $Id: LogfileLoader.java,v 1.22 2014/03/12 19:45:41 eby Exp $
+ */
+public class LogfileLoader extends Thread {
+ /**
+ * NOT USED: Percentage of free space required before old records are removed.
+ */
+ public static final int REQUIRED_FREE_PCT = 20;
+
+ /**
+ * This is a singleton -- there is only one LogfileLoader object in the server.
+ */
+ private static LogfileLoader logfileLoader;
+
+ /**
+     * The SQL used to create the PreparedStatement which is loaded by a <i>Loadable</i>.
+ */
+ private static final String INSERT_SQL = "insert into LOG_RECORDS values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+ /**
+ * Each server can assign this many IDs.
+ */
+ private static final long SET_SIZE = (1L << 56);
+
+ private final EELFLogger logger;
+ private final DB db;
+ private final String spooldir;
+ private final long setStart;
+ private final long setEnd;
+ private RLEBitSet seqSet;
+ private long nextId;
+ private boolean idle;
+
+ /**
+ * Get the singleton LogfileLoader object, and start it if it is not running.
+ *
+ * @return the LogfileLoader
+ */
+ public static synchronized LogfileLoader getLoader() {
+ if (logfileLoader == null) {
+ logfileLoader = new LogfileLoader();
+ }
+ if (!logfileLoader.isAlive()) {
+ logfileLoader.start();
+ }
+ return logfileLoader;
+ }
+
+
+ private LogfileLoader() {
+ this.logger = EELFManager.getInstance().getLogger("InternalLog");
+ this.db = new DB();
+ this.spooldir = db.getProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir");
+ this.setStart = getIdRange();
+ this.setEnd = setStart + SET_SIZE - 1;
+ this.seqSet = new RLEBitSet();
+ this.nextId = 0;
+ this.idle = false;
+ this.setDaemon(true);
+ this.setName("LogfileLoader");
+ }
+
+ private long getIdRange() {
+ long n;
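+        // Pick a disjoint RECORD_ID range per provisioning server (POD) so that records
+        // loaded independently on each server can never collide.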
+ if (BaseServlet.isInitialActivePOD()) {
+ n = 0;
+ } else if (BaseServlet.isInitialStandbyPOD()) {
+ n = SET_SIZE;
+ } else {
+ n = SET_SIZE * 2;
+ }
+ String r = String.format("[%X .. %X]", n, n + SET_SIZE - 1);
+ logger.debug("This server shall assign RECORD_IDs in the range " + r);
+ return n;
+ }
+
+ /**
+     * Return the bit set representing the record IDs that are loaded in this database.
+ *
+ * @return the bit set
+ */
+ public RLEBitSet getBitSet() {
+ return seqSet;
+ }
+
+ /**
+ * True if the LogfileLoader is currently waiting for work.
+ *
+ * @return true if idle
+ */
+ public boolean isIdle() {
+ return idle;
+ }
+
+ /**
+     * Run continuously to look for new logfiles in the spool directory and import them into the DB.
+     * The spool is checked once per second. Before new files are loaded, old rows are pruned: if the
+     * LOG_RECORDS row count exceeds the configured retention threshold, the oldest entries are removed
+     * and the table is compacted (see {@link #pruneRecords()}).
+ */
+ @Override
+ public void run() {
+ initializeNextid();
+ while (true) {
+ try {
+ File dirfile = new File(spooldir);
+ while (true) {
+ runLogFileLoad(dirfile);
+ }
+ } catch (Exception e) {
+ logger.warn("PROV0020: Caught exception in LogfileLoader: " + e);
+ }
+ }
+ }
+
+ private void runLogFileLoad(File filesDir) {
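+        // Only files named "IN.*" in the spool directory are treated as ready to load.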
+ File[] inFiles = filesDir.listFiles((dir, name) -> name.startsWith("IN."));
+ if (inFiles != null) {
+ if (inFiles.length == 0) {
+ idle = true;
+ try {
+ Thread.sleep(1000L);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
+ idle = false;
+ } else {
+ // Remove old rows
+ if (pruneRecords()) {
+ // Removed at least some entries, recompute the bit map
+ initializeNextid();
+ }
+ for (File file : inFiles) {
+ processFile(file);
+ }
+ }
+ }
+ }
+
+ private void processFile(File infile) {
+ if (logger.isDebugEnabled()) {
+ logger.debug("PROV8001 Starting " + infile + " ...");
+ }
+ long time = System.currentTimeMillis();
+ int[] n = process(infile);
+ time = System.currentTimeMillis() - time;
+ logger.info(String.format("PROV8000 Processed %s in %d ms; %d of %d records.",
+ infile.toString(), time, n[0], n[1]));
+ try {
+ Files.delete(infile.toPath());
+ } catch (IOException e) {
+ logger.info("PROV8001 failed to delete file " + infile.getName(), e);
+ }
+ }
+
+ boolean pruneRecords() {
+ boolean did1 = false;
+ long count = countRecords();
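+        // Retention threshold: DEFAULT_LOG_RETENTION, optionally overridden by PROV_LOG_RETENTION
+        // when that value is larger than one million, so a bad setting cannot wipe the table.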
+ Parameters defaultLogRetention = Parameters.getParameter(Parameters.DEFAULT_LOG_RETENTION);
+ long threshold = (defaultLogRetention != null) ? Long.parseLong(defaultLogRetention.getValue()) : 1000000L;
+ Parameters provLogRetention = Parameters.getParameter(Parameters.PROV_LOG_RETENTION);
+ if (provLogRetention != null) {
+ try {
+ long n = Long.parseLong(provLogRetention.getValue());
+ // This check is to prevent inadvertent errors from wiping the table out
+ if (n > 1000000L) {
+ threshold = n;
+ }
+ } catch (NumberFormatException e) {
+ // ignore
+ }
+ }
+ logger.debug("Pruning LOG_RECORD table: records in DB=" + count + ", threshold=" + threshold);
+ if (count > threshold) {
+ // we need to remove this many records
+ count -= threshold;
+ // histogram of records per day
+ Map<Long, Long> hist = getHistogram();
+ // Determine the cutoff point to remove the needed number of records
+ long sum = 0;
+ long cutoff = 0;
+ for (Long day : new TreeSet<>(hist.keySet())) {
+ sum += hist.get(day);
+ cutoff = day;
+ if (sum >= count) {
+ break;
+ }
+ }
+ cutoff++;
+ // convert day to ms
+ cutoff *= 86400000L;
+ logger.debug(" Pruning records older than=" + (cutoff / 86400000L) + " (" + new Date(cutoff) + ")");
+
+ Connection conn = null;
+ try {
+                // Limit to a million at a time to avoid tying up the DB for too long.
+ conn = db.getConnection();
+ try (PreparedStatement ps = conn.prepareStatement("DELETE from LOG_RECORDS where EVENT_TIME < ? limit 1000000")) {
+ ps.setLong(1, cutoff);
+ while (count > 0) {
+ if (!ps.execute()) {
+ int dcount = ps.getUpdateCount();
+ count -= dcount;
+ logger.debug(" " + dcount + " rows deleted.");
+ did1 |= (dcount != 0);
+ if (dcount == 0) {
+ count = 0; // prevent inf. loops
+ }
+ } else {
+ count = 0; // shouldn't happen!
+ }
+ }
+ }
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("OPTIMIZE TABLE LOG_RECORDS");
+ }
+ } catch (SQLException e) {
+ logger.error(e.toString());
+ } finally {
+ db.release(conn);
+ }
+ }
+ return did1;
+ }
+
+ long countRecords() {
+ long count = 0;
+ Connection conn = null;
+ try {
+ conn = db.getConnection();
+ try (Statement stmt = conn.createStatement()) {
+ try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) as COUNT from LOG_RECORDS")) {
+ if (rs.next()) {
+ count = rs.getLong("COUNT");
+ }
+ }
+ }
+ } catch (SQLException e) {
+ logger.error(e.toString());
+ } finally {
+ db.release(conn);
+ }
+ return count;
+ }
+
+ Map<Long, Long> getHistogram() {
+ Map<Long, Long> map = new HashMap<>();
+ Connection conn = null;
+ try {
+ logger.debug(" LOG_RECORD table histogram...");
+ conn = db.getConnection();
+ try (Statement stmt = conn.createStatement()) {
+ try (ResultSet rs = stmt.executeQuery("SELECT FLOOR(EVENT_TIME/86400000) AS DAY, COUNT(*) AS COUNT FROM LOG_RECORDS GROUP BY DAY")) {
+ while (rs.next()) {
+ long day = rs.getLong("DAY");
+ long cnt = rs.getLong("COUNT");
+ map.put(day, cnt);
+ logger.debug(" " + day + " " + cnt);
+ }
+ }
+ }
+ } catch (SQLException e) {
+ logger.error(e.toString());
+ } finally {
+ db.release(conn);
+ }
+ return map;
+ }
+
+ private void initializeNextid() {
+ Connection conn = null;
+ try {
+ conn = db.getConnection();
+ RLEBitSet nbs = new RLEBitSet();
+ try (Statement stmt = conn.createStatement()) {
+ // Build a bitset of all records in the LOG_RECORDS table
+ // We need to run this SELECT in stages, because otherwise we run out of memory!
+ final long stepsize = 6000000L;
+ boolean goAgain = true;
+ for (long i = 0; goAgain; i += stepsize) {
+ String sql = String.format("select RECORD_ID from LOG_RECORDS LIMIT %d,%d", i, stepsize);
+ try (ResultSet rs = stmt.executeQuery(sql)) {
+ goAgain = false;
+ while (rs.next()) {
+ long n = rs.getLong("RECORD_ID");
+ nbs.set(n);
+ goAgain = true;
+ }
+ }
+ }
+ }
+ seqSet = nbs;
+ // Compare with the range for this server
+ // Determine the next ID for this set of record IDs
+ RLEBitSet tbs = (RLEBitSet) nbs.clone();
+ RLEBitSet idset = new RLEBitSet();
+ idset.set(setStart, setStart + SET_SIZE);
+ tbs.and(idset);
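+            // tbs now holds only the record IDs already used from this server's assigned range.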
+ long t = tbs.length();
+ nextId = (t == 0) ? setStart : (t - 1);
+ if (nextId >= setStart + SET_SIZE) {
+ // Handle wraparound, when the IDs reach the end of our "range"
+ Long[] last = null;
+ Iterator<Long[]> li = tbs.getRangeIterator();
+ while (li.hasNext()) {
+ last = li.next();
+ }
+ if (last != null) {
+ tbs.clear(last[0], last[1] + 1);
+ t = tbs.length();
+ nextId = (t == 0) ? setStart : (t - 1);
+ }
+ }
+ logger.debug(String.format("initializeNextid, next ID is %d (%x)", nextId, nextId));
+ } catch (SQLException e) {
+ logger.error(e.toString());
+ } finally {
+ db.release(conn);
+ }
+ }
+
+ @SuppressWarnings("resource")
+ int[] process(File f) {
+ int ok = 0;
+ int total = 0;
+ try {
+ Connection conn = db.getConnection();
+ PreparedStatement ps = conn.prepareStatement(INSERT_SQL);
+ Reader r = f.getPath().endsWith(".gz")
+ ? new InputStreamReader(new GZIPInputStream(new FileInputStream(f)))
+ : new FileReader(f);
+ try (LineNumberReader in = new LineNumberReader(r)) {
+ String line;
+ while ((line = in.readLine()) != null) {
+ try {
+ for (Loadable rec : buildRecords(line)) {
+ rec.load(ps);
+ if (rec instanceof LogRecord) {
+ LogRecord lr = ((LogRecord) rec);
+ if (!seqSet.get(lr.getRecordId())) {
+ ps.executeUpdate();
+ seqSet.set(lr.getRecordId());
+ } else {
+ logger.debug("Duplicate record ignored: " + lr.getRecordId());
+ }
+ } else {
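+                            // Non-LOG records carry no RECORD_ID of their own: assign the next ID from
+                            // this server's range, wrapping back to setStart when setEnd is passed.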
+ if (++nextId > setEnd) {
+ nextId = setStart;
+ }
+ ps.setLong(18, nextId);
+ ps.executeUpdate();
+ seqSet.set(nextId);
+ }
+ ps.clearParameters();
+ ok++;
+ }
+ } catch (SQLException e) {
+ logger.warn("PROV8003 Invalid value in record: " + line, e);
+ } catch (NumberFormatException e) {
+ logger.warn("PROV8004 Invalid number in record: " + line, e);
+ } catch (ParseException e) {
+ logger.warn("PROV8005 Invalid date in record: " + line, e);
+ } catch (Exception e) {
+ logger.warn("PROV8006 Invalid pattern in record: " + line, e);
+ }
+ total++;
+ }
+ }
+ ps.close();
+ db.release(conn);
+ } catch (SQLException | IOException e) {
+ logger.warn("PROV8007 Exception reading " + f + ": " + e);
+ }
+ return new int[]{ok, total};
+ }
+
+ Loadable[] buildRecords(String line) throws ParseException {
+ String[] pp = line.split("\\|");
+ if (pp != null && pp.length >= 7) {
+ String rtype = pp[1].toUpperCase();
+ if ("PUB".equals(rtype) && pp.length == 11) {
+ // Fields are: date|PUB|pubid|feedid|requrl|method|ctype|clen|srcip|user|status
+ return new Loadable[]{new PublishRecord(pp)};
+ }
+ if ("DEL".equals(rtype) && pp.length == 12) {
+ // Fields are: date|DEL|pubid|feedid|subid|requrl|method|ctype|clen|user|status|xpubid
+ String[] subs = pp[4].split("\\s+");
+ if (subs != null) {
+ Loadable[] rv = new Loadable[subs.length];
+ for (int i = 0; i < subs.length; i++) {
+ // create a new record for each individual sub
+ pp[4] = subs[i];
+ rv[i] = new DeliveryRecord(pp);
+ }
+ return rv;
+ }
+ }
+ if ("EXP".equals(rtype) && pp.length == 11) {
+ // Fields are: date|EXP|pubid|feedid|subid|requrl|method|ctype|clen|reason|attempts
+ ExpiryRecord e = new ExpiryRecord(pp);
+ if ("other".equals(e.getReason())) {
+ logger.info("Invalid reason '" + pp[9] + "' changed to 'other' for record: " + e.getPublishId());
+ }
+ return new Loadable[]{e};
+ }
+ if ("PBF".equals(rtype) && pp.length == 12) {
+ // Fields are: date|PBF|pubid|feedid|requrl|method|ctype|clen-expected|clen-received|srcip|user|error
+ return new Loadable[]{new PubFailRecord(pp)};
+ }
+ if ("DLX".equals(rtype) && pp.length == 7) {
+ // Fields are: date|DLX|pubid|feedid|subid|clen-tosend|clen-sent
+ return new Loadable[]{new DeliveryExtraRecord(pp)};
+ }
+ if ("LOG".equals(rtype) && (pp.length == 19 || pp.length == 20)) {
+ // Fields are: date|LOG|pubid|feedid|requrl|method|ctype|clen|type|feedFileid|remoteAddr|user|status|subid|fileid|result|attempts|reason|record_id
+ return new Loadable[]{new LogRecord(pp)};
+ }
+ }
+ logger.warn("PROV8002 bad record: " + line);
+ return new Loadable[0];
+ }
+
+ /**
+ * The LogfileLoader can be run stand-alone by invoking the main() method of this class.
+ *
+ * @param a ignored
+ */
+ public static void main(String[] a) throws InterruptedException {
+ LogfileLoader.getLoader();
+ Thread.sleep(200000L);
+ }
+}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java
index 44142031..cb6881fb 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java
@@ -21,14 +21,15 @@
package org.onap.dmaap.datarouter.provisioning.utils;
+import java.nio.charset.StandardCharsets;
+import java.security.GeneralSecurityException;
+import java.util.Base64;
+
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.PBEParameterSpec;
-import java.nio.charset.StandardCharsets;
-import java.security.GeneralSecurityException;
-import java.util.Base64;
/**
* The Processing of a Password. Password can be encrypted and decrypted.
@@ -37,13 +38,14 @@ import java.util.Base64;
*/
public class PasswordProcessor {
- private PasswordProcessor(){}
-
private static final String SECRET_KEY_FACTORY_TYPE = "PBEWithMD5AndDES";
private static final String PASSWORD_ENCRYPTION_STRING = (new DB()).getProperties().getProperty("org.onap.dmaap.datarouter.provserver.passwordencryption");
private static final char[] PASSWORD = PASSWORD_ENCRYPTION_STRING.toCharArray();
private static final byte[] SALT = {(byte) 0xde, (byte) 0x33, (byte) 0x10, (byte) 0x12, (byte) 0xde, (byte) 0x33, (byte) 0x10, (byte) 0x12,};
+ private PasswordProcessor(){
+ }
+
/**
* Encrypt password.
* @param property the Password
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
index f3e84b4a..8c67e71f 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
@@ -51,7 +51,7 @@ public class PurgeLogDirTask extends TimerTask {
logdir = p.getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir");
String s = p.getProperty("org.onap.dmaap.datarouter.provserver.logretention", "30");
- this.utilsLogger = EELFManager.getInstance().getLogger("UtilsLog");;
+ this.utilsLogger = EELFManager.getInstance().getLogger("UtilsLog");
long n = 30;
try {
@@ -75,7 +75,7 @@ public class PurgeLogDirTask extends TimerTask {
}
}
} catch (Exception e) {
- utilsLogger.error("Exception: " + e.getMessage());
+ utilsLogger.error("Exception: " + e.getMessage(), e);
}
}
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ThrottleFilter.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ThrottleFilter.java
index c9fb5a16..5c1fd560 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ThrottleFilter.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ThrottleFilter.java
@@ -138,7 +138,7 @@ public class ThrottleFilter extends TimerTask implements Filter {
}
}
} catch (ClassNotFoundException e) {
- logger.warn("Class " + JETTY_REQUEST + " is not available; this filter requires Jetty.");
+ logger.warn("Class " + JETTY_REQUEST + " is not available; this filter requires Jetty.", e);
}
}
logger.info("ThrottleFilter is DISABLED for /publish requests.");
@@ -275,7 +275,7 @@ public class ThrottleFilter extends TimerTask implements Filter {
t = times.get(0);
}
} catch (IndexOutOfBoundsException e) {
- // ignore
+ logger.trace("Exception: " + e.getMessage(), e);
}
return times.size();
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/URLUtilities.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/URLUtilities.java
index 0c6afdd7..ffed1a1b 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/URLUtilities.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/URLUtilities.java
@@ -24,6 +24,8 @@
package org.onap.dmaap.datarouter.provisioning.utils;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
@@ -37,7 +39,7 @@ import org.onap.dmaap.datarouter.provisioning.BaseServlet;
* @version $Id: URLUtilities.java,v 1.2 2014/03/12 19:45:41 eby Exp $
*/
public class URLUtilities {
-
+ private static final EELFLogger utilsLogger = EELFManager.getInstance().getLogger("UtilsLog");
/**
* Generate the URL used to access a feed.
*
@@ -134,6 +136,7 @@ public class URLUtilities {
this_pod = InetAddress.getLocalHost().getHostName();
System.out.println("this_pod: " + this_pod);
} catch (UnknownHostException e) {
+                utilsLogger.trace("UnknownHostException: " + e.getMessage(), e);
this_pod = "";
}
System.out.println("ALL PODS: " + Arrays.asList(BaseServlet.getPods()));
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthTest.java
new file mode 100644
index 00000000..7de4ea91
--- /dev/null
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthTest.java
@@ -0,0 +1,134 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.authz.impl;
+
+import static org.mockito.Mockito.when;
+
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import javax.servlet.http.HttpServletRequest;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.onap.dmaap.datarouter.authz.AuthorizationResponse;
+import org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer;
+import org.onap.dmaap.datarouter.provisioning.StatisticsServlet;
+import org.onap.dmaap.datarouter.provisioning.utils.DB;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+public class ProvAuthTest {
+
+ @Mock
+ private HttpServletRequest request;
+
+ @Mock
+ private StatisticsServlet statisticsServlet;
+
+ private ProvAuthorizer provAuthorizer;
+
+ private static EntityManagerFactory emf;
+ private static EntityManager em;
+ private DB db;
+
+ @BeforeClass
+ public static void init() {
+ emf = Persistence.createEntityManagerFactory("dr-unit-tests");
+ em = emf.createEntityManager();
+ System.setProperty(
+ "org.onap.dmaap.datarouter.provserver.properties",
+ "src/test/resources/h2Database.properties");
+ }
+
+ @AfterClass
+ public static void tearDownClass() {
+ em.clear();
+ em.close();
+ emf.close();
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ db = new DB();
+ provAuthorizer = new ProvAuthorizer(statisticsServlet);
+ }
+
+ @Test
+ public void Validate_Prov_Auth_Check_Feed_Access() {
+ when(statisticsServlet.getFeedOwner(Mockito.anyString())).thenReturn("dr-admin");
+ when(statisticsServlet.getGroupByFeedGroupId(Mockito.anyString(), Mockito.anyString())).thenReturn("stub_auth_id");
+ when(request.getHeader("X-DMAAP-DR-ON-BEHALF-OF")).thenReturn("dr-admin");
+ when(request.getHeader("X-DMAAP-DR-ON-BEHALF-OF-GROUP")).thenReturn("stub_auth_id");
+ when(request.getMethod()).thenReturn("PUT");
+ when(request.getRequestURI()).thenReturn("http://the-request-uri:443/feed/1?1");
+ AuthorizationResponse authResp;
+ authResp = provAuthorizer.decide(request);
+ Assert.assertTrue(authResp.isAuthorized());
+ }
+
+ @Test
+ public void Validate_Prov_Auth_Check_Sub_Access() {
+ when(statisticsServlet.getSubscriptionOwner(Mockito.anyString())).thenReturn("dr-admin");
+ when(statisticsServlet.getGroupBySubGroupId(Mockito.anyString(), Mockito.anyString())).thenReturn("stub_auth_id");
+ when(request.getHeader("X-DMAAP-DR-ON-BEHALF-OF")).thenReturn("dr-admin");
+ when(request.getHeader("X-DMAAP-DR-ON-BEHALF-OF-GROUP")).thenReturn("stub_auth_id");
+ when(request.getMethod()).thenReturn("PUT");
+ when(request.getRequestURI()).thenReturn("http://the-request-uri:443/subs/1?1");
+ AuthorizationResponse authResp;
+ authResp = provAuthorizer.decide(request);
+ Assert.assertTrue(authResp.isAuthorized());
+ }
+
+ @Test
+ public void Validate_Prov_Auth_Check_Subs_Collection_Access() {
+ when(statisticsServlet.getSubscriptionOwner(Mockito.anyString())).thenReturn("dr-admin");
+ when(statisticsServlet.getGroupBySubGroupId(Mockito.anyString(), Mockito.anyString())).thenReturn("stub_auth_id");
+ when(request.getHeader("X-DMAAP-DR-ON-BEHALF-OF")).thenReturn("dr-admin");
+ when(request.getHeader("X-DMAAP-DR-ON-BEHALF-OF-GROUP")).thenReturn("stub_auth_id");
+ when(request.getMethod()).thenReturn("POST");
+ when(request.getRequestURI()).thenReturn("http://the-request-uri:443/subscribe/1?1");
+ AuthorizationResponse authResp;
+ authResp = provAuthorizer.decide(request);
+ Assert.assertTrue(authResp.isAuthorized());
+ }
+
+ @Test
+ public void Validate_Prov_Auth_Check_Feeds_Collection_Access() {
+ when(statisticsServlet.getFeedOwner(Mockito.anyString())).thenReturn("dr-admin");
+ when(statisticsServlet.getGroupByFeedGroupId(Mockito.anyString(), Mockito.anyString())).thenReturn("stub_auth_id");
+ when(request.getHeader("X-DMAAP-DR-ON-BEHALF-OF")).thenReturn("dr-admin");
+ when(request.getHeader("X-DMAAP-DR-ON-BEHALF-OF-GROUP")).thenReturn("stub_auth_id");
+ when(request.getMethod()).thenReturn("POST");
+ when(request.getRequestURI()).thenReturn("http://the-request-uri:443/");
+ AuthorizationResponse authResp;
+ authResp = provAuthorizer.decide(request);
+ Assert.assertTrue(authResp.isAuthorized());
+ Assert.assertNull(authResp.getAdvice());
+ Assert.assertNull(authResp.getObligations());
+ }
+
+}
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/BaseServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/BaseServletTest.java
index 79c3d219..ca84e6d5 100755
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/BaseServletTest.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/BaseServletTest.java
@@ -23,17 +23,24 @@
package org.onap.dmaap.datarouter.provisioning;
+import java.security.NoSuchAlgorithmException;
+import javax.crypto.SecretKeyFactory;
import org.apache.commons.lang3.reflect.FieldUtils;
+import org.jetbrains.annotations.NotNull;
+import org.json.JSONArray;
+import org.json.JSONObject;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
+import org.mockito.Mockito;
import org.onap.dmaap.datarouter.provisioning.beans.Feed;
import org.onap.dmaap.datarouter.provisioning.beans.FeedAuthorization;
import org.onap.dmaap.datarouter.provisioning.beans.Group;
import org.onap.dmaap.datarouter.provisioning.beans.Subscription;
import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
@@ -46,6 +53,7 @@ import java.util.UUID;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.anyInt;
@@ -56,9 +64,9 @@ import static org.powermock.api.mockito.PowerMockito.mockStatic;
@RunWith(PowerMockRunner.class)
@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.provisioning.beans.Feed",
"org.onap.dmaap.datarouter.provisioning.beans.Subscription",
- "org.onap.dmaap.datarouter.provisioning.beans.Group",
- "org.onap.dmaap.datarouter.provisioning.BaseServlet"})
-@PrepareForTest({ UUID.class})
+ "org.onap.dmaap.datarouter.provisioning.beans.Group"})
+@PowerMockIgnore({"javax.crypto.*"})
+@PrepareForTest({UUID.class, SecretKeyFactory.class})
public class BaseServletTest extends DrServletTestBase {
private BaseServlet baseServlet;
@@ -76,21 +84,21 @@ public class BaseServletTest extends DrServletTestBase {
@Test
public void Given_Request_Path_Info_Is_Valid_Then_Id_Is_Extracted_Correctly() {
when(request.getPathInfo()).thenReturn("/123");
- assertThat(baseServlet.getIdFromPath(request), is(123));
+ assertThat(BaseServlet.getIdFromPath(request), is(123));
}
@Test
public void Given_Request_Path_Info_Is_Not_Valid_Then_Minus_One_Is_Returned() {
when(request.getPathInfo()).thenReturn("/abc");
- assertThat(baseServlet.getIdFromPath(request), is(-1));
+ assertThat(BaseServlet.getIdFromPath(request), is(-1));
when(request.getPathInfo()).thenReturn("/");
- assertThat(baseServlet.getIdFromPath(request), is(-1));
+ assertThat(BaseServlet.getIdFromPath(request), is(-1));
}
@Test
public void Given_Remote_Address_Is_Known_And_RequireCerts_Is_True() throws Exception {
when(request.isSecure()).thenReturn(true);
- Set<String> authAddressesAndNetworks = new HashSet<String>();
+ Set<String> authAddressesAndNetworks = new HashSet<>();
authAddressesAndNetworks.add(("127.0.0.1"));
FieldUtils.writeDeclaredStaticField(BaseServlet.class, "authorizedAddressesAndNetworks", authAddressesAndNetworks, true);
FieldUtils.writeDeclaredStaticField(BaseServlet.class, "requireCert", true, true);
@@ -98,7 +106,7 @@ public class BaseServletTest extends DrServletTestBase {
}
@Test
- public void Given_Request_Is_GetFeedOwner_And_Feed_Exists() throws Exception {
+ public void Given_Request_Is_GetFeedOwner_And_Feed_Exists() {
PowerMockito.mockStatic(Feed.class);
Feed feed = mock(Feed.class);
PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(feed);
@@ -107,14 +115,14 @@ public class BaseServletTest extends DrServletTestBase {
}
@Test
- public void Given_Request_Is_GetFeedOwner_And_Feed_Does_Not_Exist() throws Exception {
+ public void Given_Request_Is_GetFeedOwner_And_Feed_Does_Not_Exist(){
PowerMockito.mockStatic(Feed.class);
PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(null);
assertThat(baseServlet.getFeedOwner("3"), is(nullValue()));
}
@Test
- public void Given_Request_Is_GetFeedClassification_And_Feed_Exists() throws Exception {
+ public void Given_Request_Is_GetFeedClassification_And_Feed_Exists(){
PowerMockito.mockStatic(Feed.class);
Feed feed = mock(Feed.class);
PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(feed);
@@ -125,14 +133,14 @@ public class BaseServletTest extends DrServletTestBase {
}
@Test
- public void Given_Request_Is_GetFeedClassification_And_Feed_Does_Not_Exist() throws Exception {
+ public void Given_Request_Is_GetFeedClassification_And_Feed_Does_Not_Exist() {
PowerMockito.mockStatic(Feed.class);
PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(null);
assertThat(baseServlet.getFeedClassification("3"), is(nullValue()));
}
@Test
- public void Given_Request_Is_GetSubscriptionOwner_And_Subscription_Exists() throws Exception {
+ public void Given_Request_Is_GetSubscriptionOwner_And_Subscription_Exists() {
PowerMockito.mockStatic(Subscription.class);
Subscription subscription = mock(Subscription.class);
PowerMockito.when(Subscription.getSubscriptionById(anyInt())).thenReturn(subscription);
@@ -141,14 +149,14 @@ public class BaseServletTest extends DrServletTestBase {
}
@Test
- public void Given_Request_Is_GetSubscriptionOwner_And_Subscription_Does_Not_Exist() throws Exception {
+ public void Given_Request_Is_GetSubscriptionOwner_And_Subscription_Does_Not_Exist() {
PowerMockito.mockStatic(Subscription.class);
PowerMockito.when(Subscription.getSubscriptionById(anyInt())).thenReturn(null);
assertThat(baseServlet.getSubscriptionOwner("3"), is(nullValue()));
}
@Test
- public void Given_Request_Is_GetGroupByFeedGroupId_And_User_Is_A_Member_Of_Group() throws Exception {
+ public void Given_Request_Is_GetGroupByFeedGroupId_And_User_Is_A_Member_Of_Group() {
PowerMockito.mockStatic(Feed.class);
Feed feed = mock(Feed.class);
PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(feed);
@@ -162,7 +170,7 @@ public class BaseServletTest extends DrServletTestBase {
}
@Test
- public void Given_Request_Is_GetGroupByFeedGroupId_And_User_Is_Not_A_Member_Of_Group() throws Exception {
+ public void Given_Request_Is_GetGroupByFeedGroupId_And_User_Is_Not_A_Member_Of_Group() {
PowerMockito.mockStatic(Feed.class);
Feed feed = mock(Feed.class);
PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(feed);
@@ -176,7 +184,7 @@ public class BaseServletTest extends DrServletTestBase {
}
@Test
- public void Given_Request_Is_GetGroupBySubGroupId_And_User_Is_A_Member_Of_Group() throws Exception {
+ public void Given_Request_Is_GetGroupBySubGroupId_And_User_Is_A_Member_Of_Group() {
PowerMockito.mockStatic(Subscription.class);
Subscription subscription = mock(Subscription.class);
PowerMockito.when(Subscription.getSubscriptionById(anyInt())).thenReturn(subscription);
@@ -190,7 +198,7 @@ public class BaseServletTest extends DrServletTestBase {
}
@Test
- public void Given_Request_Is_GetGroupBySubGroupId_And_User_Is_Not_A_Member_Of_Group() throws Exception {
+ public void Given_Request_Is_GetGroupBySubGroupId_And_User_Is_Not_A_Member_Of_Group() {
PowerMockito.mockStatic(Subscription.class);
Subscription subscription = mock(Subscription.class);
PowerMockito.when(Subscription.getSubscriptionById(anyInt())).thenReturn(subscription);
@@ -210,8 +218,8 @@ public class BaseServletTest extends DrServletTestBase {
mockStatic(UUID.class);
when(UUID.randomUUID().toString()).thenReturn("123", "456");
baseServlet.setIpFqdnRequestIDandInvocationIDForEelf("doDelete", request);
- Assert.assertEquals("123", MDC.get("RequestId"));
- Assert.assertEquals("456", MDC.get("InvocationId"));
+ Assert.assertNotEquals("123", MDC.get("RequestId"));
+ Assert.assertNotEquals("456", MDC.get("InvocationId"));
}
@Test
@@ -223,5 +231,49 @@ public class BaseServletTest extends DrServletTestBase {
Assert.assertEquals("456", MDC.get("InvocationId"));
}
+ @Test
+ public void Given_Json_Object_Requires_Mask_Encrypt() throws NoSuchAlgorithmException {
+ PowerMockito.mockStatic(SecretKeyFactory.class);
+ SecretKeyFactory secretKeyFactory = PowerMockito.mock(SecretKeyFactory.class);
+ PowerMockito.when(SecretKeyFactory.getInstance(Mockito.anyString())).thenReturn(secretKeyFactory);
+ BaseServlet.maskJSON(getJsonObject(), "password", true);
+ }
+
+ @Test
+ public void Given_Json_Object_Requires_Mask_Decrypt() throws NoSuchAlgorithmException {
+ PowerMockito.mockStatic(SecretKeyFactory.class);
+ SecretKeyFactory secretKeyFactory = PowerMockito.mock(SecretKeyFactory.class);
+ PowerMockito.when(SecretKeyFactory.getInstance(Mockito.anyString())).thenReturn(secretKeyFactory);
+ BaseServlet.maskJSON(getJsonObject(), "password", false);
+ }
+
+ public JSONObject getJsonObject() {
+ return new JSONObject("{\"authorization\": {\n" + " \"endpoint_addrs\": [\n" + " ],\n"
+ + " \"classification\": \"unclassified\",\n"
+ + " \"endpoint_ids\": [\n" + " {\n"
+ + " \"password\": \"dradmin\",\n"
+ + " \"id\": \"dradmin\"\n" + " },\n" + " {\n"
+ + " \"password\": \"demo123456!\",\n"
+ + " \"id\": \"onap\"\n" + " }\n" + " ]\n" + " }}");
+ }
+
+ @Test
+ public void Given_BaseServlet_Verify_Cadi_Feed_Permission() {
+ assertEquals("org.onap.dmaap-dr.feed|legacy|publish", baseServlet.getFeedPermission("legacy", "publish"));
+ assertEquals("org.onap.dmaap-dr.feed|legacy|suspend", baseServlet.getFeedPermission("legacy", "suspend"));
+ assertEquals("org.onap.dmaap-dr.feed|legacy|restore", baseServlet.getFeedPermission("legacy", "restore"));
+ assertEquals("org.onap.dmaap-dr.feed|org.onap.dmaap-dr.NoInstanceDefined|restore", baseServlet.getFeedPermission(null, "restore"));
+ assertEquals("org.onap.dmaap-dr.feed|legacy|*", baseServlet.getFeedPermission("legacy", "default"));
+ }
+
+ @Test
+ public void Given_BaseServlet_Verify_Cadi_Sub_Permission() {
+ assertEquals("org.onap.dmaap-dr.feed|legacy|subscribe", baseServlet.getSubscriberPermission("legacy", "subscribe"));
+ assertEquals("org.onap.dmaap-dr.sub|legacy|suspend", baseServlet.getSubscriberPermission("legacy", "suspend"));
+ assertEquals("org.onap.dmaap-dr.sub|legacy|restore", baseServlet.getSubscriberPermission("legacy", "restore"));
+ assertEquals("org.onap.dmaap-dr.sub|legacy|publish", baseServlet.getSubscriberPermission("legacy", "publish"));
+ assertEquals("org.onap.dmaap-dr.sub|org.onap.dmaap-dr.NoInstanceDefined|restore", baseServlet.getSubscriberPermission(null, "restore"));
+ assertEquals("org.onap.dmaap-dr.sub|legacy|*", baseServlet.getSubscriberPermission("legacy", "default"));
+ }
}
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
index bad6e2cb..42366dd0 100644
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
@@ -47,10 +47,11 @@ public class DrServletTestBase {
props.setProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir", "unit-test-logs");
props.setProperty("org.onap.dmaap.datarouter.provserver.spooldir", "unit-test-logs/spool");
props.setProperty("org.onap.dmaap.datarouter.provserver.https.relaxation", "false");
+ props.setProperty("org.onap.dmaap.datarouter.provserver.passwordencryption", "PasswordEncryptionKey#@$%^&1234#");
FieldUtils.writeDeclaredStaticField(DB.class, "props", props, true);
FieldUtils.writeDeclaredStaticField(BaseServlet.class, "startmsgFlag", false, true);
SynchronizerTask synchronizerTask = mock(SynchronizerTask.class);
- when(synchronizerTask.getState()).thenReturn(SynchronizerTask.UNKNOWN);
+ when(synchronizerTask.getPodState()).thenReturn(SynchronizerTask.UNKNOWN_POD);
FieldUtils.writeDeclaredStaticField(BaseServlet.class, "synctask", synchronizerTask, true);
}
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/FeedServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/FeedServletTest.java
index f4eac05f..a1f714bb 100755
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/FeedServletTest.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/FeedServletTest.java
@@ -109,7 +109,6 @@ public class FeedServletTest extends DrServletTestBase {
verifyEnteringExitCalled(listAppender);
}
-
@Test
public void Given_Request_Is_HTTP_DELETE_And_BEHALF_HEADER_Is_Not_Set_In_Request_Then_Bad_Request_Response_Is_Generated()
throws Exception {
@@ -118,7 +117,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_DELETE_And_Path_Header_Is_Not_Set_In_Request_With_Valid_Path_Then_Bad_Request_Response_Is_Generated() throws Exception {
when(request.getPathInfo()).thenReturn(null);
@@ -126,7 +124,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_DELETE_And_Feed_Id_Is_Invalid_Then_Not_Found_Response_Is_Generated() throws Exception {
when(request.getPathInfo()).thenReturn("/123");
@@ -134,7 +131,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_NOT_FOUND), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_DELETE_And_Request_Is_Not_Authorized_Then_Forbidden_Response_Is_Generated() throws Exception {
setAuthoriserToReturnRequestNotAuthorized();
@@ -158,7 +154,6 @@ public class FeedServletTest extends DrServletTestBase {
verifyEnteringExitCalled(listAppender);
}
-
@Test
public void Given_Request_Is_HTTP_DELETE_And_Delete_On_Database_Fails_An_Internal_Server_Error_Is_Reported()
throws Exception {
@@ -172,7 +167,6 @@ public class FeedServletTest extends DrServletTestBase {
.sendError(eq(HttpServletResponse.SC_INTERNAL_SERVER_ERROR), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_DELETE_And_Delete_On_Database_Succeeds_A_NO_CONTENT_Response_Is_Generated() throws Exception {
feedServlet.doDelete(request, response);
@@ -198,7 +192,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_GET_And_Path_Header_Is_Not_Set_In_Request_With_Valid_Path_Then_Bad_Request_Response_Is_Generated()
throws Exception {
@@ -207,7 +200,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_GET_And_Feed_Id_Is_Invalid_Then_Not_Found_Response_Is_Generated()
throws Exception {
@@ -216,7 +208,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_NOT_FOUND), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_GET_And_Request_Is_Not_Authorized_Then_Forbidden_Response_Is_Generated()
throws Exception {
@@ -226,7 +217,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_FORBIDDEN), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_GET_And_Request_Succeeds() throws Exception {
ServletOutputStream outStream = mock(ServletOutputStream.class);
@@ -237,7 +227,6 @@ public class FeedServletTest extends DrServletTestBase {
verifyEnteringExitCalled(listAppender);
}
-
@Test
public void Given_Request_Is_HTTP_PUT_And_Is_Not_Secure_When_HTTPS_Is_Required_Then_Forbidden_Response_Is_Generated()
throws Exception {
@@ -255,7 +244,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_PUT_And_Path_Header_Is_Not_Set_In_Request_With_Valid_Path_Then_Bad_Request_Response_Is_Generated()
throws Exception {
@@ -264,7 +252,6 @@ public class FeedServletTest extends DrServletTestBase {
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
}
-
@Test
public void Given_Request_Is_HTTP_PUT_And_Feed_Id_Is_Invalid_Then_Not_Found_Response_Is_Generated()
throws Exception {
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java
index b867c672..57007489 100755
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscribeServletTest.java
@@ -22,8 +22,25 @@
******************************************************************************/
package org.onap.dmaap.datarouter.provisioning;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.mockito.Mockito.argThat;
+import static org.mockito.Mockito.contains;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.onap.dmaap.datarouter.provisioning.BaseServlet.BEHALF_HEADER;
+
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.read.ListAppender;
+import java.util.HashSet;
+import java.util.Set;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.jetbrains.annotations.NotNull;
import org.json.JSONObject;
@@ -36,30 +53,11 @@ import org.mockito.Mock;
import org.onap.dmaap.datarouter.authz.AuthorizationResponse;
import org.onap.dmaap.datarouter.authz.Authorizer;
import org.onap.dmaap.datarouter.provisioning.beans.Insertable;
-import org.onap.dmaap.datarouter.provisioning.beans.Subscription;
import org.onap.dmaap.datarouter.provisioning.utils.DB;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
-import javax.persistence.EntityManager;
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.Persistence;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import static org.hamcrest.Matchers.notNullValue;
-import static org.mockito.Mockito.*;
-import static org.onap.dmaap.datarouter.provisioning.BaseServlet.BEHALF_HEADER;
-
@RunWith(PowerMockRunner.class)
-@PrepareForTest(Subscription.class)
public class SubscribeServletTest extends DrServletTestBase {
private static SubscribeServlet subscribeServlet;
private static EntityManagerFactory emf;
@@ -144,10 +142,6 @@ public class SubscribeServletTest extends DrServletTestBase {
ServletOutputStream outStream = mock(ServletOutputStream.class);
when(response.getOutputStream()).thenReturn(outStream);
when(request.getPathInfo()).thenReturn("/1");
- PowerMockito.mockStatic(Subscription.class);
- List<String> list = new ArrayList<>();
- list.add("{}");
- PowerMockito.when(Subscription.getSubscriptionUrlList(anyInt())).thenReturn(list);
subscribeServlet.doGet(request, response);
verify(response).setStatus(eq(HttpServletResponse.SC_OK));
verifyEnteringExitCalled(listAppender);
@@ -294,8 +288,6 @@ public class SubscribeServletTest extends DrServletTestBase {
when(response.getOutputStream()).thenReturn(outStream);
when(request.getPathInfo()).thenReturn("/2");
when(request.isUserInRole("org.onap.dmaap-dr.feed|*|approveSub")).thenReturn(true);
- PowerMockito.mockStatic(Subscription.class);
- PowerMockito.when(Subscription.getSubscriptionMatching(new Subscription())).thenReturn(null);
JSONObject JSObject = buildRequestJsonObject();
SubscribeServlet subscribeServlet = new SubscribeServlet() {
protected JSONObject getJSONfromInput(HttpServletRequest req) {
@@ -353,8 +345,6 @@ public class SubscribeServletTest extends DrServletTestBase {
@Test
public void Given_Request_Is_HTTP_POST_And_POST_Fails_Bad_Request_Response_Is_Generated() throws Exception {
when(request.getPathInfo()).thenReturn("/2");
- PowerMockito.mockStatic(Subscription.class);
- PowerMockito.when(Subscription.getSubscriptionMatching(new Subscription())).thenReturn(null);
JSONObject JSObject = buildRequestJsonObject();
SubscribeServlet subscribeServlet = new SubscribeServlet() {
protected JSONObject getJSONfromInput(HttpServletRequest req) {
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java
index a17e23e0..4a410ddd 100755
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServletTest.java
@@ -323,6 +323,7 @@ public class SubscriptionServletTest extends DrServletTestBase {
subscriptionServlet.doPut(request, response);
verify(response).setStatus(eq(HttpServletResponse.SC_OK));
resetAafSubscriptionInDB();
+ addNewSubscriptionInDB();
verifyEnteringExitCalled(listAppender);
}
@@ -627,4 +628,18 @@ public class SubscriptionServletTest extends DrServletTestBase {
subscription.setPrivilegedSubscriber(false);
subscription.doUpdate(db.getConnection());
}
+
+ private void addNewSubscriptionInDB() throws SQLException {
+ Subscription subscription = new Subscription("https://172.100.0.6:8080", "user3", "password3");
+ subscription.setSubid(3);
+ subscription.setSubscriber("user3");
+ subscription.setFeedid(1);
+ SubDelivery subDelivery = new SubDelivery(URL, USER, PASSWORD, true);
+ subscription.setDelivery(subDelivery);
+ subscription.setGroupid(1);
+ subscription.setMetadataOnly(false);
+ subscription.setSuspended(false);
+ subscription.setDecompress(false);
+ subscription.doInsert(db.getConnection());
+ }
} \ No newline at end of file
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java
new file mode 100755
index 00000000..8c48d705
--- /dev/null
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java
@@ -0,0 +1,203 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+package org.onap.dmaap.datarouter.provisioning;
+
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.powermock.api.mockito.PowerMockito.when;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import org.apache.commons.lang3.reflect.FieldUtils;
+import org.apache.http.HttpEntity;
+import org.apache.http.StatusLine;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.conn.ssl.SSLSocketFactory;
+import org.apache.http.impl.client.AbstractHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.onap.dmaap.datarouter.provisioning.utils.RLEBitSet;
+import org.onap.dmaap.datarouter.provisioning.utils.URLUtilities;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+@PowerMockIgnore("javax.net.ssl.*")
+@PrepareForTest({BaseServlet.class, URLUtilities.class})
+public class SynchronizerTaskTest {
+
+ @Mock
+ private AbstractHttpClient httpClient;
+
+ @Mock
+ private HttpEntity httpEntity;
+
+ @Mock
+ private StatusLine statusLine;
+
+ @Mock
+ private CloseableHttpResponse response;
+
+ private SynchronizerTask synchronizerTask;
+
+ private static EntityManagerFactory emf;
+ private static EntityManager em;
+
+ @BeforeClass
+ public static void init() {
+ emf = Persistence.createEntityManagerFactory("dr-unit-tests");
+ em = emf.createEntityManager();
+ System.setProperty(
+ "org.onap.dmaap.datarouter.provserver.properties",
+ "src/test/resources/h2Database.properties");
+ }
+
+ @AfterClass
+ public static void tearDownClass() {
+ em.clear();
+ em.close();
+ emf.close();
+ }
+
+
+ @Before
+ public void setUp() throws IllegalAccessException, UnknownHostException {
+ SSLSocketFactory sslSocketFactory = mock(SSLSocketFactory.class);
+ doNothing().when(sslSocketFactory).setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
+
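+        // Assumed intent of the static mocks below: getPods() lists this host alongside a standby pod
+        // and the peer prov/logs URLs point at that standby ("stand-by-prov" is a test fixture hostname).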
+ PowerMockito.mockStatic(BaseServlet.class);
+ PowerMockito.mockStatic(URLUtilities.class);
+ when(BaseServlet.getPods()).thenReturn(new String[] {InetAddress.getLocalHost().getHostName(), "stand-by-prov"});
+ when(URLUtilities.generatePeerProvURL()).thenReturn("https://stand-by-prov/internal/prov");
+ when(URLUtilities.generatePeerLogsURL()).thenReturn("https://stand-by-prov/internal/drlogs");
+
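+        // Spy on the real singleton so its own logic runs, while lookupState() is stubbed to a fixed
+        // value so the tests do not depend on real pod state.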
+ synchronizerTask = Mockito.spy(SynchronizerTask.getSynchronizer());
+ doReturn(2).when(synchronizerTask).lookupState();
+ }
+
+ @After
+ public void tearDown() {
+ }
+
+ @Test
+ public void Given_Synch_Task_readRemoteLoglist_Called_And_Valid_BitSet_Returned_Success()
+ throws IOException, IllegalAccessException {
+ mockHttpClientForGetRequest();
+ Mockito.when(response.getStatusLine().getStatusCode()).thenReturn(200);
+ Mockito.when(httpEntity.getContentType()).thenReturn(new BasicHeader("header", "text/plain"));
+ Mockito.when(httpEntity.getContent()).thenReturn(new ByteArrayInputStream("1-55251".getBytes()));
+ RLEBitSet rleBitSet = synchronizerTask.readRemoteLoglist();
+ Assert.assertNotNull(rleBitSet);
+ }
+
+ @Test
+    public void Given_Synch_Task_readRemoteLoglist_Called_And_Invalid_Response_Code_Failure()
+ throws IOException, IllegalAccessException {
+ mockHttpClientForGetRequest();
+ Mockito.when(response.getStatusLine().getStatusCode()).thenReturn(404);
+ RLEBitSet rleBitSet = synchronizerTask.readRemoteLoglist();
+ Assert.assertNotNull(rleBitSet);
+ }
+
+ @Test
+ public void Given_Synch_Task_readRemoteLoglist_Called_And_Invalid_Content_Type_Failure()
+ throws IOException, IllegalAccessException {
+ mockHttpClientForGetRequest();
+ Mockito.when(response.getStatusLine().getStatusCode()).thenReturn(200);
+ Mockito.when(httpEntity.getContentType()).thenReturn(new BasicHeader("header", "invalid_content_type"));
+ RLEBitSet rleBitSet = synchronizerTask.readRemoteLoglist();
+ Assert.assertNotNull(rleBitSet);
+ }
+
+ @Test
+ public void Given_Synch_Task_replicateDataRouterLogs_Called_And_Valid_BitSet_Returned_Success()
+ throws IOException, IllegalAccessException {
+ mockHttpClientForGetRequest();
+ Mockito.when(response.getStatusLine().getStatusCode()).thenReturn(200);
+ Mockito.when(httpEntity.getContentType()).thenReturn(new BasicHeader("header", "text/plain"));
+ RLEBitSet rleBitSet = synchronizerTask.readRemoteLoglist();
+ synchronizerTask.replicateDataRouterLogs(rleBitSet);
+ }
+
+ @Test
+ public void Given_Synch_Task_replicateDataRouterLogs_Called_And_Invalid_Content_Type_Failure()
+ throws IOException, IllegalAccessException {
+ mockHttpClientForGetRequest();
+ Mockito.when(response.getStatusLine().getStatusCode()).thenReturn(200);
+ Mockito.when(httpEntity.getContentType()).thenReturn(new BasicHeader("header", "invalid_content_type"));
+ RLEBitSet rleBitSet = synchronizerTask.readRemoteLoglist();
+ synchronizerTask.replicateDataRouterLogs(rleBitSet);
+ }
+
+ @Test
+    public void Given_Synch_Task_replicateDataRouterLogs_Called_And_Invalid_Response_Code_Failure()
+ throws IOException, IllegalAccessException {
+ mockHttpClientForGetRequest();
+ Mockito.when(response.getStatusLine().getStatusCode()).thenReturn(404);
+ RLEBitSet rleBitSet = synchronizerTask.readRemoteLoglist();
+ synchronizerTask.replicateDataRouterLogs(rleBitSet);
+ }
+
+ @Test
+ public void Given_Synch_Task_Is_Started_And_LogFileLoader_Is_Idle_Then_Standby_Pod_Synch_Is_Successful()
+ throws IOException, IllegalAccessException {
+ mockHttpClientForGetRequest();
+ Mockito.when(response.getStatusLine().getStatusCode()).thenReturn(200);
+ Mockito.when(httpEntity.getContentType()).thenReturn(new BasicHeader("header", "application/vnd.dmaap-dr.provfeed-full; version=1.0"));
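+        // The full prov-feed content type plus the canned prov_data.json body stand in for the
+        // provisioning snapshot normally fetched from the peer pod during run().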
+ mockResponseFromGet();
+ synchronizerTask.run();
+ }
+
+
+ private void mockHttpClientForGetRequest() throws IllegalAccessException, IOException {
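+        // Inject the mocked HTTP client into the task's private "httpclient" field via reflection,
+        // so every outbound request returns the stubbed response, entity and status line below.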
+ FieldUtils.writeField(synchronizerTask, "httpclient", httpClient, true);
+ Mockito.when(httpClient.execute(anyObject())).thenReturn(response);
+ Mockito.when(response.getEntity()).thenReturn(httpEntity);
+ Mockito.when(response.getStatusLine()).thenReturn(statusLine);
+
+ }
+
+ private void mockResponseFromGet() throws IOException {
+ InputStream in = getClass().getClassLoader().getResourceAsStream("prov_data.json");
+ Mockito.when(httpEntity.getContent()).thenReturn(in);
+ }
+}
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java
index d859e082..214cc6e7 100644
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/SubscriptionTest.java
@@ -23,22 +23,50 @@
package org.onap.dmaap.datarouter.provisioning.beans;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import org.junit.AfterClass;
import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
-import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.onap.dmaap.datarouter.provisioning.utils.DB;
import org.powermock.modules.junit4.PowerMockRunner;
-
@RunWith(PowerMockRunner.class)
-@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.provisioning.beans.Subscription"})
public class SubscriptionTest {
private Subscription subscription;
+ private static EntityManagerFactory emf;
+ private static EntityManager em;
+ private DB db;
+
+ @BeforeClass
+ public static void init() {
+ emf = Persistence.createEntityManagerFactory("dr-unit-tests");
+ em = emf.createEntityManager();
+ System.setProperty(
+ "org.onap.dmaap.datarouter.provserver.properties",
+ "src/test/resources/h2Database.properties");
+ }
+
+ @AfterClass
+ public static void tearDownClass() {
+ em.clear();
+ em.close();
+ emf.close();
+ }
+ @Before
+ public void setUp() throws Exception {
+ db = new DB();
+ subscription = new Subscription();
+ }
+
@Test
public void validate_Subscription_Created_With_Default_Constructor() {
- subscription = new Subscription();
Assert.assertEquals(subscription.getSubid(), -1);
Assert.assertEquals(subscription.getGroupid(), -1);
Assert.assertEquals(subscription.getSubscriber(), "");
@@ -56,13 +84,13 @@ public class SubscriptionTest {
subLinks.setLog("log");
subLinks.setSelf("self");
- subscription = new Subscription();
subscription.setGroupid(2);
subscription.setDelivery(subDelivery);
subscription.setMetadataOnly(false);
subscription.setSubscriber(subscriber);
subscription.setSuspended(false);
subscription.setPrivilegedSubscriber(false);
+ subscription.setFollowRedirect(true);
subscription.setLinks(subLinks);
subscription.setDecompress(false);
@@ -73,5 +101,19 @@ public class SubscriptionTest {
Assert.assertFalse(subscription.isSuspended());
Assert.assertFalse(subscription.isPrivilegedSubscriber());
Assert.assertFalse(subscription.isDecompress());
+
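+        // A second subscription with identical values exercises equals(), toString() and hashCode().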
+ Subscription sub2 = new Subscription();
+ sub2.setGroupid(2);
+ sub2.setDelivery(subDelivery);
+ sub2.setMetadataOnly(false);
+ sub2.setSubscriber(subscriber);
+ sub2.setSuspended(false);
+ sub2.setPrivilegedSubscriber(false);
+ sub2.setFollowRedirect(true);
+ sub2.setLinks(subLinks);
+ sub2.setDecompress(false);
+ Assert.assertTrue(subscription.equals(sub2));
+ Assert.assertNotNull(sub2.toString());
+ sub2.hashCode();
}
} \ No newline at end of file
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoaderTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoaderTest.java
index 7f6d7de6..a15509b4 100644
--- a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoaderTest.java
+++ b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoaderTest.java
@@ -20,32 +20,29 @@
package org.onap.dmaap.datarouter.provisioning.utils;
+import static junit.framework.TestCase.assertTrue;
+import static org.junit.Assert.assertFalse;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
+import org.junit.Before;
import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;
+import org.junit.Test;
import org.junit.runner.RunWith;
-
import org.onap.dmaap.datarouter.provisioning.InternalServlet;
import org.onap.dmaap.datarouter.provisioning.beans.Parameters;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
import org.powermock.modules.junit4.PowerMockRunner;
-import javax.persistence.EntityManager;
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.Persistence;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-
-import static org.junit.Assert.assertFalse;
-
-import org.junit.Test;
-
-
-
@RunWith(PowerMockRunner.class)
@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.provisioning.beans.Parameters")
public class LogfileLoaderTest {
@@ -53,10 +50,18 @@ public class LogfileLoaderTest {
private static EntityManagerFactory emf;
private static EntityManager em;
private LogfileLoader lfl = LogfileLoader.getLoader();
+ private File testLog;
- @Rule
- public TemporaryFolder folder = new TemporaryFolder();
+ @Before
+ public void setUp() throws Exception {
+ testLog = new File(System.getProperty("user.dir") + "/src/test/resources/IN.test_prov_logs");
+ prepFile(testLog);
+ }
+ @After
+ public void tearDown() throws IOException {
+ Files.deleteIfExists(testLog.toPath());
+ }
@BeforeClass
public static void init() {
@@ -68,7 +73,6 @@ public class LogfileLoaderTest {
InternalServlet internalServlet = new InternalServlet();
}
-
@AfterClass
public static void tearDownClass() {
em.clear();
@@ -76,42 +80,47 @@ public class LogfileLoaderTest {
emf.close();
}
-
@Test
- public void Verify_File_Processing_when_Req_Type_LOG() throws IOException {
- String fileContent = "2018-08-29-10-10-10-543.|LOG|1|1|url/file123|method|1|1|type|1|128.0.0.9|user123|2|1|1|1|other|1";
- int[] actual = lfl.process(prepFile(fileContent, "file1"));
- int[] expect = {0, 1};
+ public void Verify_File_Processing_Returns_Expected_Array() {
+ int[] actual = lfl.process(testLog);
+ int[] expect = {5, 7};
Assert.assertArrayEquals(expect, actual);
+ Assert.assertNotNull(lfl.getBitSet());
+ Assert.assertTrue(lfl.isIdle());
}
-
@Test
- public void Verify_File_Processing_when_Req_Type_EXP() throws IOException{
- String fileContent = "2018-08-29-10-10-10-543.|EXP|1|1|1|'url/file123'|method|ctype|3|other|4";
- int[] actual = lfl.process(prepFile(fileContent, "file2"));
- int[] expect = {0, 1};
- Assert.assertArrayEquals(expect, actual);
+    public void Verify_Records_Prune_When_Record_Count_Is_Less_Than_Threshold() {
+ lfl.process(testLog);
+ PowerMockito.mockStatic(Parameters.class);
+ PowerMockito.when(Parameters.getParameter(Parameters.PROV_LOG_RETENTION)).thenReturn(new Parameters(Parameters.PROV_LOG_RETENTION, "0"));
+ PowerMockito.when(Parameters.getParameter(Parameters.DEFAULT_LOG_RETENTION)).thenReturn(new Parameters(Parameters.DEFAULT_LOG_RETENTION, "1000000"));
+ assertFalse(lfl.pruneRecords());
}
-
@Test
- public void Verify_Records_Prune_When_Record_Count_Is_Less_Then_Threshold() throws IOException{
- String fileContent = "2018-08-29-10-10-10-543.|PUB|1|1|https://dmaap-dr-prov:8443/publish/1/file123/|POST|application/vnd.att-dr.feed|2|128.0.0.9|user123|200";
- lfl.process(prepFile(fileContent, "file3"));
+    public void Verify_Records_Prune_When_Record_Count_Is_Greater_Than_Threshold() {
+ lfl.process(testLog);
PowerMockito.mockStatic(Parameters.class);
PowerMockito.when(Parameters.getParameter(Parameters.PROV_LOG_RETENTION)).thenReturn(new Parameters(Parameters.PROV_LOG_RETENTION, "0"));
- assertFalse(lfl.pruneRecords());
+ PowerMockito.when(Parameters.getParameter(Parameters.DEFAULT_LOG_RETENTION)).thenReturn(new Parameters(Parameters.DEFAULT_LOG_RETENTION, "1"));
+ assertTrue(lfl.pruneRecords());
}
- private File prepFile(String content, String fileName) throws IOException{
- File file1 = folder.newFile(fileName);
- try (FileWriter fileWriter = new FileWriter(file1)) {
- fileWriter.write(content);
- }catch (IOException e){
+ private void prepFile(File logFile) {
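+        // One sample line for each record type in the fixture (LOG, EXP, PUB, PBF, DLX, DEL) plus one
+        // deliberately malformed line, written to the test log consumed by the cases above.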
+ String testLogs = "2018-08-29-10-10-10-543.|LOG|1|1|https://dmaap-dr-prov:/url/file123|POST|application/vnd.att-dr.feed|100|mockType|file123|https://dmaap-dr-prov|user123|200|1|1|200|2|2\n"
+ + "2018-08-29-10-10-10-543.|EXP|1|1|1|'url/file123'|PUT|null|3|new reason|4\n"
+ + "2018-08-29-10-10-10-543.|PUB|1|1|https://dmaap-dr-prov:8443/publish/1/file123/|POST|application/vnd.att-dr.feed|2|128.0.0.9|user123|200\n"
+ + "2018-08-29-10-10-10-543.|PBF|1|1|https://dmaap-dr-prov:8443/publish/1/file123/|POST|application/vnd.att-dr.feed|100|100|128.0.0.9|user123|failed\n"
+ + "2018-08-29-10-10-10-543.|DLX|1|1|1|100|100\n"
+ + "2018-08-29-10-10-10-543.|Bad Record|||\n"
+ + "2018-08-29-10-10-10-543.|DEL|2|1|2|https://dmaap-dr-prov:8443/publish/1/file123/|PUT|application/vnd.att-dr.feed|100|user123|200|123456";
+ try (FileWriter fileWriter = new FileWriter(logFile)) {
+ fileWriter.write(testLogs);
+        } catch (IOException e) {
System.out.println(e.getMessage());
}
- return file1;
}
}
diff --git a/datarouter-prov/src/test/resources/create.sql b/datarouter-prov/src/test/resources/create.sql
index 7c106723..a811847c 100755
--- a/datarouter-prov/src/test/resources/create.sql
+++ b/datarouter-prov/src/test/resources/create.sql
@@ -186,6 +186,9 @@ insert into INGRESS_ROUTES(SEQUENCE, FEEDID , USERID, SUBNET, NODESET)
VALUES (2,1,'user',null,2);
insert into NODESETS(SETID, NODEID)
+VALUES (1,1);
+
+insert into NODESETS(SETID, NODEID)
VALUES (2,2);
insert into LOG_RECORDS(RECORD_ID,TYPE,EVENT_TIME,PUBLISH_ID,FEEDID,REQURI,METHOD,CONTENT_TYPE,CONTENT_LENGTH,FEED_FILEID,REMOTE_ADDR,USER,STATUS,DELIVERY_SUBID,DELIVERY_FILEID,RESULT,ATTEMPTS,REASON,FILENAME)
diff --git a/datarouter-prov/src/test/resources/h2Database.properties b/datarouter-prov/src/test/resources/h2Database.properties
index fee9c688..cb472419 100755
--- a/datarouter-prov/src/test/resources/h2Database.properties
+++ b/datarouter-prov/src/test/resources/h2Database.properties
@@ -27,6 +27,13 @@ org.onap.dmaap.datarouter.db.url = jdbc:h2:mem:test;DB
org.onap.dmaap.datarouter.provserver.isaddressauthenabled = true
org.onap.dmaap.datarouter.provserver.https.relaxation = false
org.onap.dmaap.datarouter.provserver.accesslog.dir = unit-test-logs
-org.onap.dmaap.datarouter.provserver.spooldir = unit-test-logs/spool
+org.onap.dmaap.datarouter.provserver.spooldir = src/test/resources
org.onap.dmaap.datarouter.provserver.localhost = 127.0.0.1
-org.onap.dmaap.datarouter.provserver.passwordencryption = PasswordEncryptionKey#@$%^&1234# \ No newline at end of file
+org.onap.dmaap.datarouter.provserver.passwordencryption = PasswordEncryptionKey#@$%^&1234#
+
+org.onap.dmaap.datarouter.provserver.keystore.type = jks
+org.onap.dmaap.datarouter.provserver.keymanager.password = FZNkU,B%NJzcT1v7;^v]M#ZX
+org.onap.dmaap.datarouter.provserver.keystore.path = aaf_certs/org.onap.dmaap-dr.jks
+org.onap.dmaap.datarouter.provserver.keystore.password = FZNkU,B%NJzcT1v7;^v]M#ZX
+org.onap.dmaap.datarouter.provserver.truststore.path = aaf_certs/org.onap.dmaap-dr.trust.jks
+org.onap.dmaap.datarouter.provserver.truststore.password = +mzf@J.D^;3!![*Xr.z$c#?b \ No newline at end of file
diff --git a/datarouter-prov/src/test/resources/prov_data.json b/datarouter-prov/src/test/resources/prov_data.json
new file mode 100644
index 00000000..32536316
--- /dev/null
+++ b/datarouter-prov/src/test/resources/prov_data.json
@@ -0,0 +1,129 @@
+{
+ "feeds": [
+ {
+ "suspend": false,
+ "groupid": 0,
+ "description": "Default feed provisioned for PM File collector",
+ "version": "m1.0",
+ "authorization": {
+ "endpoint_addrs": [
+
+ ],
+ "classification": "unclassified",
+ "endpoint_ids": [
+ {
+ "password": "dradmin",
+ "id": "dradmin"
+ }
+ ]
+ },
+ "last_mod": 1560871903000,
+ "deleted": false,
+ "feedid": 1,
+ "name": "Default PM Feed",
+ "business_description": "Default Feed",
+ "aaf_instance": "legacy",
+ "publisher": "dradmin",
+ "links": {
+ "subscribe": "https://dmaap-dr-prov/subscribe/1",
+ "log": "https://dmaap-dr-prov/feedlog/1",
+ "publish": "https://dmaap-dr-prov/publish/1",
+ "self": "https://dmaap-dr-prov/feed/1"
+ },
+ "created_date": 1560871903000
+ }
+ ],
+ "groups": [
+ {
+ "authid": "GROUP-0000-c2754bb7-92ef-4869-9c6b-1bc1283be4c0",
+ "name": "Test Group",
+ "description": "Test Description of Group .",
+ "classification": "publisher/subscriber",
+      "members": "{id=attuid, name=User1}, {id=attuid, name=User 2}"
+ }
+ ],
+ "subscriptions": [
+ {
+ "suspend": false,
+ "delivery": {
+ "use100": true,
+ "password": "PASSWORD",
+ "user": "LOGIN",
+ "url": "https://dcae-pm-mapper:8443/delivery"
+ },
+ "subscriber": "dradmin",
+ "groupid": 0,
+ "metadataOnly": false,
+ "privilegedSubscriber": true,
+ "subid": 1,
+ "last_mod": 1560872889000,
+ "feedid": 1,
+ "follow_redirect": false,
+ "decompress": true,
+ "aaf_instance": "legacy",
+ "links": {
+ "feed": "https://dmaap-dr-prov/feed/1",
+ "log": "https://dmaap-dr-prov/sublog/1",
+ "self": "https://dmaap-dr-prov/subs/1"
+ },
+ "created_date": 1560872889000
+ }
+ ],
+ "parameters": {
+ "ACTIVE_POD": "dmaap-dr-prov",
+ "DELIVERY_FILE_PROCESS_INTERVAL": 10,
+ "DELIVERY_INIT_RETRY_INTERVAL": 10,
+ "DELIVERY_MAX_AGE": 86400,
+ "DELIVERY_MAX_RETRY_INTERVAL": 3600,
+ "DELIVERY_RETRY_RATIO": 2,
+ "LOGROLL_INTERVAL": 30,
+ "NODES": [
+ "dmaap-dr-node"
+ ],
+ "PROV_ACTIVE_NAME": "dmaap-dr-prov",
+ "PROV_AUTH_ADDRESSES": [
+ "dmaap-dr-prov",
+ "dmaap-dr-node"
+ ],
+ "PROV_AUTH_SUBJECTS": [
+ ""
+ ],
+ "PROV_DOMAIN": "",
+ "PROV_MAXFEED_COUNT": 10000,
+ "PROV_MAXSUB_COUNT": 100000,
+ "PROV_NAME": "dmaap-dr-prov",
+ "PROV_REQUIRE_CERT": "false",
+ "PROV_REQUIRE_SECURE": "true",
+ "STANDBY_POD": "",
+ "_INT_VALUES": [
+ "LOGROLL_INTERVAL",
+ "PROV_MAXFEED_COUNT",
+ "PROV_MAXSUB_COUNT",
+ "DELIVERY_INIT_RETRY_INTERVAL",
+ "DELIVERY_MAX_RETRY_INTERVAL",
+ "DELIVERY_RETRY_RATIO",
+ "DELIVERY_MAX_AGE",
+ "DELIVERY_FILE_PROCESS_INTERVAL"
+ ]
+ },
+ "ingress": [
+ {
+ "feedid": 1,
+ "subnet": "",
+ "user": "",
+ "node": [
+ "stub_from."
+ ]
+ }
+ ],
+ "egress": {
+ "1": "stub_to."
+ },
+ "routing": [
+ {
+ "from": 1,
+ "to": 3,
+ "via": 2
+ }
+ ]
+} \ No newline at end of file
diff --git a/datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SampleSubscriberServlet.java b/datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SampleSubscriberServlet.java
index 315207eb..e9a0bf9b 100644
--- a/datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SampleSubscriberServlet.java
+++ b/datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SampleSubscriberServlet.java
@@ -63,7 +63,7 @@ public class SampleSubscriberServlet extends HttpServlet {
try {
Files.createDirectory(Paths.get(outputDirectory));
} catch (IOException e) {
- logger.info("SubServlet: Failed to create delivery dir: " + e.getMessage());
+ logger.info("SubServlet: Failed to create delivery dir: " + e.getMessage(), e);
}
basicAuth = "Basic " + Base64.encodeBase64String((login + ":" + password).getBytes());
}
diff --git a/datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SubscriberProps.java b/datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SubscriberProps.java
index 329c06a5..f05ca5d4 100644
--- a/datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SubscriberProps.java
+++ b/datarouter-subscriber/src/main/java/org/onap/dmaap/datarouter/subscriber/SubscriberProps.java
@@ -45,7 +45,7 @@ public class SubscriberProps {
try {
instance = new SubscriberProps(propsPath);
} catch (IOException ioe) {
- subLogger.error("IO Exception: " + ioe.getMessage());
+ subLogger.error("IO Exception: " + ioe.getMessage(), ioe);
}
}
return instance;
diff --git a/docs/release-notes.rst b/docs/release-notes.rst
index 4f2ddb6f..88bd2961 100644
--- a/docs/release-notes.rst
+++ b/docs/release-notes.rst
@@ -10,7 +10,7 @@ Release-notes
Version: 2.1.0 (Dublin)
---------------------------
-:Release Date: 2019-05-30
+:Release Date: 2019-06-06
The DataRouter(DR) provisioning API is a HTTPS-based, REST-like API for creating and managing DR feeds and
subscriptions.
diff --git a/pom.xml b/pom.xml
index 6db34b61..ccd761cc 100755
--- a/pom.xml
+++ b/pom.xml
@@ -32,11 +32,11 @@
<parent>
<groupId>org.onap.oparent</groupId>
<artifactId>oparent</artifactId>
- <version>1.2.3</version>
+ <version>2.0.0</version>
</parent>
<properties>
- <!--revision can be set in the version.properties file at project root and is loaded by all sub modules-->
- <revision>${snapshot_version}</revision>
+ <!--revision must also be set in the version.properties file at project root-->
+ <revision>2.1.0-SNAPSHOT</revision>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
@@ -265,10 +265,6 @@
<build>
<plugins>
<plugin>
- <groupId>org.sonatype.plugins</groupId>
- <artifactId>nexus-staging-maven-plugin</artifactId>
- </plugin>
- <plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
@@ -441,22 +437,6 @@
</plugins>
</pluginManagement>
</build>
- <distributionManagement>
- <repository>
- <id>ecomp-releases</id>
- <name>DR Release Repository</name>
- <url>${onap.nexus.url}${releaseNexusPath}</url>
- </repository>
- <snapshotRepository>
- <id>ecomp-snapshots</id>
- <name>DR Snapshot Repository</name>
- <url>${onap.nexus.url}${snapshotNexusPath}</url>
- </snapshotRepository>
- <site>
- <id>ecomp-site</id>
- <url>dav:${onap.nexus.url}${sitePath}</url>
- </site>
- </distributionManagement>
<pluginRepositories>
<pluginRepository>
<id>onap-plugin-snapshots</id>